#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

# Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest
from bldcontrol.models import BRLayer
from bldcontrol import bbcontroller

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction, connection


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key

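    # Illustrative note (not part of the original module): the cache key built
    # by _build_key() is a flat string derived from the sorted kwargs, using
    # the database id for model instances and str() for everything else.
    # Assuming a hypothetical Recipe row with pk=7, a call such as
    #
    #   ORMWrapper._build_key(recipe=recipe_obj, task_name="do_compile")
    #
    # would produce "0-7-do_compile".  _cached_get_or_create() and
    # _cached_get() below use this key to index per-model-class dictionaries
    # (objects_Task, objects_Recipe, ...) kept on the wrapper instance.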

    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key], created = \
                clazz.objects.get_or_create(**kwargs)

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]

    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.
        """
        return target.get_similar_target_with_image_files()

    def get_similar_target_with_sdk_files(self, target):
        return target.get_similar_target_with_sdk_files()

    def clone_image_artifacts(self, target_from, target_to):
        target_to.clone_image_artifacts_from(target_from)

    def clone_sdk_artifacts(self, target_from, target_to):
        target_to.clone_sdk_artifacts_from(target_from)

    def _timestamp_to_datetime(self, secs):
        """
        Convert timestamp in seconds to Python datetime
        """
        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))

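    # Illustrative note (not in the original source): _timestamp_to_datetime()
    # converts the raw seconds-since-epoch values reported by buildstats into
    # timezone-aware datetimes, e.g. self._timestamp_to_datetime(0) yields
    # datetime(1970, 1, 1) made aware in Django's current time zone via
    # timezone.make_aware().  update_task_object() below relies on this for
    # Task.started and Task.ended.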
    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def get_or_create_build_object(self, brbe):
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

        logger.debug(1, "buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

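    # Illustrative note (assumption inferred from the surrounding code, not
    # original text): the brbe string handled above has the form
    # "<BuildRequest id>:<BuildEnvironment id>", e.g. "13:1" (hypothetical
    # values); only the first component is used here to look up the
    # BuildRequest, while the second is used elsewhere (see
    # get_update_layer_object) to find the BuildEnvironment.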
    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result
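    # Illustrative sketch (hypothetical values, not in the original source) of
    # how the target strings above are normalised before Target rows are
    # created:
    #
    #   "core-image-minimal"                 -> target="core-image-minimal", task=""
    #   "core-image-minimal:do_populate_sdk" -> target="core-image-minimal", task="populate_sdk"
    #   "busybox:do_build"                   -> target="busybox", task=""  ('build' is the default)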

    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()
        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")     # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                    layer_version=built_layer,
                    file_path=recipe_information['file_path'],
                    pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                  build = build_obj,
                                  layer = layer_obj,
                                  branch = layer_version_information['branch'],
                                  commit = layer_version_information['commit'],
                                  priority = layer_version_information['priority'],
                                  local_path = layer_version_information['local_path'],
                                  project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster

            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple of the same layer name or the name
                # hasn't been determined by the toaster.bbclass layer
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                        return br_layer.layer_version

                    if br_layer.local_source_dir == \
                            layer_information['local_path']:
                        return br_layer.layer_version

        # We've reached the end of our search and couldn't find the layer
        # we can continue but some data may be missing
        raise NotExisting("Unidentified layer %s" %
                          pformat(layer_information))

    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if len(path) == 0:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


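    # Illustrative sketch (hypothetical paths, not part of the original
    # module) of the relative-symlink normalisation performed above: a symlink
    # recorded as path="/usr/lib/libfoo.so" with filetarget_path="../lib/libfoo.so.1"
    # is first joined with its parent directory, giving
    # "/usr/lib/../lib/libfoo.so.1", and the ".." components are then collapsed
    # to "/usr/lib/libfoo.so.1" before the Target_File lookup for sym_target.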
    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            # Search name switches round the installed name vs package name
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but is"
                               " missing from all packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=
                        built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the"
                                " configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize))
                    if len(packagefile_objects):
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if len(errormsg) > 0:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
            file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        if not 'PN' in package_info.keys():
            # no package data to save (e.g. 'OPKGN'="lib64-*"|"lib32-*")
            return None

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration"
                             " data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)
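    # Illustrative note (hypothetical variable name, not in the original
    # source) on the fallback above: when a variable has no 'doc' entry of its
    # own, the heuristic reuses the documentation of its "root" variable, i.e.
    # the underscore-joined upper-case words of the name.  For example, for
    # k = "SRC_URI_append_pn-foo" the candidate root is "SRC_URI", so the
    # SRC_URI help text is attached to the override-specific variable as well.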


class MockEvent(object):
    """ This object is used to create an event, for which normal event-processing methods can
        be used, out of data that is not coming via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg


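# Illustrative sketch (not part of the original module) of how a MockEvent is
# typically filled in by callers so that ordinary event-processing code paths
# can handle data that did not arrive as a real bitbake event:
#
#   event = MockEvent()
#   event.levelno = formatter.WARNING
#   event.msg = "example message"        # hypothetical content
#   event.pathname = "(internal)"
#   event.lineno = 0
#   # ... then pass 'event' to the usual log-storing code path.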
class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database
        It is instantiated once per build
        Keeps in memory all data that needs matching before writing it to the database
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no target is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name != None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash. and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_dependency(self, pathRE):
        """ Returns the layer in the toaster db that has a full regex
        match to the pathRE. pathRE - the layer path passed as a regex in the
        event. It is created in cooker.py as a collection for the layer
        priorities.
        """
        self._ensure_build()

        def _sort_longest_path(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Our paths don't append a trailing slash
        if pathRE.endswith("/"):
            pathRE = pathRE[:-1]

        p = re.compile(pathRE)
        path=re.sub(r'[$^]',r'',pathRE)
        # Heuristics: we always match recipe to the deepest layer path in
        # the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects,
                          reverse=True, key=_sort_longest_path):
            if p.fullmatch(os.path.abspath(lvo.local_path)):
                return lvo
            if lvo.layer.local_source_dir:
                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
                    return lvo
            if 0 == path.find(lvo.local_path):
                # sub-layer path inside existing layer
                return lvo

        # if we get here, we didn't read layers correctly;
        # dump whatever information we have on the error log
        logger.warning("Could not match layer dependency for path %s : %s",
                       pathRE,
                       self.orm_wrapper.layer_version_objects)
        return None

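    # Illustrative note (assumption, not in the original source): the pathRE
    # argument above is expected to be an anchored layer-path regex as emitted
    # for layer priorities (typically the layer's BBFILE_PATTERN, e.g.
    # "^/home/user/poky/meta/" -- hypothetical path).  The trailing slash is
    # stripped, p.fullmatch() then matches a Layer_Version whose local_path is
    # "/home/user/poky/meta", and the plain-text fallback catches sub-layer
    # paths that merely start with a known layer's local_path.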
    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        # if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        # mockup the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj

    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)

        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info

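    # Illustrative sketch (hypothetical values, not part of the original
    # module) of the taskfile splitting above: a virtual recipe path such as
    #
    #   "virtual:native:/home/user/poky/meta/recipes-devtools/quilt/quilt_0.65.bb"
    #
    # gives localfilepath = "/home/user/poky/meta/recipes-devtools/quilt/quilt_0.65.bb"
    # and pathflags = "native:virtual" (the sorted prefix components); the file
    # path is then stored relative to the matched layer's local_path.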
    def _get_path_information(self, task_object):
        self._ensure_build()

        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            pe, pv = task_object.recipe.version.split(":",1)
            if len(pe) > 0:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                              buildname=buildname,
                                                              package=package))

        return build_stats_path


    ################################
    ## external available methods to store information
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata or data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)

    def store_started_build(self):
        self._ensure_build()

    def save_build_log_file_path(self, build_log_path):
        self._ensure_build()

        if not self.internal_state['build'].cooker_log_path:
            data_dict = {'cooker_log_path': build_log_path}
            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)

    def save_build_targets(self, event):
        self._ensure_build()

        # create target information
        assert '_pkgs' in vars(event)
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = self.internal_state['build']

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

    def save_build_layers_and_variables(self):
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file']=abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe

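    # Illustrative sketch (hypothetical paths, not in the original source) of
    # the prefix stripping above: with a layer checked out at
    # "/home/user/poky/meta", a variable-history entry recorded in
    # "/home/user/poky/meta/conf/distro/poky.conf" is stored as
    # "meta/conf/distro/poky.conf" -- everything up to, but not including, the
    # layer directory name is dropped so the layer name stays visible.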
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001163 def set_recipes_to_parse(self, num_recipes):
1164 """
1165 Set the number of recipes which need to be parsed for this build.
1166 This is set the first time ParseStarted is received by toasterui.
1167 """
1168 self._ensure_build()
1169 self.internal_state['build'].recipes_to_parse = num_recipes
1170 self.internal_state['build'].save()
1171
1172 def set_recipes_parsed(self, num_recipes):
1173 """
1174 Set the number of recipes parsed so far for this build; this is updated
1175 each time a ParseProgress or ParseCompleted event is received by
1176 toasterui.
1177 """
1178 self._ensure_build()
1179 if num_recipes <= self.internal_state['build'].recipes_to_parse:
1180 self.internal_state['build'].recipes_parsed = num_recipes
1181 self.internal_state['build'].save()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001182
1183 def update_target_image_file(self, event):
1184 evdata = BuildInfoHelper._get_data_from_event(event)
1185
1186 for t in self.internal_state['targets']:
1187 if t.is_image == True:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001188 output_files = list(evdata.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001189 for output in output_files:
1190 if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
1191 self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
1192
1193 def update_artifact_image_file(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001194 self._ensure_build()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001195 evdata = BuildInfoHelper._get_data_from_event(event)
1196 for artifact_path in evdata.keys():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001197 self.orm_wrapper.save_artifact_information(
1198 self.internal_state['build'], artifact_path,
1199 evdata[artifact_path])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001200
1201 def update_build_information(self, event, errors, warnings, taskfailures):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001202 self._ensure_build()
1203 self.orm_wrapper.update_build_stats_and_outcome(
1204 self.internal_state['build'], errors, warnings, taskfailures)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001205
1206 def store_started_task(self, event):
1207 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
1208 assert 'taskfile' in vars(event)
1209 localfilepath = event.taskfile.split(":")[-1]
1210 assert localfilepath.startswith("/")
1211
1212 identifier = event.taskfile + ":" + event.taskname
1213
1214 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
1215 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1216
1217 task_information = self._get_task_information(event, recipe)
1218 task_information['outcome'] = Task.OUTCOME_NA
1219
1220 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
1221 assert 'reason' in vars(event)
1222 task_information['task_executed'] = False
1223 if event.reason == "covered":
1224 task_information['outcome'] = Task.OUTCOME_COVERED
1225 if event.reason == "existing":
1226 task_information['outcome'] = Task.OUTCOME_PREBUILT
1227 else:
1228 task_information['task_executed'] = True
1229 if 'noexec' in vars(event) and event.noexec == True:
1230 task_information['task_executed'] = False
1231 task_information['outcome'] = Task.OUTCOME_EMPTY
1232 task_information['script_type'] = Task.CODING_NA
1233
1234 # do not assign order numbers to scene tasks
1235 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
1236 self.task_order += 1
1237 task_information['order'] = self.task_order
1238
1239 self.orm_wrapper.get_update_task_object(task_information)
1240
1241 self.internal_state['taskdata'][identifier] = {
1242 'outcome': task_information['outcome'],
1243 }
1244
1245
1246 def store_tasks_stats(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001247 self._ensure_build()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001248 task_data = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001249
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001250 for (task_file, task_name, task_stats, recipe_name) in task_data:
1251 build = self.internal_state['build']
1252 self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001253
1254 def update_and_store_task(self, event):
1255 assert 'taskfile' in vars(event)
1256 localfilepath = event.taskfile.split(":")[-1]
1257 assert localfilepath.startswith("/")
1258
1259 identifier = event.taskfile + ":" + event.taskname
        if identifier not in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]
                elif len(candidates) > 1 and hasattr(event, '_package'):
                    if 'native-' in event._package:
                        identifier = 'native:' + identifier
                    if 'nativesdk-' in event._package:
                        identifier = 'nativesdk:' + identifier
                    candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                    if len(candidates) == 1:
                        identifier = candidates[0]
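        # Illustrative (hypothetical) example of the guessing above: an event
        # for "/.../m4/m4_1.4.bb:do_configure" may only be stored under
        # "virtual:native:/.../m4/m4_1.4.bb:do_configure", so the endswith()
        # match and the native:/nativesdk: prefixes are tried before the
        # assert below.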
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001274
1275 assert identifier in self.internal_state['taskdata']
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[:-1])
1278 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1279 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1280 task_information = self._get_task_information(event,recipe)
1281
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001282 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1283
1284 if 'logfile' in vars(event):
1285 task_information['logfile'] = event.logfile
1286
1287 if '_message' in vars(event):
1288 task_information['message'] = event._message
1289
        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if event.taskflags.get('python') == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL
1296
1297 if task_information['outcome'] == Task.OUTCOME_NA:
1298 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1299 task_information['outcome'] = Task.OUTCOME_SUCCESS
1300 del self.internal_state['taskdata'][identifier]
1301
1302 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1303 task_information['outcome'] = Task.OUTCOME_FAILED
1304 del self.internal_state['taskdata'][identifier]
1305
1306 if not connection.features.autocommits_when_autocommit_is_off:
1307 # we force a sync point here, to get the progress bar to show
1308 if self.autocommit_step % 3 == 0:
1309 transaction.set_autocommit(True)
1310 transaction.set_autocommit(False)
1311 self.autocommit_step += 1
1312
1313 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1314
1315
1316 def store_missed_state_tasks(self, event):
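        # The event payload is assumed (from the unpacking below) to look like
        #   {'missed': [(fn, taskname, taskhash, sstatefile), ...],
        #    'found':  [(fn, taskname, taskhash, sstatefile), ...]}
        # 'missed' entries are stored as SSTATE_MISS tasks; 'found' entries
        # only record the path to the shared-state object.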
1317 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1318
1319 # identifier = fn + taskname + "_setscene"
1320 recipe_information = self._get_recipe_information_from_taskfile(fn)
1321 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1322 mevent = MockEvent()
1323 mevent.taskname = taskname
1324 mevent.taskhash = taskhash
1325 task_information = self._get_task_information(mevent,recipe)
1326
1327 task_information['start_time'] = timezone.now()
1328 task_information['outcome'] = Task.OUTCOME_NA
1329 task_information['sstate_checksum'] = taskhash
1330 task_information['sstate_result'] = Task.SSTATE_MISS
1331 task_information['path_to_sstate_obj'] = sstatefile
1332
1333 self.orm_wrapper.get_update_task_object(task_information)
1334
1335 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1336
1337 # identifier = fn + taskname + "_setscene"
1338 recipe_information = self._get_recipe_information_from_taskfile(fn)
1339 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1340 mevent = MockEvent()
1341 mevent.taskname = taskname
1342 mevent.taskhash = taskhash
1343 task_information = self._get_task_information(mevent,recipe)
1344
1345 task_information['path_to_sstate_obj'] = sstatefile
1346
1347 self.orm_wrapper.get_update_task_object(task_information)
1348
1349
1350 def store_target_package_data(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001351 self._ensure_build()
1352
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353 # for all image targets
1354 for target in self.internal_state['targets']:
1355 if target.is_image:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001356 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001357 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1358 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
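                # assumed shape of the event data, inferred from the lookups
                # above: 'pkgdata' maps package names to package metadata,
                # while 'imgdata' and 'filedata' are dictionaries keyed by
                # target name.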
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001359
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001361 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1362 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001363 except KeyError as e:
                    logger.warning("KeyError in save_target_package_information %s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001366
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001367 # only try to find files in the image if the task for this
1368 # target is one which produces image files; otherwise, the old
1369 # list of files in the files-in-image.txt file will be
1370 # appended to the target even if it didn't produce any images
1371 if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1372 try:
1373 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1374 except KeyError as e:
1375 logger.warning("KeyError in save_target_file_information"
1376 "%s ", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001377
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378
1379
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001380 def cancel_cli_build(self):
1381 """
1382 If a build is currently underway, set its state to CANCELLED;
1383 note that this only gets called for command line builds which are
1384 interrupted, so it doesn't touch any BuildRequest objects
1385 """
1386 self._ensure_build()
1387 self.internal_state['build'].outcome = Build.CANCELLED
1388 self.internal_state['build'].save()
1389 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001390
1391 def store_dependency_information(self, event):
1392 assert '_depgraph' in vars(event)
1393 assert 'layer-priorities' in event._depgraph
1394 assert 'pn' in event._depgraph
1395 assert 'tdepends' in event._depgraph
1396
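        # event._depgraph is bitbake's dependency-graph dump; the keys used
        # below are assumed to have roughly this shape:
        #   'layer-priorities': [(_, layer_path, _, priority), ...]
        #   'pn':          {pn: {'filename': ..., 'version': ..., 'inherits': [...], ...}}
        #   'depends':     {pn: [depends_pn, ...]}
        #   'providermap': {provided_name: (providing_pn, ...)}
        #   'tdepends':    {'pn.taskname': ['pn.taskname', ...]}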
1397 errormsg = ""
1398
1399 # save layer version priorities
1400 if 'layer-priorities' in event._depgraph.keys():
1401 for lv in event._depgraph['layer-priorities']:
1402 (_, path, _, priority) = lv
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001403 layer_version_obj = self._get_layer_version_for_dependency(path)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001404 if layer_version_obj:
1405 layer_version_obj.priority = priority
1406 layer_version_obj.save()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001407
1408 # save recipe information
1409 self.internal_state['recipes'] = {}
1410 for pn in event._depgraph['pn']:
1411
1412 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1413 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
1414 layer_version_obj = self._get_layer_version_for_path(file_name)
1415
1416 assert layer_version_obj is not None
1417
1418 recipe_info = {}
1419 recipe_info['name'] = pn
1420 recipe_info['layer_version'] = layer_version_obj
1421
1422 if 'version' in event._depgraph['pn'][pn]:
1423 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1424
1425 if 'summary' in event._depgraph['pn'][pn]:
1426 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1427
1428 if 'license' in event._depgraph['pn'][pn]:
1429 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1430
1431 if 'description' in event._depgraph['pn'][pn]:
1432 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1433
1434 if 'section' in event._depgraph['pn'][pn]:
1435 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1436
1437 if 'homepage' in event._depgraph['pn'][pn]:
1438 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1439
1440 if 'bugtracker' in event._depgraph['pn'][pn]:
1441 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1442
1443 recipe_info['file_path'] = file_name
1444 recipe_info['pathflags'] = pathflags
1445
1446 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1447 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1448 else:
1449 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1450
1451 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1452 recipe.is_image = False
1453 if 'inherits' in event._depgraph['pn'][pn].keys():
1454 for cls in event._depgraph['pn'][pn]['inherits']:
1455 if cls.endswith('/image.bbclass'):
1456 recipe.is_image = True
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001457 recipe_info['is_image'] = True
1458 # Save the is_image state to the relevant recipe objects
1459 self.orm_wrapper.get_update_recipe_object(recipe_info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001460 break
1461 if recipe.is_image:
1462 for t in self.internal_state['targets']:
1463 if pn == t.target:
1464 t.is_image = True
1465 t.save()
1466 self.internal_state['recipes'][pn] = recipe
1467
        # we will not get recipes for keys whose values are listed in ASSUME_PROVIDED
1469
1470 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
1471
1472 # save recipe dependency
1473 # buildtime
1474 recipedeps_objects = []
1475 for recipe in event._depgraph['depends']:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001476 target = self.internal_state['recipes'][recipe]
1477 for dep in event._depgraph['depends'][recipe]:
1478 if dep in assume_provided:
1479 continue
1480 via = None
1481 if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
1482 deprecipe = event._depgraph['providermap'][dep][0]
1483 dependency = self.internal_state['recipes'][deprecipe]
1484 via = Provides.objects.get_or_create(name=dep,
1485 recipe=dependency)[0]
1486 elif dep in self.internal_state['recipes']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001487 dependency = self.internal_state['recipes'][dep]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001488 else:
1489 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
1490 continue
1491 recipe_dep = Recipe_Dependency(recipe=target,
1492 depends_on=dependency,
1493 via=via,
1494 dep_type=Recipe_Dependency.TYPE_DEPENDS)
1495 recipedeps_objects.append(recipe_dep)
1496
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001497 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1498
1499 # save all task information
1500 def _save_a_task(taskdesc):
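            # taskdesc is assumed to be "<pn>.<taskname>", e.g. the purely
            # illustrative "quilt-native.do_compile"; recipe names can contain
            # dots, hence the join over everything but the last component.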
1501 spec = re.split(r'\.', taskdesc)
1502 pn = ".".join(spec[0:-1])
1503 taskname = spec[-1]
1504 e = event
1505 e.taskname = pn
1506 recipe = self.internal_state['recipes'][pn]
1507 task_info = self._get_task_information(e, recipe)
1508 task_info['task_name'] = taskname
1509 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1510 return task_obj
1511
1512 # create tasks
1513 tasks = {}
1514 for taskdesc in event._depgraph['tdepends']:
1515 tasks[taskdesc] = _save_a_task(taskdesc)
1516
1517 # create dependencies between tasks
1518 taskdeps_objects = []
1519 for taskdesc in event._depgraph['tdepends']:
1520 target = tasks[taskdesc]
1521 for taskdep in event._depgraph['tdepends'][taskdesc]:
1522 if taskdep not in tasks:
                    # task info was not collected previously; create it now
1524 dep = _save_a_task(taskdep)
1525 else:
1526 dep = tasks[taskdep]
                taskdeps_objects.append(Task_Dependency(task=target, depends_on=dep))
1528 Task_Dependency.objects.bulk_create(taskdeps_objects)
1529
        if errormsg:
            logger.warning("buildinfohelper: dependency information could not identify these recipes:\n%s", errormsg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532
1533
1534 def store_build_package_information(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001535 self._ensure_build()
1536
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001537 package_info = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001538 self.orm_wrapper.save_build_package_information(
1539 self.internal_state['build'],
1540 package_info,
1541 self.internal_state['recipes'],
1542 built_package=True)
1543
1544 self.orm_wrapper.save_build_package_information(
1545 self.internal_state['build'],
1546 package_info,
1547 self.internal_state['recipes'],
1548 built_package=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001549
1550 def _store_build_done(self, errorcode):
1551 logger.info("Build exited with errorcode %d", errorcode)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001552
1553 if not self.brbe:
1554 return
1555
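        # self.brbe is assumed to be "<BuildRequest pk>:<BuildEnvironment pk>",
        # e.g. "7:1" (illustrative values only), matching the lookups below.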
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001556 br_id, be_id = self.brbe.split(":")
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558 br = BuildRequest.objects.get(pk = br_id)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001559
1560 # if we're 'done' because we got cancelled update the build outcome
1561 if br.state == BuildRequest.REQ_CANCELLING:
1562 logger.info("Build cancelled")
1563 br.build.outcome = Build.CANCELLED
1564 br.build.save()
1565 self.internal_state['build'] = br.build
1566 errorcode = 0
1567
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 if errorcode == 0:
1569 # request archival of the project artifacts
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001570 br.state = BuildRequest.REQ_COMPLETED
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001571 else:
1572 br.state = BuildRequest.REQ_FAILED
1573 br.save()
1574
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001575 be = BuildEnvironment.objects.get(pk = be_id)
1576 be.lock = BuildEnvironment.LOCK_FREE
1577 be.save()
1578 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001579
1580 def store_log_error(self, text):
1581 mockevent = MockEvent()
1582 mockevent.levelno = formatter.ERROR
1583 mockevent.msg = text
1584 mockevent.pathname = '-- None'
1585 mockevent.lineno = LogMessage.ERROR
1586 self.store_log_event(mockevent)
1587
1588 def store_log_exception(self, text, backtrace = ""):
1589 mockevent = MockEvent()
1590 mockevent.levelno = -1
1591 mockevent.msg = text
1592 mockevent.pathname = backtrace
1593 mockevent.lineno = -1
1594 self.store_log_event(mockevent)
1595
    def store_log_event(self, event, cli_backlog=True):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001597 self._ensure_build()
1598
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001599 if event.levelno < formatter.WARNING:
1600 return
1601
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001602 # early return for CLI builds
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001603 if cli_backlog and self.brbe is None:
            if 'backlog' not in self.internal_state:
1605 self.internal_state['backlog'] = []
1606 self.internal_state['backlog'].append(event)
1607 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001608
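        # once a build exists, drain the backlog recursively: each buffered
        # event is popped and re-dispatched through store_log_event() before
        # the current event is stored against the build.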
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001609 if 'backlog' in self.internal_state:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001610 # if we have a backlog of events, do our best to save them here
            if self.internal_state['backlog']:
                tempevent = self.internal_state['backlog'].pop()
                logger.debug(1, "buildinfohelper: Saving stored event %s",
                             tempevent)
                self.store_log_event(tempevent, cli_backlog)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001616 else:
1617 logger.info("buildinfohelper: All events saved")
1618 del self.internal_state['backlog']
1619
1620 log_information = {}
1621 log_information['build'] = self.internal_state['build']
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001622 if event.levelno == formatter.CRITICAL:
1623 log_information['level'] = LogMessage.CRITICAL
1624 elif event.levelno == formatter.ERROR:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001625 log_information['level'] = LogMessage.ERROR
1626 elif event.levelno == formatter.WARNING:
1627 log_information['level'] = LogMessage.WARNING
1628 elif event.levelno == -2: # toaster self-logging
1629 log_information['level'] = -2
1630 else:
1631 log_information['level'] = LogMessage.INFO
1632
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001633 log_information['message'] = event.getMessage()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001634 log_information['pathname'] = event.pathname
1635 log_information['lineno'] = event.lineno
1636 logger.info("Logging error 2: %s", log_information)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001637
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001638 self.orm_wrapper.create_logmessage(log_information)
1639
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001640 def _get_filenames_from_image_license(self, image_license_manifest_path):
1641 """
1642 Find the FILES line in the image_license.manifest file,
1643 which has the basenames of the bzImage and modules files
1644 in this format:
1645 FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1646 """
1647 files = []
1648 with open(image_license_manifest_path) as image_license:
1649 for line in image_license:
1650 if line.startswith('FILES'):
1651 files_str = line.split(':')[1].strip()
1652 files_str = re.sub(r' {2,}', ' ', files_str)
1653
1654 # ignore lines like "FILES:" with no filenames
1655 if files_str:
1656 files += files_str.split(' ')
1657 return files
1658
1659 def _endswith(self, str_to_test, endings):
1660 """
1661 Returns True if str ends with one of the strings in the list
1662 endings, False otherwise
1663 """
1664 endswith = False
1665 for ending in endings:
1666 if str_to_test.endswith(ending):
1667 endswith = True
1668 break
1669 return endswith
1670
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001671 def scan_task_artifacts(self, event):
1672 """
1673 The 'TaskArtifacts' event passes the manifest file content for the
1674 tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
1675 'do_populate_sdk_ext'. The first two will be implemented later.
1676 """
1677 task_vars = BuildInfoHelper._get_data_from_event(event)
1678 task_name = task_vars['task'][task_vars['task'].find(':')+1:]
1679 task_artifacts = task_vars['artifacts']
1680
1681 if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
1682 targets = [target for target in self.internal_state['targets'] \
1683 if target.task == task_name[3:]]
1684 if not targets:
1685 logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name)
1686 return
1687 for artifact_path in task_artifacts:
1688 if not os.path.isfile(artifact_path):
1689 logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path)
1690 continue
1691 for target in targets:
1692 # don't record the file if it's already been added
1693 # to this target
1694 matching_files = TargetSDKFile.objects.filter(
1695 target=target, file_name=artifact_path)
1696 if matching_files.count() == 0:
1697 artifact_size = os.stat(artifact_path).st_size
1698 self.orm_wrapper.save_target_sdk_file(
1699 target, artifact_path, artifact_size)
1700
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001701 def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1702 """
1703 Find files in deploy_dir_image whose basename starts with the
1704 string image_name and ends with one of the strings in
1705 image_file_extensions.
1706
1707 Returns a list of file dictionaries like
1708
1709 [
1710 {
1711 'path': '/path/to/image/file',
1712 'size': <file size in bytes>
1713 }
1714 ]
1715 """
1716 image_files = []
1717
1718 for dirpath, _, filenames in os.walk(deploy_dir_image):
1719 for filename in filenames:
1720 if filename.startswith(image_name) and \
1721 self._endswith(filename, image_file_extensions):
1722 image_file_path = os.path.join(dirpath, filename)
1723 image_file_size = os.stat(image_file_path).st_size
1724
1725 image_files.append({
1726 'path': image_file_path,
1727 'size': image_file_size
1728 })
1729
1730 return image_files
1731
1732 def scan_image_artifacts(self):
1733 """
1734 Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1735 with a Target object in self.internal_state['targets'].
1736
1737 We have two situations to handle:
1738
1739 1. This is the first time a target + machine has been built, so
1740 add files from the DEPLOY_DIR_IMAGE to the target.
1741
1742 OR
1743
1744 2. There are no new files for the target (they were already produced by
1745 a previous build), so copy them from the most recent previous build with
1746 the same target, task and machine.
1747 """
1748 deploy_dir_image = \
1749 self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1750
1751 # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1752 # any image artifacts, so we can return immediately
1753 if not deploy_dir_image:
1754 return
1755
1756 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1757 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1758 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1759
1760 # location of the manifest files for this build;
1761 # note that this file is only produced if an image is produced
1762 license_directory = \
1763 self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1764
1765 # file name extensions for image files
1766 image_file_extensions_unique = {}
1767 image_fstypes = self.server.runCommand(
1768 ['getVariable', 'IMAGE_FSTYPES'])[0]
        if image_fstypes is not None:
            image_types_str = image_fstypes.strip()
            image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
            image_file_extensions_unique = set(image_file_extensions.split(' '))
1773
1774 targets = self.internal_state['targets']
1775
1776 # filter out anything which isn't an image target
1777 image_targets = [target for target in targets if target.is_image]
1778
1779 for image_target in image_targets:
1780 # this is set to True if we find at least one file relating to
1781 # this target; if this remains False after the scan, we copy the
1782 # files from the most-recent Target with the same target + machine
1783 # onto this Target instead
1784 has_files = False
1785
1786 # we construct this because by the time we reach
1787 # BuildCompleted, this has reset to
1788 # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1789 # we need to change it to
1790 # <TARGET>-<MACHINE>-<BUILDNAME>
1791 real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1792 image_name)
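            # e.g. (hypothetical) 'defaultpkgname-qemux86-20160603165040'
            # becomes 'core-image-minimal-qemux86-20160603165040'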
1793
1794 image_license_manifest_path = os.path.join(
1795 license_directory,
1796 real_image_name,
1797 'image_license.manifest')
1798
1804 # if image_license.manifest exists, we can read the names of
1805 # bzImage, modules etc. files for this build from it, then look for
1806 # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1807 # if an image file was produced
1808 if os.path.isfile(image_license_manifest_path):
1809 has_files = True
1810
1811 basenames = self._get_filenames_from_image_license(
1812 image_license_manifest_path)
1813
1814 for basename in basenames:
1815 artifact_path = os.path.join(deploy_dir_image, basename)
1816 if not os.path.exists(artifact_path):
1817 logger.warning("artifact %s doesn't exist, skipping" % artifact_path)
1818 continue
1819 artifact_size = os.stat(artifact_path).st_size
1820
1821 # note that the artifact will only be saved against this
1822 # build if it hasn't been already
1823 self.orm_wrapper.save_target_kernel_file(image_target,
1824 artifact_path, artifact_size)
1825
1826 # store the license manifest path on the target
1827 # (this file is also created any time an image file is created)
1828 license_manifest_path = os.path.join(license_directory,
1829 real_image_name, 'license.manifest')
1830
1831 self.orm_wrapper.update_target_set_license_manifest(
1832 image_target, license_manifest_path)
1833
1834 # store the package manifest path on the target (this file
1835 # is created any time an image file is created)
1836 package_manifest_path = os.path.join(deploy_dir_image,
1837 real_image_name + '.rootfs.manifest')
1838
1839 if os.path.exists(package_manifest_path):
1840 self.orm_wrapper.update_target_set_package_manifest(
1841 image_target, package_manifest_path)
1842
1843 # scan the directory for image files relating to this build
1844 # (via real_image_name); note that we don't have to set
1845 # has_files = True, as searching for the license manifest file
1846 # will already have set it to true if at least one image file was
1847 # produced; note that the real_image_name includes BUILDNAME, which
1848 # in turn includes a timestamp; so if no files were produced for
1849 # this timestamp (i.e. the build reused existing image files already
1850 # in the directory), no files will be recorded against this target
1851 image_files = self._get_image_files(deploy_dir_image,
1852 real_image_name, image_file_extensions_unique)
1853
1854 for image_file in image_files:
1855 self.orm_wrapper.save_target_image_file_information(
1856 image_target, image_file['path'], image_file['size'])
1857
1858 if not has_files:
1859 # copy image files and build artifacts from the
1860 # most-recently-built Target with the
1861 # same target + machine as this Target; also copy the license
1862 # manifest path, as that is not treated as an artifact and needs
1863 # to be set separately
1864 similar_target = \
1865 self.orm_wrapper.get_similar_target_with_image_files(
1866 image_target)
1867
1868 if similar_target:
                logger.info('image artifacts for target %s cloned from '
                            'target %s', image_target.pk, similar_target.pk)
1871 self.orm_wrapper.clone_image_artifacts(similar_target,
1872 image_target)
1873
1874 def _get_sdk_targets(self):
1875 """
1876 Return targets which could generate SDK artifacts, i.e.
1877 "do_populate_sdk" and "do_populate_sdk_ext".
1878 """
1879 return [target for target in self.internal_state['targets'] \
1880 if target.task in ['populate_sdk', 'populate_sdk_ext']]
1881
1882 def scan_sdk_artifacts(self, event):
1883 """
1884 Note that we have to intercept an SDKArtifactInfo event from
1885 toaster.bbclass (via toasterui) to get hold of the SDK variables we
1886 need to be able to scan for files accurately: this is because
1887 variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
1888 BuildCompleted is fired by bitbake, so we have to get those values
1889 while the build is still in progress.
1890
1891 For populate_sdk_ext, this runs twice, with two different
1892 TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
1893 files in the SDK output directory.
1894 """
1895 sdk_vars = BuildInfoHelper._get_data_from_event(event)
1896 toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
1897
1898 # targets which might have created SDK artifacts
1899 sdk_targets = self._get_sdk_targets()
1900
1901 # location of SDK artifacts
1902 tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
1903 sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
1904
1905 # all files in the SDK directory
1906 artifacts = []
1907 for dir_path, _, filenames in os.walk(sdk_dir):
1908 for filename in filenames:
1909 full_path = os.path.join(dir_path, filename)
1910 if not os.path.islink(full_path):
1911 artifacts.append(full_path)
1912
1913 for sdk_target in sdk_targets:
1914 # find files in the SDK directory which haven't already been
1915 # recorded against a Target and whose basename matches
1916 # TOOLCHAIN_OUTPUTNAME
1917 for artifact_path in artifacts:
1918 basename = os.path.basename(artifact_path)
1919
1920 toolchain_match = basename.startswith(toolchain_outputname)
1921
1922 # files which match the name of the target which produced them;
1923 # for example,
1924 # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
1925 target_match = re.search(sdk_target.target, basename)
1926
1927 # targets which produce "*-nativesdk-*" files
1928 is_ext_sdk_target = sdk_target.task in \
1929 ['do_populate_sdk_ext', 'populate_sdk_ext']
1930
1931 # SDK files which don't match the target name, i.e.
1932 # x86_64-nativesdk-libc.*
1933 # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
1934 is_ext_sdk_file = re.search('-nativesdk-', basename)
1935
1936 file_from_target = (toolchain_match and target_match) or \
1937 (is_ext_sdk_target and is_ext_sdk_file)
1938
1939 if file_from_target:
1940 # don't record the file if it's already been added to this
1941 # target
1942 matching_files = TargetSDKFile.objects.filter(
1943 target=sdk_target, file_name=artifact_path)
1944
1945 if matching_files.count() == 0:
1946 artifact_size = os.stat(artifact_path).st_size
1947
1948 self.orm_wrapper.save_target_sdk_file(
1949 sdk_target, artifact_path, artifact_size)
1950
1951 def clone_required_sdk_artifacts(self):
1952 """
1953 If an SDK target doesn't have any SDK artifacts, this means that
1954 the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1955 in turn means that the targets of this build didn't generate any new
1956 artifacts.
1957
1958 In this case, clone SDK artifacts for targets in the current build
1959 from existing targets for this build.
1960 """
1961 sdk_targets = self._get_sdk_targets()
1962 for sdk_target in sdk_targets:
1963 # only clone for SDK targets which have no TargetSDKFiles yet
1964 if sdk_target.targetsdkfile_set.all().count() == 0:
1965 similar_target = \
1966 self.orm_wrapper.get_similar_target_with_sdk_files(
1967 sdk_target)
1968 if similar_target:
                logger.info('SDK artifacts for target %s cloned from '
                            'target %s', sdk_target.pk, similar_target.pk)
1971 self.orm_wrapper.clone_sdk_artifacts(similar_target,
1972 sdk_target)
1973
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001974 def close(self, errorcode):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001975 self._store_build_done(errorcode)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001976
1977 if 'backlog' in self.internal_state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001978 # we save missed events in the database for the current build
1979 tempevent = self.internal_state['backlog'].pop()
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001980 # Do not skip command line build events
1981 self.store_log_event(tempevent,False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001982
1983 if not connection.features.autocommits_when_autocommit_is_off:
1984 transaction.set_autocommit(True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001985
1986 # unset the brbe; this is to prevent subsequent command-line builds
1987 # being incorrectly attached to the previous Toaster-triggered build;
1988 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1989 self.brbe = None
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001990
1991 # unset the internal Build object to prevent it being reused for the
1992 # next build
1993 self.internal_state['build'] = None