#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

#Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest
from bldcontrol.models import BRLayer
from bldcontrol import bbcontroller

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction, connection


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key

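    # Illustrative example (not executed): for kwargs such as
    # {'build': <Build pk=7>, 'name': 'glibc'}, _build_key() walks the keys in
    # sorted order and returns "0-7-glibc" -- model instances contribute their
    # database id, anything else its string form. This key is what the
    # memory-cached helpers below use to memoise their database look-ups.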

    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key], created = \
                clazz.objects.get_or_create(**kwargs)

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]

    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.
        """
        return target.get_similar_target_with_image_files()

    def get_similar_target_with_sdk_files(self, target):
        return target.get_similar_target_with_sdk_files()

    def clone_image_artifacts(self, target_from, target_to):
        target_to.clone_image_artifacts_from(target_from)

    def clone_sdk_artifacts(self, target_from, target_to):
        target_to.clone_sdk_artifacts_from(target_from)

    def _timestamp_to_datetime(self, secs):
        """
        Convert timestamp in seconds to Python datetime
        """
        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))

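    # Illustrative example (not executed), assuming Django's default time zone
    # is UTC: _timestamp_to_datetime(1609459200) returns an aware datetime for
    # 2021-01-01 00:00:00, i.e. the epoch offset applied to datetime(1970, 1, 1).
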
    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def get_or_create_build_object(self, brbe):
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug("buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

        logger.debug("buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

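    # Note on brbe (illustrative): it is the "<build request id>:<build
    # environment id>" string handed down from toasterui, e.g. "13:1", so the
    # split(":") above yields the BuildRequest primary key used to look up the
    # Toaster-triggered build request.
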
    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result

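    # Illustrative example (not executed): a target given as
    # "core-image-minimal:do_populate_sdk" is stored as
    # target='core-image-minimal', task='populate_sdk', while a bare
    # "core-image-minimal" (implicit do_build) is stored with task=''.
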
    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()
        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

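    # The task_stats dictionary consumed above is expected to carry values
    # along these lines (illustrative):
    #   {'started': 1613400000.0, 'ended': 1613400042.5,
    #    'cpu_time_user': 12.3, 'cpu_time_system': 1.2,
    #    'disk_io_read': 4096, 'disk_io_write': 8192}
    # started/ended and the disk I/O figures are only stored when both members
    # of the respective pair are present.
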
    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/") # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                                     layer_version=built_layer,
                                     file_path=recipe_information['file_path'],
                                     pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                  build = build_obj,
                                  layer = layer_obj,
                                  branch = layer_version_information['branch'],
                                  commit = layer_version_information['commit'],
                                  priority = layer_version_information['priority'],
                                  local_path = layer_version_information['local_path'],
                                  project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster

            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple of the same layer name or the name
                # hasn't been determined by the toaster.bbclass layer
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                        return br_layer.layer_version

                    if br_layer.local_source_dir == \
                            layer_information['local_path']:
                        return br_layer.layer_version

        # We've reached the end of our search and couldn't find the layer
        # we can continue but some data may be missing
        raise NotExisting("Unidentified layer %s" %
                          pformat(layer_information))

    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if not path:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not parent_path:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)

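    # Judging from the indexing used above, each entry of filedata['dirs'],
    # ['files'] and ['syms'] is a list shaped roughly like (illustrative):
    #   ['drwxr-xr-x', 'root', 'root', 4096, './usr/bin', ...]
    # i.e. d[0] is the mode string (type letter plus permission bits), d[1:4]
    # are owner, group and size, d[4] is the path relative to the image root,
    # and for symlinks d[6] carries the link target.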

    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = []
        for p in packagedict:
            # Search name switches round the installed name vs package name
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but is"
                               " missing from all packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=
                        built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the "
                                "configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                            path = targetpath,
                            size = targetfilesize))
                    if packagefile_objects:
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        pattern_so = re.compile(r'.*\.so(\.\d*)?$')
        pattern_lib = re.compile(r'.*\-suffix(\d*)?$')
        pattern_ko = re.compile(r'^kernel-module-.*')
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    # Skip known non-package objects like libraries and kernel modules
                    if pattern_so.match(px) or pattern_lib.match(px):
                        logger.info("Toaster does not add library file dependencies to packages (%s,%s)", p, px)
                        continue
                    if pattern_ko.match(px):
                        logger.info("Toaster does not add kernel module dependencies to packages (%s,%s)", p, px)
                        continue
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if packagedeps_objs:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if errormsg:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
            file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        if not 'PN' in package_info.keys():
            # no package data to save (e.g. 'OPKGN'="lib64-*"|"lib32-*")
            return None

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                         name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration "
                             "data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if packagefile_objects:
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if packagedeps_objs:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

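    # Illustrative example (not executed) of the dependency explosion used
    # above: bb.utils.explode_deps("glibc (>= 2.31) libgcc") returns
    # ['glibc', 'libgcc'], so each runtime relationship (RDEPENDS, RPROVIDES,
    # RRECOMMENDS, ...) becomes one Package_Dependency row per named package.
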
    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if desc:
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if varhist_objects:
                    VariableHistory.objects.bulk_create(varhist_objects)

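# Note (illustrative): each entry of the vardump passed to
# ORMWrapper.save_build_variables() above is expected to carry at least the
# keys that method reads, e.g.
#   {'v': 'qemux86-64', 'doc': 'The target machine...', 'func': False,
#    'history': [{'file': 'conf/local.conf', 'line': '23', 'op': 'set'}]}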

class MockEvent(object):
    """ This object is used to create event, for which normal event-processing methods can
        be used, out of data that is not coming via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg


class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database
        It is instantiated once per build
        Keeps in memory all data that needs matching before writing it to the database
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no target is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name is not None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash. and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_dependency(self, pathRE):
        """ Returns the layer in the toaster db that has a full regex
        match to the pathRE. pathRE - the layer path passed as a regex in the
        event. It is created in cooker.py as a collection for the layer
        priorities.
        """
        self._ensure_build()

        def _sort_longest_path(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Our paths don't append a trailing slash
        if pathRE.endswith("/"):
            pathRE = pathRE[:-1]

        p = re.compile(pathRE)
        path=re.sub(r'[$^]',r'',pathRE)
        # Heuristics: we always match recipe to the deepest layer path in
        # the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects,
                          reverse=True, key=_sort_longest_path):
            if p.fullmatch(os.path.abspath(lvo.local_path)):
                return lvo
            if lvo.layer.local_source_dir:
                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
                    return lvo
            if 0 == path.find(lvo.local_path):
                # sub-layer path inside existing layer
                return lvo

        # if we get here, we didn't read layers correctly;
        # dump whatever information we have on the error log
        logger.warning("Could not match layer dependency for path %s : %s",
                       pathRE,
                       self.orm_wrapper.layer_version_objects)
        return None

    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        #mockup the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj

    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)



        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info

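    # Illustrative example (not executed): for a taskfile of
    # "virtual:native:/srv/poky/meta/recipes-core/glibc/glibc_2.35.bb" the
    # helper above keeps the last ":"-separated field as the local path,
    # records the sorted prefixes as pathflags ("native:virtual") and rewrites
    # file_path relative to the matched layer, e.g.
    # "recipes-core/glibc/glibc_2.35.bb".
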
    def _get_path_information(self, task_object):
        self._ensure_build()

        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            pe, pv = task_object.recipe.version.split(":",1)
            if pe:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                     buildname=buildname,
                                                     package=package))

        return build_stats_path

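    # Illustrative example (not executed): with TMPDIR=/srv/build/tmp, a build
    # name of "20230101010101" and a recipe version of "2:2.35-r0" for recipe
    # "glibc", the path built above is
    # "/srv/build/tmp/buildstats/20230101010101/glibc-2_2.35-r0/" (the epoch,
    # when non-empty, is joined to the name with an underscore).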
1083
1084 ################################
1085 ## external available methods to store information
1086 @staticmethod
1087 def _get_data_from_event(event):
1088 evdata = None
1089 if '_localdata' in vars(event):
1090 evdata = event._localdata
1091 elif 'data' in vars(event):
1092 evdata = event.data
1093 else:
1094 raise Exception("Event with neither _localdata or data properties")
1095 return evdata
1096
1097 def store_layer_info(self, event):
1098 layerinfos = BuildInfoHelper._get_data_from_event(event)
1099 self.internal_state['lvs'] = {}
1100 for layer in layerinfos:
1101 try:
1102 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
1103 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
1104 except NotExisting as nee:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001105 logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001106
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001107 def store_started_build(self):
1108 self._ensure_build()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001109
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001110 def save_build_log_file_path(self, build_log_path):
1111 self._ensure_build()
1112
1113 if not self.internal_state['build'].cooker_log_path:
1114 data_dict = {'cooker_log_path': build_log_path}
1115 self.orm_wrapper.update_build(self.internal_state['build'], data_dict)
1116
1117 def save_build_targets(self, event):
1118 self._ensure_build()
1119
1120 # create target information
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001121 assert '_pkgs' in vars(event)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001122 target_information = {}
1123 target_information['targets'] = event._pkgs
1124 target_information['build'] = self.internal_state['build']
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001125
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001126 self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001127
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001128 def save_build_layers_and_variables(self):
1129 self._ensure_build()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001130
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001131 build_obj = self.internal_state['build']
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001132
1133 # save layer version information for this build
1134 if not 'lvs' in self.internal_state:
1135 logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
1136 else:
1137 for layer_obj in self.internal_state['lvs']:
1138 self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
1139
1140 del self.internal_state['lvs']
1141
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001142 # Save build configuration
1143 data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
1144
1145 # convert the paths from absolute to relative to either the build directory or layer checkouts
1146 path_prefixes = []
1147
1148 if self.brbe is not None:
1149 _, be_id = self.brbe.split(":")
1150 be = BuildEnvironment.objects.get(pk = be_id)
1151 path_prefixes.append(be.builddir)
1152
1153 for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
1154 path_prefixes.append(layer.local_path)
1155
1156 # we strip the prefixes
1157 for k in data:
1158 if not bool(data[k]['func']):
1159 for vh in data[k]['history']:
1160 if not 'documentation.conf' in vh['file']:
1161 abs_file_name = vh['file']
1162 for pp in path_prefixes:
1163 if abs_file_name.startswith(pp + "/"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001164 # preserve layer name in relative path
1165 vh['file']=abs_file_name[pp.rfind("/")+1:]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001166 break
1167
1168 # save the variables
1169 self.orm_wrapper.save_build_variables(build_obj, data)
1170
1171 return self.brbe
1172
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001173 def set_recipes_to_parse(self, num_recipes):
1174 """
1175 Set the number of recipes which need to be parsed for this build.
1176 This is set the first time ParseStarted is received by toasterui.
1177 """
1178 self._ensure_build()
1179 self.internal_state['build'].recipes_to_parse = num_recipes
1180 self.internal_state['build'].save()
1181
1182 def set_recipes_parsed(self, num_recipes):
1183 """
1184 Set the number of recipes parsed so far for this build; this is updated
1185 each time a ParseProgress or ParseCompleted event is received by
1186 toasterui.
1187 """
1188 self._ensure_build()
1189 if num_recipes <= self.internal_state['build'].recipes_to_parse:
1190 self.internal_state['build'].recipes_parsed = num_recipes
1191 self.internal_state['build'].save()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001192
1193 def update_target_image_file(self, event):
1194 evdata = BuildInfoHelper._get_data_from_event(event)
1195
1196 for t in self.internal_state['targets']:
Andrew Geissler82c905d2020-04-13 13:39:40 -05001197 if t.is_image:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001198 output_files = list(evdata.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001199 for output in output_files:
1200 if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
1201 self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
1202
1203 def update_artifact_image_file(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001204 self._ensure_build()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001205 evdata = BuildInfoHelper._get_data_from_event(event)
1206 for artifact_path in evdata.keys():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001207 self.orm_wrapper.save_artifact_information(
1208 self.internal_state['build'], artifact_path,
1209 evdata[artifact_path])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001210
1211 def update_build_information(self, event, errors, warnings, taskfailures):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001212 self._ensure_build()
1213 self.orm_wrapper.update_build_stats_and_outcome(
1214 self.internal_state['build'], errors, warnings, taskfailures)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215
1216 def store_started_task(self, event):
1217 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
1218 assert 'taskfile' in vars(event)
1219 localfilepath = event.taskfile.split(":")[-1]
1220 assert localfilepath.startswith("/")
1221
1222 identifier = event.taskfile + ":" + event.taskname
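# this identifier (e.g. '/path/to/recipe.bb:do_compile') keys the per-task state
# so the matching completion event can be looked up later in update_and_store_task()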
1223
1224 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
1225 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1226
1227 task_information = self._get_task_information(event, recipe)
1228 task_information['outcome'] = Task.OUTCOME_NA
1229
1230 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
1231 assert 'reason' in vars(event)
1232 task_information['task_executed'] = False
1233 if event.reason == "covered":
1234 task_information['outcome'] = Task.OUTCOME_COVERED
1235 elif event.reason == "existing":
1236 task_information['outcome'] = Task.OUTCOME_PREBUILT
1237 else:
1238 task_information['task_executed'] = True
Andrew Geissler82c905d2020-04-13 13:39:40 -05001239 if 'noexec' in vars(event) and event.noexec:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001240 task_information['task_executed'] = False
1241 task_information['outcome'] = Task.OUTCOME_EMPTY
1242 task_information['script_type'] = Task.CODING_NA
1243
1244 # do not assign order numbers to scene tasks
1245 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
1246 self.task_order += 1
1247 task_information['order'] = self.task_order
1248
1249 self.orm_wrapper.get_update_task_object(task_information)
1250
1251 self.internal_state['taskdata'][identifier] = {
1252 'outcome': task_information['outcome'],
1253 }
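# remember the outcome decided at task start (covered, prebuilt, empty, ...)
# so update_and_store_task() can reuse it when the task finishes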
1254
1255
1256 def store_tasks_stats(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001257 self._ensure_build()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001258 task_data = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001259
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001260 for (task_file, task_name, task_stats, recipe_name) in task_data:
1261 build = self.internal_state['build']
1262 self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001263
1264 def update_and_store_task(self, event):
1265 assert 'taskfile' in vars(event)
1266 localfilepath = event.taskfile.split(":")[-1]
1267 assert localfilepath.startswith("/")
1268
1269 identifier = event.taskfile + ":" + event.taskname
1270 if identifier not in self.internal_state['taskdata']:
1271 if isinstance(event, bb.build.TaskBase):
1272 # we do a bit of guessing
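# the event may come from a class-extended variant (e.g. virtual:native:...) whose
# taskfile carries an extra prefix, so first try to match the stored identifiers by suffix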
1273 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1274 if len(candidates) == 1:
1275 identifier = candidates[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001276 elif len(candidates) > 1 and hasattr(event,'_package'):
1277 if 'native-' in event._package:
1278 identifier = 'native:' + identifier
1279 if 'nativesdk-' in event._package:
1280 identifier = 'nativesdk:' + identifier
1281 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1282 if len(candidates) == 1:
1283 identifier = candidates[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001284
1285 assert identifier in self.internal_state['taskdata']
1286 identifierlist = identifier.split(":")
1287 realtaskfile = ":".join(identifierlist[:-1])
1288 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1289 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1290 task_information = self._get_task_information(event,recipe)
1291
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001292 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1293
1294 if 'logfile' in vars(event):
1295 task_information['logfile'] = event.logfile
1296
1297 if '_message' in vars(event):
1298 task_information['message'] = event._message
1299
1300 if 'taskflags' in vars(event):
1301 # with TaskStarted, we get even more information
1302 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
1303 task_information['script_type'] = Task.CODING_PYTHON
1304 else:
1305 task_information['script_type'] = Task.CODING_SHELL
1306
1307 if task_information['outcome'] == Task.OUTCOME_NA:
1308 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1309 task_information['outcome'] = Task.OUTCOME_SUCCESS
1310 del self.internal_state['taskdata'][identifier]
1311
1312 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1313 task_information['outcome'] = Task.OUTCOME_FAILED
1314 del self.internal_state['taskdata'][identifier]
1315
1316 if not connection.features.autocommits_when_autocommit_is_off:
1317 # we force a sync point here, to get the progress bar to show
1318 if self.autocommit_step % 3 == 0:
1319 transaction.set_autocommit(True)
1320 transaction.set_autocommit(False)
1321 self.autocommit_step += 1
1322
1323 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1324
1325
1326 def store_missed_state_tasks(self, event):
1327 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1328
1329 # identifier = fn + taskname + "_setscene"
1330 recipe_information = self._get_recipe_information_from_taskfile(fn)
1331 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1332 mevent = MockEvent()
1333 mevent.taskname = taskname
1334 mevent.taskhash = taskhash
1335 task_information = self._get_task_information(mevent,recipe)
1336
1337 task_information['start_time'] = timezone.now()
1338 task_information['outcome'] = Task.OUTCOME_NA
1339 task_information['sstate_checksum'] = taskhash
1340 task_information['sstate_result'] = Task.SSTATE_MISS
1341 task_information['path_to_sstate_obj'] = sstatefile
1342
1343 self.orm_wrapper.get_update_task_object(task_information)
1344
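# tasks whose shared-state archive was found are recorded with just the sstate
# object path; their outcome and timing are filled in later by the normal
# runqueue task events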
1345 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1346
1347 # identifier = fn + taskname + "_setscene"
1348 recipe_information = self._get_recipe_information_from_taskfile(fn)
1349 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1350 mevent = MockEvent()
1351 mevent.taskname = taskname
1352 mevent.taskhash = taskhash
1353 task_information = self._get_task_information(mevent,recipe)
1354
1355 task_information['path_to_sstate_obj'] = sstatefile
1356
1357 self.orm_wrapper.get_update_task_object(task_information)
1358
1359
1360 def store_target_package_data(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001361 self._ensure_build()
1362
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001363 # for all image targets
1364 for target in self.internal_state['targets']:
1365 if target.is_image:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001366 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001367 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1368 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001369
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001370 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001371 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1372 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001373 except KeyError as e:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001374 logger.warning("KeyError in save_target_package_information: %s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001376
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001377 # only try to find files in the image if the task for this
1378 # target is one which produces image files; otherwise, the old
1379 # list of files in the files-in-image.txt file will be
1380 # appended to the target even if it didn't produce any images
1381 if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1382 try:
1383 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1384 except KeyError as e:
1385 logger.warning("KeyError in save_target_file_information: %s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001387
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001388
1389
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001390 def cancel_cli_build(self):
1391 """
1392 If a build is currently underway, set its state to CANCELLED;
1393 note that this only gets called for command line builds which are
1394 interrupted, so it doesn't touch any BuildRequest objects
1395 """
1396 self._ensure_build()
1397 self.internal_state['build'].outcome = Build.CANCELLED
1398 self.internal_state['build'].save()
1399 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001400
1401 def store_dependency_information(self, event):
1402 assert '_depgraph' in vars(event)
1403 assert 'layer-priorities' in event._depgraph
1404 assert 'pn' in event._depgraph
1405 assert 'tdepends' in event._depgraph
1406
Andrew Geissler595f6302022-01-24 19:11:47 +00001407 errormsg = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408
1409 # save layer version priorities
1410 if 'layer-priorities' in event._depgraph.keys():
1411 for lv in event._depgraph['layer-priorities']:
1412 (_, path, _, priority) = lv
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001413 layer_version_obj = self._get_layer_version_for_dependency(path)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001414 if layer_version_obj:
1415 layer_version_obj.priority = priority
1416 layer_version_obj.save()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001417
1418 # save recipe information
1419 self.internal_state['recipes'] = {}
1420 for pn in event._depgraph['pn']:
1421
1422 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1423 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
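# a filename such as 'virtual:native:/path/to/foo.bb' splits into the real file path
# (last ':'-separated component) and any class-extension prefixes, kept as pathflags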
1424 layer_version_obj = self._get_layer_version_for_path(file_name)
1425
1426 assert layer_version_obj is not None
1427
1428 recipe_info = {}
1429 recipe_info['name'] = pn
1430 recipe_info['layer_version'] = layer_version_obj
1431
1432 if 'version' in event._depgraph['pn'][pn]:
1433 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1434
1435 if 'summary' in event._depgraph['pn'][pn]:
1436 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1437
1438 if 'license' in event._depgraph['pn'][pn]:
1439 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1440
1441 if 'description' in event._depgraph['pn'][pn]:
1442 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1443
1444 if 'section' in event._depgraph['pn'][pn]:
1445 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1446
1447 if 'homepage' in event._depgraph['pn'][pn]:
1448 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1449
1450 if 'bugtracker' in event._depgraph['pn'][pn]:
1451 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1452
1453 recipe_info['file_path'] = file_name
1454 recipe_info['pathflags'] = pathflags
1455
1456 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1457 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1458 else:
1459 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1460
1461 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1462 recipe.is_image = False
1463 if 'inherits' in event._depgraph['pn'][pn].keys():
1464 for cls in event._depgraph['pn'][pn]['inherits']:
1465 if cls.endswith('/image.bbclass'):
1466 recipe.is_image = True
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001467 recipe_info['is_image'] = True
1468 # Save the is_image state to the relevant recipe objects
1469 self.orm_wrapper.get_update_recipe_object(recipe_info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001470 break
1471 if recipe.is_image:
1472 for t in self.internal_state['targets']:
1473 if pn == t.target:
1474 t.is_image = True
1475 t.save()
1476 self.internal_state['recipes'][pn] = recipe
1477
1478 # no recipes are reported for keys whose values are listed in ASSUME_PROVIDED
1479
1480 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
1481
1482 # save recipe dependency
1483 # buildtime
1484 recipedeps_objects = []
1485 for recipe in event._depgraph['depends']:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001486 target = self.internal_state['recipes'][recipe]
1487 for dep in event._depgraph['depends'][recipe]:
1488 if dep in assume_provided:
1489 continue
1490 via = None
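# a dependency satisfied through a provider (e.g. virtual/kernel) is linked
# via a Provides object to the recipe that actually provides it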
1491 if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
1492 deprecipe = event._depgraph['providermap'][dep][0]
1493 dependency = self.internal_state['recipes'][deprecipe]
1494 via = Provides.objects.get_or_create(name=dep,
1495 recipe=dependency)[0]
1496 elif dep in self.internal_state['recipes']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001497 dependency = self.internal_state['recipes'][dep]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001498 else:
Andrew Geissler595f6302022-01-24 19:11:47 +00001499 errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001500 continue
1501 recipe_dep = Recipe_Dependency(recipe=target,
1502 depends_on=dependency,
1503 via=via,
1504 dep_type=Recipe_Dependency.TYPE_DEPENDS)
1505 recipedeps_objects.append(recipe_dep)
1506
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001507 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1508
1509 # save all task information
1510 def _save_a_task(taskdesc):
1511 spec = re.split(r'\.', taskdesc)
1512 pn = ".".join(spec[0:-1])
1513 taskname = spec[-1]
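# recipe names may themselves contain dots (e.g. glib-2.0), so only the final
# component of the task descriptor is the task name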
1514 e = event
1515 e.taskname = pn
1516 recipe = self.internal_state['recipes'][pn]
1517 task_info = self._get_task_information(e, recipe)
1518 task_info['task_name'] = taskname
1519 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1520 return task_obj
1521
1522 # create tasks
1523 tasks = {}
1524 for taskdesc in event._depgraph['tdepends']:
1525 tasks[taskdesc] = _save_a_task(taskdesc)
1526
1527 # create dependencies between tasks
1528 taskdeps_objects = []
1529 for taskdesc in event._depgraph['tdepends']:
1530 target = tasks[taskdesc]
1531 for taskdep in event._depgraph['tdepends'][taskdesc]:
1532 if taskdep not in tasks:
1533 # fetch task info that was not collected previously
1534 dep = _save_a_task(taskdep)
1535 else:
1536 dep = tasks[taskdep]
1537 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1538 Task_Dependency.objects.bulk_create(taskdeps_objects)
1539
Andrew Geissler595f6302022-01-24 19:11:47 +00001540 if errormsg:
1541 logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001542
1543
1544 def store_build_package_information(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001545 self._ensure_build()
1546
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001547 package_info = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001548 self.orm_wrapper.save_build_package_information(
1549 self.internal_state['build'],
1550 package_info,
1551 self.internal_state['recipes'],
1552 built_package=True)
1553
1554 self.orm_wrapper.save_build_package_information(
1555 self.internal_state['build'],
1556 package_info,
1557 self.internal_state['recipes'],
1558 built_package=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001559
1560 def _store_build_done(self, errorcode):
1561 logger.info("Build exited with errorcode %d", errorcode)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001562
1563 if not self.brbe:
1564 return
1565
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001566 br_id, be_id = self.brbe.split(":")
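# brbe has the form '<BuildRequest id>:<BuildEnvironment id>'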
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001567
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 br = BuildRequest.objects.get(pk = br_id)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001569
1570 # if we're 'done' because we got cancelled update the build outcome
1571 if br.state == BuildRequest.REQ_CANCELLING:
1572 logger.info("Build cancelled")
1573 br.build.outcome = Build.CANCELLED
1574 br.build.save()
1575 self.internal_state['build'] = br.build
1576 errorcode = 0
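# a cancelled build is not treated as a failure: clear the error code so the
# BuildRequest below is marked COMPLETED rather than FAILED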
1577
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001578 if errorcode == 0:
1579 # request archival of the project artifacts
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001580 br.state = BuildRequest.REQ_COMPLETED
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001581 else:
1582 br.state = BuildRequest.REQ_FAILED
1583 br.save()
1584
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001585 be = BuildEnvironment.objects.get(pk = be_id)
1586 be.lock = BuildEnvironment.LOCK_FREE
1587 be.save()
1588 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001589
1590 def store_log_error(self, text):
1591 mockevent = MockEvent()
1592 mockevent.levelno = formatter.ERROR
1593 mockevent.msg = text
1594 mockevent.pathname = '-- None'
1595 mockevent.lineno = LogMessage.ERROR
1596 self.store_log_event(mockevent)
1597
1598 def store_log_exception(self, text, backtrace = ""):
1599 mockevent = MockEvent()
1600 mockevent.levelno = -1
1601 mockevent.msg = text
1602 mockevent.pathname = backtrace
1603 mockevent.lineno = -1
1604 self.store_log_event(mockevent)
1605
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001606 def store_log_event(self, event,cli_backlog=True):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001607 self._ensure_build()
1608
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609 if event.levelno < formatter.WARNING:
1610 return
1611
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001612 # early return for CLI builds
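# (the events are kept in a backlog instead of being written immediately;
# the backlog is flushed when the build is closed, see close())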
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001613 if cli_backlog and self.brbe is None:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001614 if 'backlog' not in self.internal_state:
1615 self.internal_state['backlog'] = []
1616 self.internal_state['backlog'].append(event)
1617 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001619 if 'backlog' in self.internal_state:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001620 # if we have a backlog of events, do our best to save them here
Andrew Geissler595f6302022-01-24 19:11:47 +00001621 if self.internal_state['backlog']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001622 tempevent = self.internal_state['backlog'].pop()
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001623 logger.debug("buildinfohelper: Saving stored event %s", tempevent)
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001625 self.store_log_event(tempevent,cli_backlog)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001626 else:
1627 logger.info("buildinfohelper: All events saved")
1628 del self.internal_state['backlog']
1629
1630 log_information = {}
1631 log_information['build'] = self.internal_state['build']
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001632 if event.levelno == formatter.CRITICAL:
1633 log_information['level'] = LogMessage.CRITICAL
1634 elif event.levelno == formatter.ERROR:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635 log_information['level'] = LogMessage.ERROR
1636 elif event.levelno == formatter.WARNING:
1637 log_information['level'] = LogMessage.WARNING
1638 elif event.levelno == -2: # toaster self-logging
1639 log_information['level'] = -2
1640 else:
1641 log_information['level'] = LogMessage.INFO
1642
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001643 log_information['message'] = event.getMessage()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644 log_information['pathname'] = event.pathname
1645 log_information['lineno'] = event.lineno
1646 logger.info("Logging error 2: %s", log_information)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001647
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001648 self.orm_wrapper.create_logmessage(log_information)
1649
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001650 def _get_filenames_from_image_license(self, image_license_manifest_path):
1651 """
1652 Find the FILES line in the image_license.manifest file,
1653 which has the basenames of the bzImage and modules files
1654 in this format:
1655 FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1656 """
1657 files = []
1658 with open(image_license_manifest_path) as image_license:
1659 for line in image_license:
1660 if line.startswith('FILES'):
1661 files_str = line.split(':')[1].strip()
1662 files_str = re.sub(r' {2,}', ' ', files_str)
1663
1664 # ignore lines like "FILES:" with no filenames
1665 if files_str:
1666 files += files_str.split(' ')
1667 return files
1668
1669 def _endswith(self, str_to_test, endings):
1670 """
1671 Returns True if str ends with one of the strings in the list
1672 endings, False otherwise
1673 """
1674 endswith = False
1675 for ending in endings:
1676 if str_to_test.endswith(ending):
1677 endswith = True
1678 break
1679 return endswith
1680
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001681 def scan_task_artifacts(self, event):
1682 """
1683 The 'TaskArtifacts' event passes the manifest file content for the
1684 tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
1685 'do_populate_sdk_ext'. The first two will be implemented later.
1686 """
1687 task_vars = BuildInfoHelper._get_data_from_event(event)
1688 task_name = task_vars['task'][task_vars['task'].find(':')+1:]
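# the 'task' value is prefixed up to the first ':'; only the remainder
# (e.g. 'do_populate_sdk') is the task name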
1689 task_artifacts = task_vars['artifacts']
1690
1691 if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
1692 targets = [target for target in self.internal_state['targets'] \
1693 if target.task == task_name[3:]]
1694 if not targets:
1695 logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name)
1696 return
1697 for artifact_path in task_artifacts:
1698 if not os.path.isfile(artifact_path):
1699 logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path)
1700 continue
1701 for target in targets:
1702 # don't record the file if it's already been added
1703 # to this target
1704 matching_files = TargetSDKFile.objects.filter(
1705 target=target, file_name=artifact_path)
1706 if matching_files.count() == 0:
1707 artifact_size = os.stat(artifact_path).st_size
1708 self.orm_wrapper.save_target_sdk_file(
1709 target, artifact_path, artifact_size)
1710
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001711 def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1712 """
1713 Find files in deploy_dir_image whose basename starts with the
1714 string image_name and ends with one of the strings in
1715 image_file_extensions.
1716
1717 Returns a list of file dictionaries like
1718
1719 [
1720 {
1721 'path': '/path/to/image/file',
1722 'size': <file size in bytes>
1723 }
1724 ]
1725 """
1726 image_files = []
1727
1728 for dirpath, _, filenames in os.walk(deploy_dir_image):
1729 for filename in filenames:
1730 if filename.startswith(image_name) and \
1731 self._endswith(filename, image_file_extensions):
1732 image_file_path = os.path.join(dirpath, filename)
1733 image_file_size = os.stat(image_file_path).st_size
1734
1735 image_files.append({
1736 'path': image_file_path,
1737 'size': image_file_size
1738 })
1739
1740 return image_files
1741
1742 def scan_image_artifacts(self):
1743 """
1744 Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1745 with a Target object in self.internal_state['targets'].
1746
1747 We have two situations to handle:
1748
1749 1. This is the first time a target + machine has been built, so
1750 add files from the DEPLOY_DIR_IMAGE to the target.
1751
1752 OR
1753
1754 2. There are no new files for the target (they were already produced by
1755 a previous build), so copy them from the most recent previous build with
1756 the same target, task and machine.
1757 """
1758 deploy_dir_image = \
1759 self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1760
1761 # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1762 # any image artifacts, so we can return immediately
1763 if not deploy_dir_image:
1764 return
1765
1766 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1767 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1768 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1769
1770 # location of the manifest files for this build;
1771 # note that this file is only produced if an image is produced
1772 license_directory = \
1773 self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1774
1775 # file name extensions for image files
1776 image_file_extensions_unique = {}
1777 image_fstypes = self.server.runCommand(
1778 ['getVariable', 'IMAGE_FSTYPES'])[0]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001779 if image_fstypes is not None:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001780 image_types_str = image_fstypes.strip()
1781 image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
1782 image_file_extensions_unique = set(image_file_extensions.split(' '))
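# the IMAGE_FSTYPES values (e.g. 'ext4 tar.bz2') double as the file-name
# suffixes used below to pick image files out of DEPLOY_DIR_IMAGE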
1783
1784 targets = self.internal_state['targets']
1785
1786 # filter out anything which isn't an image target
1787 image_targets = [target for target in targets if target.is_image]
1788
1789 for image_target in image_targets:
1790 # this is set to True if we find at least one file relating to
1791 # this target; if this remains False after the scan, we copy the
1792 # files from the most-recent Target with the same target + machine
1793 # onto this Target instead
1794 has_files = False
1795
1796 # we construct this because by the time we reach
1797 # BuildCompleted, this has reset to
1798 # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1799 # we need to change it to
1800 # <TARGET>-<MACHINE>-<BUILDNAME>
1801 real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1802 image_name)
1803
1804 image_license_manifest_path = os.path.join(
1805 license_directory,
1806 real_image_name,
1807 'image_license.manifest')
1808
1809 image_package_manifest_path = os.path.join(
1810 license_directory,
1811 real_image_name,
1812 'image_license.manifest')
1813
1814 # if image_license.manifest exists, we can read the names of
1815 # bzImage, modules etc. files for this build from it, then look for
1816 # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1817 # if an image file was produced
1818 if os.path.isfile(image_license_manifest_path):
1819 has_files = True
1820
1821 basenames = self._get_filenames_from_image_license(
1822 image_license_manifest_path)
1823
1824 for basename in basenames:
1825 artifact_path = os.path.join(deploy_dir_image, basename)
1826 if not os.path.exists(artifact_path):
1827 logger.warning("artifact %s doesn't exist, skipping" % artifact_path)
1828 continue
1829 artifact_size = os.stat(artifact_path).st_size
1830
1831 # note that the artifact will only be saved against this
1832 # build if it hasn't been already
1833 self.orm_wrapper.save_target_kernel_file(image_target,
1834 artifact_path, artifact_size)
1835
1836 # store the license manifest path on the target
1837 # (this file is also created any time an image file is created)
1838 license_manifest_path = os.path.join(license_directory,
1839 real_image_name, 'license.manifest')
1840
1841 self.orm_wrapper.update_target_set_license_manifest(
1842 image_target, license_manifest_path)
1843
1844 # store the package manifest path on the target (this file
1845 # is created any time an image file is created)
1846 package_manifest_path = os.path.join(deploy_dir_image,
1847 real_image_name + '.rootfs.manifest')
1848
1849 if os.path.exists(package_manifest_path):
1850 self.orm_wrapper.update_target_set_package_manifest(
1851 image_target, package_manifest_path)
1852
1853 # scan the directory for image files relating to this build
1854 # (via real_image_name); note that we don't have to set
1855 # has_files = True, as searching for the license manifest file
1856 # will already have set it to true if at least one image file was
1857 # produced; note that the real_image_name includes BUILDNAME, which
1858 # in turn includes a timestamp; so if no files were produced for
1859 # this timestamp (i.e. the build reused existing image files already
1860 # in the directory), no files will be recorded against this target
1861 image_files = self._get_image_files(deploy_dir_image,
1862 real_image_name, image_file_extensions_unique)
1863
1864 for image_file in image_files:
1865 self.orm_wrapper.save_target_image_file_information(
1866 image_target, image_file['path'], image_file['size'])
1867
1868 if not has_files:
1869 # copy image files and build artifacts from the
1870 # most-recently-built Target with the
1871 # same target + machine as this Target; also copy the license
1872 # manifest path, as that is not treated as an artifact and needs
1873 # to be set separately
1874 similar_target = \
1875 self.orm_wrapper.get_similar_target_with_image_files(
1876 image_target)
1877
1878 if similar_target:
1879 logger.info('image artifacts for target %s cloned from ' \
1880 'target %s' % (image_target.pk, similar_target.pk))
1881 self.orm_wrapper.clone_image_artifacts(similar_target,
1882 image_target)
1883
1884 def _get_sdk_targets(self):
1885 """
1886 Return targets which could generate SDK artifacts, i.e.
1887 "do_populate_sdk" and "do_populate_sdk_ext".
1888 """
1889 return [target for target in self.internal_state['targets'] \
1890 if target.task in ['populate_sdk', 'populate_sdk_ext']]
1891
1892 def scan_sdk_artifacts(self, event):
1893 """
1894 Note that we have to intercept an SDKArtifactInfo event from
1895 toaster.bbclass (via toasterui) to get hold of the SDK variables we
1896 need to be able to scan for files accurately: this is because
1897 variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
1898 BuildCompleted is fired by bitbake, so we have to get those values
1899 while the build is still in progress.
1900
1901 For populate_sdk_ext, this runs twice, with two different
1902 TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
1903 files in the SDK output directory.
1904 """
1905 sdk_vars = BuildInfoHelper._get_data_from_event(event)
1906 toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
1907
1908 # targets which might have created SDK artifacts
1909 sdk_targets = self._get_sdk_targets()
1910
1911 # location of SDK artifacts
1912 tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
1913 sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
1914
1915 # all files in the SDK directory
1916 artifacts = []
1917 for dir_path, _, filenames in os.walk(sdk_dir):
1918 for filename in filenames:
1919 full_path = os.path.join(dir_path, filename)
1920 if not os.path.islink(full_path):
1921 artifacts.append(full_path)
1922
1923 for sdk_target in sdk_targets:
1924 # find files in the SDK directory which haven't already been
1925 # recorded against a Target and whose basename matches
1926 # TOOLCHAIN_OUTPUTNAME
1927 for artifact_path in artifacts:
1928 basename = os.path.basename(artifact_path)
1929
1930 toolchain_match = basename.startswith(toolchain_outputname)
1931
1932 # files which match the name of the target which produced them;
1933 # for example,
1934 # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
1935 target_match = re.search(re.escape(sdk_target.target), basename)
1936
1937 # targets which produce "*-nativesdk-*" files
1938 is_ext_sdk_target = sdk_target.task in \
1939 ['do_populate_sdk_ext', 'populate_sdk_ext']
1940
1941 # SDK files which don't match the target name, i.e.
1942 # x86_64-nativesdk-libc.*
1943 # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
1944 is_ext_sdk_file = re.search('-nativesdk-', basename)
1945
1946 file_from_target = (toolchain_match and target_match) or \
1947 (is_ext_sdk_target and is_ext_sdk_file)
1948
1949 if file_from_target:
1950 # don't record the file if it's already been added to this
1951 # target
1952 matching_files = TargetSDKFile.objects.filter(
1953 target=sdk_target, file_name=artifact_path)
1954
1955 if matching_files.count() == 0:
1956 artifact_size = os.stat(artifact_path).st_size
1957
1958 self.orm_wrapper.save_target_sdk_file(
1959 sdk_target, artifact_path, artifact_size)
1960
1961 def clone_required_sdk_artifacts(self):
1962 """
1963 If an SDK target doesn't have any SDK artifacts, this means that
1964 the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1965 in turn means that the targets of this build didn't generate any new
1966 artifacts.
1967
1968 In this case, clone SDK artifacts for targets in the current build
1969 from existing targets for this build.
1970 """
1971 sdk_targets = self._get_sdk_targets()
1972 for sdk_target in sdk_targets:
1973 # only clone for SDK targets which have no TargetSDKFiles yet
1974 if sdk_target.targetsdkfile_set.all().count() == 0:
1975 similar_target = \
1976 self.orm_wrapper.get_similar_target_with_sdk_files(
1977 sdk_target)
1978 if similar_target:
1979 logger.info('SDK artifacts for target %s cloned from ' \
1980 'target %s' % (sdk_target.pk, similar_target.pk))
1981 self.orm_wrapper.clone_sdk_artifacts(similar_target,
1982 sdk_target)
1983
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001984 def close(self, errorcode):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001985 self._store_build_done(errorcode)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001986
1987 if 'backlog' in self.internal_state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001988 # we save missed events in the database for the current build
1989 tempevent = self.internal_state['backlog'].pop()
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001990 # Do not skip command line build events
1991 self.store_log_event(tempevent,False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001992
1993 if not connection.features.autocommits_when_autocommit_is_off:
1994 transaction.set_autocommit(True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001995
1996 # unset the brbe; this is to prevent subsequent command-line builds
1997 # being incorrectly attached to the previous Toaster-triggered build;
1998 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1999 self.brbe = None
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002000
2001 # unset the internal Build object to prevent it being reused for the
2002 # next build
2003 self.internal_state['build'] = None