#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013        Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

# Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] = \
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage, CustomImageRecipe
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction, connection


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key


    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key], created = \
                clazz.objects.get_or_create(**kwargs)

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]

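    # Illustrative note on the caches above (not part of the runtime flow):
    # each model class gets its own dict on the wrapper instance, keyed by the
    # sorted kwargs as produced by _build_key().  A hypothetical call such as
    #
    #   task, created = orm_wrapper._cached_get_or_create(Task, build=b, task_name="do_compile")
    #
    # stores the Task under vars(orm_wrapper)["objects_Task"]["0-<b.id>-do_compile"],
    # so a second call with the same arguments is answered from memory instead
    # of issuing another database query.
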
    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.
        """
        return target.get_similar_target_with_image_files()

    def get_similar_target_with_sdk_files(self, target):
        return target.get_similar_target_with_sdk_files()

    def clone_image_artifacts(self, target_from, target_to):
        target_to.clone_image_artifacts_from(target_from)

    def clone_sdk_artifacts(self, target_from, target_to):
        target_to.clone_sdk_artifacts_from(target_from)

    def _timestamp_to_datetime(self, secs):
        """
        Convert timestamp in seconds to Python datetime
        """
        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))

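    # For illustration, assuming the default Django timezone configuration:
    #
    #   self._timestamp_to_datetime(0)            # aware datetime at the epoch
    #   self._timestamp_to_datetime(1455612000)   # an aware datetime in 2016
    #
    # make_aware() attaches the configured timezone, so the stored values
    # compare cleanly with timezone.now() used elsewhere in this module.
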
    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def get_or_create_build_object(self, brbe):
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

        logger.debug(1, "buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

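    # A note on 'brbe', used above and throughout this module: it is a string
    # of the form "<BuildRequest id>:<BuildEnvironment id>" (for example
    # "13:1", values invented here) handed down for Toaster-triggered builds;
    # the split above keeps only the BuildRequest id:
    #
    #   br, _ = brbe.split(":")          # "13:1" -> br == "13"
    #   BuildRequest.objects.get(pk=br)
    #
    # When brbe is None the build came from the bitbake command line and is
    # attached to the default project instead.
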
    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result

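    # Example of the target normalisation above (illustrative values only):
    #
    #   "core-image-minimal"                 -> target="core-image-minimal", task=""
    #   "core-image-minimal:do_populate_sdk" -> target="core-image-minimal", task="populate_sdk"
    #   "core-image-minimal:do_build"        -> target="core-image-minimal", task=""
    #
    # i.e. the "do_" prefix is dropped and the default "build" task is stored
    # as an empty string.
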
    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build, Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()
        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

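    # For reference, task_stats as consumed above is a plain dict; the values
    # below are invented for illustration:
    #
    #   {'started': 1455612000.0, 'ended': 1455612010.5,
    #    'cpu_time_user': 8.2, 'cpu_time_system': 1.1,
    #    'disk_io_read': 1024, 'disk_io_write': 4096}
    #
    # started/ended and the disk I/O pair are only stored when both keys of the
    # pair are present; the CPU times use .get() and may therefore be None.
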
    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")     # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                        layer_version=built_layer,
                        file_path=recipe_information['file_path'],
                        pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # Special case the toaster-custom-images layer which is created
            # on the fly so don't update the values which may cause the layer
            # to be duplicated on a future get_or_create
            if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME:
                return layer_obj
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.info("created new historical layer version %d",
                        layer_copy.pk)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                      build = build_obj,
                                      layer = layer_obj,
                                      branch = layer_version_information['branch'],
                                      commit = layer_version_information['commit'],
                                      priority = layer_version_information['priority'],
                                      local_path = layer_version_information['local_path'],
                                      project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have a git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # find layer by checkout path;
            from bldcontrol import bbcontroller
            bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

            # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
            # but we can only match on the layer name, so the worst that can happen is a mis-identification of the layer, not a total failure

            # note that this is different
            buildrequest = BuildRequest.objects.get(pk = br_id)
            for brl in buildrequest.brlayer_set.all():
                if brl.local_source_dir:
                    localdirname = os.path.join(brl.local_source_dir,
                                                brl.dirpath)
                else:
                    localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                #logger.debug(1, "Localdirname %s local_path %s" % (localdirname, layer_information['local_path']))
                if localdirname.startswith(layer_information['local_path']):
                    # If the build request came from toaster, this field
                    # should contain the information from the layer_version
                    # that created this build request.
                    if brl.layer_version:
                        return brl.layer_version

                    # This might be a local layer (i.e. no git info) so try
                    # matching local_source_dir
                    if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]:
                        return brl.layer_version

                    # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                    #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))

                    for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                        if pl.layercommit.layer.vcs_url == brl.giturl:
                            layer = pl.layercommit.layer
                            layer.save()
                            return layer

            raise NotExisting("Unidentified layer %s" % pformat(layer_information))


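    # Sketch of the two lookup paths above (argument values are illustrative):
    #
    #   # command-line build of a layer that is not under git (no commit):
    #   get_update_layer_object({'name': 'meta-local',
    #                            'local_path': '/srv/meta-local',
    #                            'layer_index_url': '',
    #                            'version': {'commit': ''}}, brbe=None)
    #   # -> Layer(name='meta-local', local_source_dir='/srv/meta-local')
    #
    #   # Toaster-triggered build: brbe="<br_id>:<be_id>", and the layer is
    #   # matched back to the BRLayer / Layer_Version that scheduled the build
    #   # by comparing checkout paths.
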
    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x: len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if len(path) == 0:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


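    # Layout of the filedata rows consumed above: each entry d is an
    # "ls -l"-style record produced on the bitbake side, indexed as
    #
    #   d[0]    mode string, e.g. 'drwxr-xr-x' (first character selects the inode type)
    #   d[1:4]  owner, group, size
    #   d[4]    path relative to the image root (leading '.' stripped)
    #   d[6]    symlink target (symlink entries only)
    #
    # Only these indices are relied upon here; the example mode string is
    # illustrative.
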
    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            # Search name switches round the installed name vs package name;
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but it is"
                               " missing from the all-packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the "
                                "configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                                                                 path = targetpath,
                                                                 size = targetfilesize))
                    if len(packagefile_objects):
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg += "  stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px, deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if len(errormsg) > 0:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

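    # For reference, pkgpnmap maps an installed package name to the metadata
    # fields read above; a single entry looks roughly like this (values
    # invented for illustration, only the keys matter):
    #
    #   pkgpnmap['busybox'] = {'PN': 'busybox', 'PV': '1.24.1', 'PR': 'r0',
    #                          'LICENSE': 'GPLv2', 'SECTION': 'base',
    #                          'SUMMARY': '...', 'DESCRIPTION': '...',
    #                          'PKGSIZE': '524288',
    #                          'FILES_INFO': {'/bin/busybox': 510000}}
    #
    # with an optional 'OPKGN' key when the on-disk package name differs.
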
    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
                                     file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration "
                             "data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                                     path = path,
                                                     size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)

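    # For reference, vardump (obtained via the "getAllKeysWithFlags" command,
    # see save_build_layers_and_variables below) maps each variable name to a
    # dict of the shape consumed above, roughly:
    #
    #   vardump['MACHINE'] = {'v': 'qemux86', 'doc': 'The target machine...',
    #                         'func': False,
    #                         'history': [{'file': '.../local.conf',
    #                                      'line': 37, 'op': 'set'}]}
    #
    # (values invented for illustration).
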

class MockEvent(object):
    """ This object is used to create events, for which the normal
        event-processing methods can be used, out of data that is not coming
        via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None

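# MockEvent usage (see store_missed_state_tasks below): setscene hit/miss data
# arrives as plain tuples rather than as events, so a MockEvent is filled in by
# hand and pushed through the normal event helpers, e.g.
#
#   mevent = MockEvent()
#   mevent.taskname = taskname
#   mevent.taskhash = taskhash
#   task_information = self._get_task_information(mevent, recipe)
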

class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database.
        It is instantiated once per build and keeps in memory all data that
        needs matching before writing it to the database.
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no task is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name != None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash, and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        # if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        # mock up the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj

    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)

        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info

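    # Example of the taskfile split above: BitBake prefixes recipe paths with
    # class-extension markers such as "virtual:native:", so a taskfile like
    #
    #   "virtual:native:/home/user/poky/meta/recipes-core/busybox/busybox_1.24.1.bb"
    #
    # yields pathflags "native:virtual" (the sorted prefix parts) and the plain
    # .bb path as localfilepath, which is then made layer-relative above.
    # (The path shown is illustrative.)
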
    def _get_path_information(self, task_object):
        self._ensure_build()

        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            pe, pv = task_object.recipe.version.split(":", 1)
            if len(pe) > 0:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                              buildname=buildname,
                                                              package=package))

        return build_stats_path

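    # For illustration, with TMPDIR=/home/user/poky/build/tmp, a build named
    # "20170223101530" and a busybox 1.24.1-r0 recipe, the format string above
    # expands to
    #
    #   /home/user/poky/build/tmp/buildstats/20170223101530/busybox-1.24.1-r0/
    #
    # (one entry per target; the "<pe>_" prefix is added to the version only
    # when the recipe defines a non-empty package epoch).  All values here are
    # invented for illustration.
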

    ################################
    ## externally available methods to store information
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata nor data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                logger.warning("buildinfohelper: cannot identify layer exception: %s", nee)

    def store_started_build(self):
        self._ensure_build()

    def save_build_log_file_path(self, build_log_path):
        self._ensure_build()

        if not self.internal_state['build'].cooker_log_path:
            data_dict = {'cooker_log_path': build_log_path}
            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)

    def save_build_targets(self, event):
        self._ensure_build()

        # create target information
        assert '_pkgs' in vars(event)
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = self.internal_state['build']

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

    def save_build_layers_and_variables(self):
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x: len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file'] = abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe

    def set_recipes_to_parse(self, num_recipes):
        """
        Set the number of recipes which need to be parsed for this build.
        This is set the first time ParseStarted is received by toasterui.
        """
        self._ensure_build()
        self.internal_state['build'].recipes_to_parse = num_recipes
        self.internal_state['build'].save()

    def set_recipes_parsed(self, num_recipes):
        """
        Set the number of recipes parsed so far for this build; this is updated
        each time a ParseProgress or ParseCompleted event is received by
        toasterui.
        """
        self._ensure_build()
        if num_recipes <= self.internal_state['build'].recipes_to_parse:
            self.internal_state['build'].recipes_parsed = num_recipes
            self.internal_state['build'].save()

    def update_target_image_file(self, event):
        evdata = BuildInfoHelper._get_data_from_event(event)

        for t in self.internal_state['targets']:
            if t.is_image == True:
                output_files = list(evdata.keys())
                for output in output_files:
                    if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
                        self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])

    def update_artifact_image_file(self, event):
        self._ensure_build()
        evdata = BuildInfoHelper._get_data_from_event(event)
        for artifact_path in evdata.keys():
            self.orm_wrapper.save_artifact_information(
                self.internal_state['build'], artifact_path,
                evdata[artifact_path])

    def update_build_information(self, event, errors, warnings, taskfailures):
        self._ensure_build()
        self.orm_wrapper.update_build_stats_and_outcome(
            self.internal_state['build'], errors, warnings, taskfailures)

    def store_started_task(self, event):
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            if 'noexec' in vars(event) and event.noexec == True:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        self.internal_state['taskdata'][identifier] = {
            'outcome': task_information['outcome'],
        }


    def store_tasks_stats(self, event):
        self._ensure_build()
        task_data = BuildInfoHelper._get_data_from_event(event)

        for (task_file, task_name, task_stats, recipe_name) in task_data:
            build = self.internal_state['build']
            self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)

1234 def update_and_store_task(self, event):
1235 assert 'taskfile' in vars(event)
1236 localfilepath = event.taskfile.split(":")[-1]
1237 assert localfilepath.startswith("/")
1238
1239 identifier = event.taskfile + ":" + event.taskname
1240 if not identifier in self.internal_state['taskdata']:
1241 if isinstance(event, bb.build.TaskBase):
1242 # we do a bit of guessing
1243 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1244 if len(candidates) == 1:
1245 identifier = candidates[0]
1246
1247 assert identifier in self.internal_state['taskdata']
1248 identifierlist = identifier.split(":")
1249 realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
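# e.g. an identifier of "virtual:native:/path/to/foo.bb:do_compile"
# (hypothetical path) gives a realtaskfile of "virtual:native:/path/to/foo.bb",
# i.e. everything before the final colon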
1250 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1251 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1252 task_information = self._get_task_information(event, recipe)
1253
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001254 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1255
1256 if 'logfile' in vars(event):
1257 task_information['logfile'] = event.logfile
1258
1259 if '_message' in vars(event):
1260 task_information['message'] = event._message
1261
1262 if 'taskflags' in vars(event):
1263 # with TaskStarted, we get even more information
1264 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
1265 task_information['script_type'] = Task.CODING_PYTHON
1266 else:
1267 task_information['script_type'] = Task.CODING_SHELL
1268
1269 if task_information['outcome'] == Task.OUTCOME_NA:
1270 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1271 task_information['outcome'] = Task.OUTCOME_SUCCESS
1272 del self.internal_state['taskdata'][identifier]
1273
1274 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1275 task_information['outcome'] = Task.OUTCOME_FAILED
1276 del self.internal_state['taskdata'][identifier]
1277
1278 if not connection.features.autocommits_when_autocommit_is_off:
1279 # we force a sync point here, to get the progress bar to show
1280 if self.autocommit_step % 3 == 0:
1281 transaction.set_autocommit(True)
1282 transaction.set_autocommit(False)
1283 self.autocommit_step += 1
1284
1285 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1286
1287
1288 def store_missed_state_tasks(self, event):
1289 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1290
1291 # identifier = fn + taskname + "_setscene"
1292 recipe_information = self._get_recipe_information_from_taskfile(fn)
1293 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1294 mevent = MockEvent()
1295 mevent.taskname = taskname
1296 mevent.taskhash = taskhash
1297 task_information = self._get_task_information(mevent, recipe)
1298
1299 task_information['start_time'] = timezone.now()
1300 task_information['outcome'] = Task.OUTCOME_NA
1301 task_information['sstate_checksum'] = taskhash
1302 task_information['sstate_result'] = Task.SSTATE_MISS
1303 task_information['path_to_sstate_obj'] = sstatefile
1304
1305 self.orm_wrapper.get_update_task_object(task_information)
1306
1307 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1308
1309 # identifier = fn + taskname + "_setscene"
1310 recipe_information = self._get_recipe_information_from_taskfile(fn)
1311 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1312 mevent = MockEvent()
1313 mevent.taskname = taskname
1314 mevent.taskhash = taskhash
1315 task_information = self._get_task_information(mevent, recipe)
1316
1317 task_information['path_to_sstate_obj'] = sstatefile
1318
1319 self.orm_wrapper.get_update_task_object(task_information)
1320
1321
1322 def store_target_package_data(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001323 self._ensure_build()
1324
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001325 # for all image targets
1326 for target in self.internal_state['targets']:
1327 if target.is_image:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001328 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001329 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1330 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001331
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001333 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1334 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001335 except KeyError as e:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001336 logger.warning("KeyError in save_target_package_information: "
1337 "%s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001338
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001339 # only try to find files in the image if the task for this
1340 # target is one which produces image files; otherwise, the old
1341 # list of files in the files-in-image.txt file will be
1342 # appended to the target even if it didn't produce any images
1343 if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1344 try:
1345 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1346 except KeyError as e:
1347 logger.warning("KeyError in save_target_file_information: "
1348 "%s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001349
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350
1351
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001352 def cancel_cli_build(self):
1353 """
1354 If a build is currently underway, set its state to CANCELLED;
1355 note that this only gets called for command line builds which are
1356 interrupted, so it doesn't touch any BuildRequest objects
1357 """
1358 self._ensure_build()
1359 self.internal_state['build'].outcome = Build.CANCELLED
1360 self.internal_state['build'].save()
1361 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001362
1363 def store_dependency_information(self, event):
1364 assert '_depgraph' in vars(event)
1365 assert 'layer-priorities' in event._depgraph
1366 assert 'pn' in event._depgraph
1367 assert 'tdepends' in event._depgraph
1368
1369 errormsg = ""
1370
1371 # save layer version priorities
1372 if 'layer-priorities' in event._depgraph.keys():
1373 for lv in event._depgraph['layer-priorities']:
1374 (_, path, _, priority) = lv
1375 layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
1376 assert layer_version_obj is not None
1377 layer_version_obj.priority = priority
1378 layer_version_obj.save()
1379
1380 # save recipe information
1381 self.internal_state['recipes'] = {}
1382 for pn in event._depgraph['pn']:
1383
1384 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1385 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
1386 layer_version_obj = self._get_layer_version_for_path(file_name)
1387
1388 assert layer_version_obj is not None
1389
1390 recipe_info = {}
1391 recipe_info['name'] = pn
1392 recipe_info['layer_version'] = layer_version_obj
1393
1394 if 'version' in event._depgraph['pn'][pn]:
1395 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1396
1397 if 'summary' in event._depgraph['pn'][pn]:
1398 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1399
1400 if 'license' in event._depgraph['pn'][pn]:
1401 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1402
1403 if 'description' in event._depgraph['pn'][pn]:
1404 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1405
1406 if 'section' in event._depgraph['pn'][pn]:
1407 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1408
1409 if 'homepage' in event._depgraph['pn'][pn]:
1410 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1411
1412 if 'bugtracker' in event._depgraph['pn'][pn]:
1413 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1414
1415 recipe_info['file_path'] = file_name
1416 recipe_info['pathflags'] = pathflags
1417
1418 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1419 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1420 else:
1421 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1422
1423 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1424 recipe.is_image = False
1425 if 'inherits' in event._depgraph['pn'][pn].keys():
1426 for cls in event._depgraph['pn'][pn]['inherits']:
1427 if cls.endswith('/image.bbclass'):
1428 recipe.is_image = True
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001429 recipe_info['is_image'] = True
1430 # Save the is_image state to the relevant recipe objects
1431 self.orm_wrapper.get_update_recipe_object(recipe_info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001432 break
1433 if recipe.is_image:
1434 for t in self.internal_state['targets']:
1435 if pn == t.target:
1436 t.is_image = True
1437 t.save()
1438 self.internal_state['recipes'][pn] = recipe
1439
1440 # we will not get recipes for dependencies whose names are listed in ASSUME_PROVIDED
1441
1442 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
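# ASSUME_PROVIDED typically lists host-provided tools, e.g.
# "git-native tar-native bzip2-native" (illustrative); dependencies on
# these names are skipped below because no recipe objects exist for them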
1443
1444 # save recipe dependency
1445 # buildtime
1446 recipedeps_objects = []
1447 for recipe in event._depgraph['depends']:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001448 target = self.internal_state['recipes'][recipe]
1449 for dep in event._depgraph['depends'][recipe]:
1450 if dep in assume_provided:
1451 continue
1452 via = None
1453 if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
1454 deprecipe = event._depgraph['providermap'][dep][0]
1455 dependency = self.internal_state['recipes'][deprecipe]
1456 via = Provides.objects.get_or_create(name=dep,
1457 recipe=dependency)[0]
1458 elif dep in self.internal_state['recipes']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001459 dependency = self.internal_state['recipes'][dep]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001460 else:
1461 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
1462 continue
1463 recipe_dep = Recipe_Dependency(recipe=target,
1464 depends_on=dependency,
1465 via=via,
1466 dep_type=Recipe_Dependency.TYPE_DEPENDS)
1467 recipedeps_objects.append(recipe_dep)
1468
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001469 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1470
1471 # save all task information
1472 def _save_a_task(taskdesc):
1473 spec = re.split(r'\.', taskdesc)
1474 pn = ".".join(spec[0:-1])
1475 taskname = spec[-1]
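# taskdesc strings have the form "<pn>.<taskname>", e.g.
# "busybox.do_compile" (illustrative), so pn is everything before the
# last dot and taskname is the final component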
1476 e = event
1477 e.taskname = pn
1478 recipe = self.internal_state['recipes'][pn]
1479 task_info = self._get_task_information(e, recipe)
1480 task_info['task_name'] = taskname
1481 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1482 return task_obj
1483
1484 # create tasks
1485 tasks = {}
1486 for taskdesc in event._depgraph['tdepends']:
1487 tasks[taskdesc] = _save_a_task(taskdesc)
1488
1489 # create dependencies between tasks
1490 taskdeps_objects = []
1491 for taskdesc in event._depgraph['tdepends']:
1492 target = tasks[taskdesc]
1493 for taskdep in event._depgraph['tdepends'][taskdesc]:
1494 if taskdep not in tasks:
1495 # this task's info was not collected previously, so save it now
1496 dep = _save_a_task(taskdep)
1497 else:
1498 dep = tasks[taskdep]
1499 taskdeps_objects.append(Task_Dependency(task=target, depends_on=dep))
1500 Task_Dependency.objects.bulk_create(taskdeps_objects)
1501
1502 if len(errormsg) > 0:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001503 logger.warning("buildinfohelper: dependency information could not identify recipes: \n%s", errormsg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001504
1505
1506 def store_build_package_information(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001507 self._ensure_build()
1508
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001509 package_info = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001510 self.orm_wrapper.save_build_package_information(
1511 self.internal_state['build'],
1512 package_info,
1513 self.internal_state['recipes'],
1514 built_package=True)
1515
1516 self.orm_wrapper.save_build_package_information(
1517 self.internal_state['build'],
1518 package_info,
1519 self.internal_state['recipes'],
1520 built_package=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521
1522 def _store_build_done(self, errorcode):
1523 logger.info("Build exited with errorcode %d", errorcode)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001524
1525 if not self.brbe:
1526 return
1527
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001528 br_id, be_id = self.brbe.split(":")
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001529
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001530 br = BuildRequest.objects.get(pk = br_id)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001531
1532 # if we're 'done' because we got cancelled update the build outcome
1533 if br.state == BuildRequest.REQ_CANCELLING:
1534 logger.info("Build cancelled")
1535 br.build.outcome = Build.CANCELLED
1536 br.build.save()
1537 self.internal_state['build'] = br.build
1538 errorcode = 0
1539
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001540 if errorcode == 0:
1541 # request archival of the project artifacts
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001542 br.state = BuildRequest.REQ_COMPLETED
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001543 else:
1544 br.state = BuildRequest.REQ_FAILED
1545 br.save()
1546
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001547 be = BuildEnvironment.objects.get(pk = be_id)
1548 be.lock = BuildEnvironment.LOCK_FREE
1549 be.save()
1550 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001551
1552 def store_log_error(self, text):
1553 mockevent = MockEvent()
1554 mockevent.levelno = formatter.ERROR
1555 mockevent.msg = text
1556 mockevent.pathname = '-- None'
1557 mockevent.lineno = LogMessage.ERROR
1558 self.store_log_event(mockevent)
1559
1560 def store_log_exception(self, text, backtrace = ""):
1561 mockevent = MockEvent()
1562 mockevent.levelno = -1
1563 mockevent.msg = text
1564 mockevent.pathname = backtrace
1565 mockevent.lineno = -1
1566 self.store_log_event(mockevent)
1567
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 def store_log_event(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001569 self._ensure_build()
1570
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001571 if event.levelno < formatter.WARNING:
1572 return
1573
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001574 # early return for CLI builds
1575 if self.brbe is None:
1576 if 'backlog' not in self.internal_state:
1577 self.internal_state['backlog'] = []
1578 self.internal_state['backlog'].append(event)
1579 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001580
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001581 if 'backlog' in self.internal_state:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001582 # if we have a backlog of events, do our best to save them here
1583 if len(self.internal_state['backlog']):
1584 tempevent = self.internal_state['backlog'].pop()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001585 logger.debug(1, "buildinfohelper: Saving stored event %s "
1586 % tempevent)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001587 self.store_log_event(tempevent)
1588 else:
1589 logger.info("buildinfohelper: All events saved")
1590 del self.internal_state['backlog']
1591
1592 log_information = {}
1593 log_information['build'] = self.internal_state['build']
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001594 if event.levelno == formatter.CRITICAL:
1595 log_information['level'] = LogMessage.CRITICAL
1596 elif event.levelno == formatter.ERROR:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001597 log_information['level'] = LogMessage.ERROR
1598 elif event.levelno == formatter.WARNING:
1599 log_information['level'] = LogMessage.WARNING
1600 elif event.levelno == -2: # toaster self-logging
1601 log_information['level'] = -2
1602 else:
1603 log_information['level'] = LogMessage.INFO
1604
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001605 log_information['message'] = event.getMessage()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 log_information['pathname'] = event.pathname
1607 log_information['lineno'] = event.lineno
1608 logger.info("Logging error 2: %s", log_information)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001609
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001610 self.orm_wrapper.create_logmessage(log_information)
1611
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001612 def _get_filenames_from_image_license(self, image_license_manifest_path):
1613 """
1614 Find the FILES line in the image_license.manifest file,
1615 which has the basenames of the bzImage and modules files
1616 in this format:
1617 FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1618 """
1619 files = []
1620 with open(image_license_manifest_path) as image_license:
1621 for line in image_license:
1622 if line.startswith('FILES'):
1623 files_str = line.split(':')[1].strip()
1624 files_str = re.sub(r' {2,}', ' ', files_str)
1625
1626 # ignore lines like "FILES:" with no filenames
1627 if files_str:
1628 files += files_str.split(' ')
1629 return files
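# for the FILES line shown in the docstring above, this returns
# ['bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin',
#  'modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz']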
1630
1631 def _endswith(self, str_to_test, endings):
1632 """
1633 Return True if str_to_test ends with one of the strings in the
1634 list endings, False otherwise.
1635 """
1636 endswith = False
1637 for ending in endings:
1638 if str_to_test.endswith(ending):
1639 endswith = True
1640 break
1641 return endswith
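# note: equivalent to str_to_test.endswith(tuple(endings)), as
# str.endswith() also accepts a tuple of suffixes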
1642
1643 def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1644 """
1645 Find files in deploy_dir_image whose basename starts with the
1646 string image_name and ends with one of the strings in
1647 image_file_extensions.
1648
1649 Returns a list of file dictionaries like
1650
1651 [
1652 {
1653 'path': '/path/to/image/file',
1654 'size': <file size in bytes>
1655 }
1656 ]
1657 """
1658 image_files = []
1659
1660 for dirpath, _, filenames in os.walk(deploy_dir_image):
1661 for filename in filenames:
1662 if filename.startswith(image_name) and \
1663 self._endswith(filename, image_file_extensions):
1664 image_file_path = os.path.join(dirpath, filename)
1665 image_file_size = os.stat(image_file_path).st_size
1666
1667 image_files.append({
1668 'path': image_file_path,
1669 'size': image_file_size
1670 })
1671
1672 return image_files
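# illustrative call with hypothetical values:
#   self._get_image_files('/tmp/deploy/images/qemux86',
#                         'core-image-minimal-qemux86-20160603165040',
#                         {'ext4', 'tar.bz2'})
# would return entries such as
#   [{'path': '/tmp/deploy/images/qemux86/core-image-minimal-qemux86-20160603165040.rootfs.ext4',
#     'size': 12345678}]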
1673
1674 def scan_image_artifacts(self):
1675 """
1676 Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1677 with a Target object in self.internal_state['targets'].
1678
1679 We have two situations to handle:
1680
1681 1. This is the first time a target + machine has been built, so
1682 add files from the DEPLOY_DIR_IMAGE to the target.
1683
1684 OR
1685
1686 2. There are no new files for the target (they were already produced by
1687 a previous build), so copy them from the most recent previous build with
1688 the same target, task and machine.
1689 """
1690 deploy_dir_image = \
1691 self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1692
1693 # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1694 # any image artifacts, so we can return immediately
1695 if not deploy_dir_image:
1696 return
1697
1698 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1699 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1700 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1701
1702 # location of the manifest files for this build;
1703 # note that this file is only produced if an image is produced
1704 license_directory = \
1705 self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1706
1707 # file name extensions for image files
1708 image_file_extensions_unique = {}
1709 image_fstypes = self.server.runCommand(
1710 ['getVariable', 'IMAGE_FSTYPES'])[0]
1711 if image_fstypes is not None:
1712 image_types_str = image_fstypes.strip()
1713 image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
1714 image_file_extensions_unique = set(image_file_extensions.split(' '))
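# e.g. an IMAGE_FSTYPES value of "ext4  tar.bz2 wic" (illustrative)
# becomes {'ext4', 'tar.bz2', 'wic'} after whitespace normalisation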
1715
1716 targets = self.internal_state['targets']
1717
1718 # filter out anything which isn't an image target
1719 image_targets = [target for target in targets if target.is_image]
1720
1721 for image_target in image_targets:
1722 # this is set to True if we find at least one file relating to
1723 # this target; if this remains False after the scan, we copy the
1724 # files from the most-recent Target with the same target + machine
1725 # onto this Target instead
1726 has_files = False
1727
1728 # we construct this because by the time we reach
1729 # BuildCompleted, this has reset to
1730 # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1731 # we need to change it to
1732 # <TARGET>-<MACHINE>-<BUILDNAME>
1733 real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1734 image_name)
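# e.g. an image_name of "defaultpkgname-qemux86-20160603165040" with a
# target of "core-image-minimal" (illustrative values) gives a
# real_image_name of "core-image-minimal-qemux86-20160603165040"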
1735
1736 image_license_manifest_path = os.path.join(
1737 license_directory,
1738 real_image_name,
1739 'image_license.manifest')
1740
1741 image_package_manifest_path = os.path.join(
1742 license_directory,
1743 real_image_name,
1744 'image_license.manifest')
1745
1746 # if image_license.manifest exists, we can read the names of
1747 # bzImage, modules etc. files for this build from it, then look for
1748 # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1749 # if an image file was produced
1750 if os.path.isfile(image_license_manifest_path):
1751 has_files = True
1752
1753 basenames = self._get_filenames_from_image_license(
1754 image_license_manifest_path)
1755
1756 for basename in basenames:
1757 artifact_path = os.path.join(deploy_dir_image, basename)
1758 if not os.path.exists(artifact_path):
1759 logger.warning("artifact %s doesn't exist, skipping", artifact_path)
1760 continue
1761 artifact_size = os.stat(artifact_path).st_size
1762
1763 # note that the artifact will only be saved against this
1764 # build if it hasn't been already
1765 self.orm_wrapper.save_target_kernel_file(image_target,
1766 artifact_path, artifact_size)
1767
1768 # store the license manifest path on the target
1769 # (this file is also created any time an image file is created)
1770 license_manifest_path = os.path.join(license_directory,
1771 real_image_name, 'license.manifest')
1772
1773 self.orm_wrapper.update_target_set_license_manifest(
1774 image_target, license_manifest_path)
1775
1776 # store the package manifest path on the target (this file
1777 # is created any time an image file is created)
1778 package_manifest_path = os.path.join(deploy_dir_image,
1779 real_image_name + '.rootfs.manifest')
1780
1781 if os.path.exists(package_manifest_path):
1782 self.orm_wrapper.update_target_set_package_manifest(
1783 image_target, package_manifest_path)
1784
1785 # scan the directory for image files relating to this build
1786 # (via real_image_name); note that we don't have to set
1787 # has_files = True, as searching for the license manifest file
1788 # will already have set it to true if at least one image file was
1789 # produced; note that the real_image_name includes BUILDNAME, which
1790 # in turn includes a timestamp; so if no files were produced for
1791 # this timestamp (i.e. the build reused existing image files already
1792 # in the directory), no files will be recorded against this target
1793 image_files = self._get_image_files(deploy_dir_image,
1794 real_image_name, image_file_extensions_unique)
1795
1796 for image_file in image_files:
1797 self.orm_wrapper.save_target_image_file_information(
1798 image_target, image_file['path'], image_file['size'])
1799
1800 if not has_files:
1801 # copy image files and build artifacts from the
1802 # most-recently-built Target with the
1803 # same target + machine as this Target; also copy the license
1804 # manifest path, as that is not treated as an artifact and needs
1805 # to be set separately
1806 similar_target = \
1807 self.orm_wrapper.get_similar_target_with_image_files(
1808 image_target)
1809
1810 if similar_target:
1811 logger.info('image artifacts for target %s cloned from '
1812 'target %s', image_target.pk, similar_target.pk)
1813 self.orm_wrapper.clone_image_artifacts(similar_target,
1814 image_target)
1815
1816 def _get_sdk_targets(self):
1817 """
1818 Return targets which could generate SDK artifacts, i.e.
1819 "do_populate_sdk" and "do_populate_sdk_ext".
1820 """
1821 return [target for target in self.internal_state['targets'] \
1822 if target.task in ['populate_sdk', 'populate_sdk_ext']]
1823
1824 def scan_sdk_artifacts(self, event):
1825 """
1826 Note that we have to intercept an SDKArtifactInfo event from
1827 toaster.bbclass (via toasterui) to get hold of the SDK variables we
1828 need to be able to scan for files accurately: this is because
1829 variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
1830 BuildCompleted is fired by bitbake, so we have to get those values
1831 while the build is still in progress.
1832
1833 For populate_sdk_ext, this runs twice, with two different
1834 TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
1835 files in the SDK output directory.
1836 """
1837 sdk_vars = BuildInfoHelper._get_data_from_event(event)
1838 toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
1839
1840 # targets which might have created SDK artifacts
1841 sdk_targets = self._get_sdk_targets()
1842
1843 # location of SDK artifacts
1844 tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
1845 sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
1846
1847 # all files in the SDK directory
1848 artifacts = []
1849 for dir_path, _, filenames in os.walk(sdk_dir):
1850 for filename in filenames:
1851 full_path = os.path.join(dir_path, filename)
1852 if not os.path.islink(full_path):
1853 artifacts.append(full_path)
1854
1855 for sdk_target in sdk_targets:
1856 # find files in the SDK directory which haven't already been
1857 # recorded against a Target and whose basename matches
1858 # TOOLCHAIN_OUTPUTNAME
1859 for artifact_path in artifacts:
1860 basename = os.path.basename(artifact_path)
1861
1862 toolchain_match = basename.startswith(toolchain_outputname)
1863
1864 # files which match the name of the target which produced them;
1865 # for example,
1866 # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
1867 target_match = re.search(sdk_target.target, basename)
1868
1869 # targets which produce "*-nativesdk-*" files
1870 is_ext_sdk_target = sdk_target.task in \
1871 ['do_populate_sdk_ext', 'populate_sdk_ext']
1872
1873 # SDK files which don't match the target name, i.e.
1874 # x86_64-nativesdk-libc.*
1875 # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
1876 is_ext_sdk_file = re.search('-nativesdk-', basename)
1877
1878 file_from_target = (toolchain_match and target_match) or \
1879 (is_ext_sdk_target and is_ext_sdk_file)
1880
1881 if file_from_target:
1882 # don't record the file if it's already been added to this
1883 # target
1884 matching_files = TargetSDKFile.objects.filter(
1885 target=sdk_target, file_name=artifact_path)
1886
1887 if matching_files.count() == 0:
1888 artifact_size = os.stat(artifact_path).st_size
1889
1890 self.orm_wrapper.save_target_sdk_file(
1891 sdk_target, artifact_path, artifact_size)
1892
1893 def clone_required_sdk_artifacts(self):
1894 """
1895 If an SDK target doesn't have any SDK artifacts, this means that
1896 the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1897 in turn means that the targets of this build didn't generate any new
1898 artifacts.
1899
1900 In this case, clone SDK artifacts for targets in the current build
1901 from existing targets for this build.
1902 """
1903 sdk_targets = self._get_sdk_targets()
1904 for sdk_target in sdk_targets:
1905 # only clone for SDK targets which have no TargetSDKFiles yet
1906 if sdk_target.targetsdkfile_set.all().count() == 0:
1907 similar_target = \
1908 self.orm_wrapper.get_similar_target_with_sdk_files(
1909 sdk_target)
1910 if similar_target:
1911 logger.info('SDK artifacts for target %s cloned from '
1912 'target %s', sdk_target.pk, similar_target.pk)
1913 self.orm_wrapper.clone_sdk_artifacts(similar_target,
1914 sdk_target)
1915
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001916 def close(self, errorcode):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001917 self._store_build_done(errorcode)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001918
1919 if 'backlog' in self.internal_state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001920 # we save missed events in the database for the current build
1921 tempevent = self.internal_state['backlog'].pop()
1922 self.store_log_event(tempevent)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001923
1924 if not connection.features.autocommits_when_autocommit_is_off:
1925 transaction.set_autocommit(True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001926
1927 # unset the brbe; this is to prevent subsequent command-line builds
1928 # being incorrectly attached to the previous Toaster-triggered build;
1929 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1930 self.brbe = None
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001931
1932 # unset the internal Build object to prevent it being reused for the
1933 # next build
1934 self.internal_state['build'] = None