#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

#Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage, CustomImageRecipe
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction, connection


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

    @staticmethod
    def _build_key(**kwargs):
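        """
        Build a memory-cache key from the given kwargs: model instances
        contribute their primary key, everything else its string value.
        Illustrative example (assumed values):
        _build_key(build=<Build pk=5>, task_name="do_compile") -> "0-5-do_compile"
        (kwarg names are visited in sorted order).
        """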
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key


    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

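        # Typical call pattern elsewhere in this class (illustrative):
        #   task_object, created = self._cached_get_or_create(
        #       Task, build=build, recipe=recipe, task_name=task_name)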
94 assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
95
96 key = ORMWrapper._build_key(**kwargs)
97 dictname = "objects_%s" % clazz.__name__
98 if not dictname in vars(self).keys():
99 vars(self)[dictname] = {}
100
101 created = False
102 if not key in vars(self)[dictname].keys():
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500103 vars(self)[dictname][key], created = \
104 clazz.objects.get_or_create(**kwargs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500105
106 return (vars(self)[dictname][key], created)
107
108
109 def _cached_get(self, clazz, **kwargs):
110 """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
111 """
112 assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
113
114 key = ORMWrapper._build_key(**kwargs)
115 dictname = "objects_%s" % clazz.__name__
116
117 if not dictname in vars(self).keys():
118 vars(self)[dictname] = {}
119
120 if not key in vars(self)[dictname].keys():
121 vars(self)[dictname][key] = clazz.objects.get(**kwargs)
122
123 return vars(self)[dictname][key]
124
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600125 def get_similar_target_with_image_files(self, target):
126 """
127 Get a Target object "similar" to target; i.e. with the same target
128 name ('core-image-minimal' etc.) and machine.
129 """
130 return target.get_similar_target_with_image_files()
131
132 def get_similar_target_with_sdk_files(self, target):
133 return target.get_similar_target_with_sdk_files()
134
135 def clone_image_artifacts(self, target_from, target_to):
136 target_to.clone_image_artifacts_from(target_from)
137
138 def clone_sdk_artifacts(self, target_from, target_to):
139 target_to.clone_sdk_artifacts_from(target_from)
140
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500141 def _timestamp_to_datetime(self, secs):
142 """
143 Convert timestamp in seconds to Python datetime
144 """
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600145 return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500146
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500147 # pylint: disable=no-self-use
148 # we disable detection of no self use in functions because the methods actually work on the object
149 # even if they don't touch self anywhere
150
151 # pylint: disable=bad-continuation
152 # we do not follow the python conventions for continuation indentation due to long lines here
153
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600154 def get_or_create_build_object(self, brbe):
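        # brbe is set by localhostbecontroller for Toaster-triggered builds and
        # is assumed to have the form "<BuildRequest id>:<BuildEnvironment id>";
        # for command-line builds it is None.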
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

        logger.debug(1, "buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
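        # target_info is assumed to be a dict of the form
        # {'build': <Build>, 'targets': ["core-image-minimal", "busybox:do_clean", ...]},
        # i.e. each entry may carry an optional ":task" suffix (names illustrative).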
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result

    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build, Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()
        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name,
        and update it with the stats to be stored
        """
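        # task_stats is assumed to be the per-task build statistics dict
        # forwarded by toasterui; every key used below ('started', 'ended',
        # 'cpu_time_user', 'cpu_time_system', 'disk_io_read',
        # 'disk_io_write') is treated as optional.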
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                                        build=task_information['build'],
                                        recipe=task_information['recipe'],
                                        task_name=task_information['task_name']
                                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object


    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/") # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                        layer_version=built_layer,
                        file_path=recipe_information['file_path'],
                        pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # Special case the toaster-custom-images layer which is created
            # on the fly so don't update the values which may cause the layer
            # to be duplicated on a future get_or_create
            if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME:
                return layer_obj
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.info("created new historical layer version %d",
                        layer_copy.pk)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                  build = build_obj,
                                  layer = layer_obj,
                                  branch = layer_version_information['branch'],
                                  commit = layer_version_information['commit'],
                                  priority = layer_version_information['priority'],
                                  local_path = layer_version_information['local_path'],
                                  project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have a git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # find layer by checkout path;
            from bldcontrol import bbcontroller
            bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

            # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
            # but we can only match on the layer name, so the worst that can happen is a mis-identification of the layer, not a total failure

            # note that this is different
            buildrequest = BuildRequest.objects.get(pk = br_id)
            for brl in buildrequest.brlayer_set.all():
                if brl.local_source_dir:
                    localdirname = os.path.join(brl.local_source_dir,
                                                brl.dirpath)
                else:
                    localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                #logger.debug(1, "Localdirname %s local_path %s" % (localdirname, layer_information['local_path']))
                if localdirname.startswith(layer_information['local_path']):
                    # If the build request came from toaster this field
                    # should contain the information from the layer_version
                    # that created this build request.
                    if brl.layer_version:
                        return brl.layer_version

                    # This might be a local layer (i.e. no git info) so try
                    # matching local_source_dir
                    if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]:
                        return brl.layer_version

                    # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                    #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))

                    for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                        if pl.layercommit.layer.vcs_url == brl.giturl :
                            layer = pl.layercommit.layer
                            layer.save()
                            return layer

            raise NotExisting("Unidentified layer %s" % pformat(layer_information))


    def save_target_file_information(self, build_obj, target_obj, filedata):
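        """
        Store the directory / file / symlink listing for target_obj.
        filedata is assumed to be a dict with 'dirs', 'files' and 'syms'
        lists, each entry being an ls-style record in which index 0 is the
        permission string, indices 1-3 are owner/group/size, index 4 is the
        path and (for symlinks) index 6 is the link target.
        """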
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if len(path) == 0:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_DIRECTORY,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = inodetype,
                        permission = permission,
                        owner = user,
                        group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
                        size = size,
                        inodetype = Target_File.ITYPE_SYMLINK,
                        permission = permission,
                        owner = user,
                        group = group,
                        directory = parent_obj,
                        sym_target = filetarget_obj)


    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
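        """
        Store the per-target package list. packagedict is assumed to map an
        installed package name to a dict with at least 'size' and 'depends'
        entries; pkgpnmap maps the same names to the package metadata used
        below (PN, PV, PR, LICENSE, SECTION, SUMMARY, DESCRIPTION, PKGSIZE,
        FILES_INFO and, optionally, OPKGN).
        """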
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            # Search name switches round the installed name vs package name
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but is"
                               " missing from all packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=
                        built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the "
                                "configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                                                                 path = targetpath,
                                                                 size = targetfilesize))
                    if len(packagefile_objects):
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if len(errormsg) > 0:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
            file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
            file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration "
                             "data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
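        """
        Store the variable dump for this build. vardump is assumed to map each
        variable name to a dict with 'v' (value), 'doc' (description), 'func'
        (true for shell/python functions) and 'history' (a list of dicts with
        'file', 'line' and 'op' entries).
        """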
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)


class MockEvent(object):
    """ This object is used to create an event, for which the normal
        event-processing methods can be used, out of data that is not
        coming via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None


class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database.
        It is instantiated once per build.
        Keeps in memory all data that needs matching before writing it to the database.
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no task is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
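        # server is the BitBake server connection used below for runCommand()
        # queries such as ["getVariable", "TMPDIR"]; has_build_history is
        # assumed to mirror whether buildhistory is enabled for this build.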
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name != None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash. and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_dependency(self, pathRE):
        """ Returns the layer in the toaster db that has a full regex match to the pathRE.
        pathRE - the layer path passed as a regex in the event. It is created in
          cooker.py as a collection for the layer priorities.
        """
        self._ensure_build()

        def _sort_longest_path(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # we don't care if we match the trailing slashes
        p = re.compile(re.sub("/[^/]*?$","",pathRE))
        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_sort_longest_path):
            if p.fullmatch(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir:
                if p.fullmatch(lvo.layer.local_source_dir):
                    return lvo
        # if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer dependency for path %s : %s", pathRE, self.orm_wrapper.layer_version_objects)


    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        # if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        # mockup the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj

    def _get_recipe_information_from_taskfile(self, taskfile):
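        # taskfile is assumed to be the recipe path as reported by BitBake,
        # optionally prefixed with colon-separated flags (for example a
        # "virtual:native:" prefix); everything before the final ":" is kept
        # as pathflags and the last element is the recipe file path.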
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)

        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info

    def _get_path_information(self, task_object):
        self._ensure_build()

        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            pe, pv = task_object.recipe.version.split(":",1)
            if len(pe) > 0:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                              buildname=buildname,
                                                              package=package))

        return build_stats_path


    ################################
    ## external available methods to store information
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata nor data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                logger.warning("buildinfohelper: cannot identify layer exception: %s", nee)

    def store_started_build(self):
        self._ensure_build()

    def save_build_log_file_path(self, build_log_path):
        self._ensure_build()

        if not self.internal_state['build'].cooker_log_path:
            data_dict = {'cooker_log_path': build_log_path}
            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)

    def save_build_targets(self, event):
        self._ensure_build()

        # create target information
        assert '_pkgs' in vars(event)
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = self.internal_state['build']

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

    def save_build_layers_and_variables(self):
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file'] = abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe

    def set_recipes_to_parse(self, num_recipes):
        """
        Set the number of recipes which need to be parsed for this build.
        This is set the first time ParseStarted is received by toasterui.
        """
        self._ensure_build()
        self.internal_state['build'].recipes_to_parse = num_recipes
        self.internal_state['build'].save()

    def set_recipes_parsed(self, num_recipes):
        """
        Set the number of recipes parsed so far for this build; this is updated
        each time a ParseProgress or ParseCompleted event is received by
        toasterui.
        """
        self._ensure_build()
        if num_recipes <= self.internal_state['build'].recipes_to_parse:
            self.internal_state['build'].recipes_parsed = num_recipes
            self.internal_state['build'].save()

1188 def update_target_image_file(self, event):
1189 evdata = BuildInfoHelper._get_data_from_event(event)
1190
1191 for t in self.internal_state['targets']:
1192 if t.is_image == True:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001193 output_files = list(evdata.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001194 for output in output_files:
1195 if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
1196 self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
1197
1198 def update_artifact_image_file(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001199 self._ensure_build()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001200 evdata = BuildInfoHelper._get_data_from_event(event)
1201 for artifact_path in evdata.keys():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001202 self.orm_wrapper.save_artifact_information(
1203 self.internal_state['build'], artifact_path,
1204 evdata[artifact_path])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001205
1206 def update_build_information(self, event, errors, warnings, taskfailures):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001207 self._ensure_build()
1208 self.orm_wrapper.update_build_stats_and_outcome(
1209 self.internal_state['build'], errors, warnings, taskfailures)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001210
1211 def store_started_task(self, event):
1212 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
1213 assert 'taskfile' in vars(event)
1214 localfilepath = event.taskfile.split(":")[-1]
1215 assert localfilepath.startswith("/")
1216
1217 identifier = event.taskfile + ":" + event.taskname
1218
1219 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
1220 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1221
1222 task_information = self._get_task_information(event, recipe)
1223 task_information['outcome'] = Task.OUTCOME_NA
1224
1225 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
1226 assert 'reason' in vars(event)
1227 task_information['task_executed'] = False
1228 if event.reason == "covered":
1229 task_information['outcome'] = Task.OUTCOME_COVERED
1230 if event.reason == "existing":
1231 task_information['outcome'] = Task.OUTCOME_PREBUILT
1232 else:
1233 task_information['task_executed'] = True
1234 if 'noexec' in vars(event) and event.noexec == True:
1235 task_information['task_executed'] = False
1236 task_information['outcome'] = Task.OUTCOME_EMPTY
1237 task_information['script_type'] = Task.CODING_NA
1238
1239 # do not assign order numbers to scene tasks
1240 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
1241 self.task_order += 1
1242 task_information['order'] = self.task_order
1243
1244 self.orm_wrapper.get_update_task_object(task_information)
1245
1246 self.internal_state['taskdata'][identifier] = {
1247 'outcome': task_information['outcome'],
1248 }
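# the outcome cached here is picked up by update_and_store_task() when the
# corresponding *TaskCompleted/*TaskFailed event arrives for this identifier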
1249
1250
1251 def store_tasks_stats(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001252 self._ensure_build()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001253 task_data = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001254
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001255 for (task_file, task_name, task_stats, recipe_name) in task_data:
1256 build = self.internal_state['build']
1257 self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001258
1259 def update_and_store_task(self, event):
1260 assert 'taskfile' in vars(event)
1261 localfilepath = event.taskfile.split(":")[-1]
1262 assert localfilepath.startswith("/")
1263
1264 identifier = event.taskfile + ":" + event.taskname
1265 if identifier not in self.internal_state['taskdata']:
1266 if isinstance(event, bb.build.TaskBase):
1267 # we do a bit of guessing
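# (bb.build.TaskBase events are assumed to carry a shorter taskfile than the
# runqueue events stored earlier, e.g. without a "virtual:..." prefix, so we
# match on the identifier suffix instead)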
1268 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1269 if len(candidates) == 1:
1270 identifier = candidates[0]
1271
1272 assert identifier in self.internal_state['taskdata']
1273 identifierlist = identifier.split(":")
1274 realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
1275 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1276 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1277 task_information = self._get_task_information(event,recipe)
1278
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001279 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1280
1281 if 'logfile' in vars(event):
1282 task_information['logfile'] = event.logfile
1283
1284 if '_message' in vars(event):
1285 task_information['message'] = event._message
1286
1287 if 'taskflags' in vars(event):
1288 # with TaskStarted, we get even more information
1289 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
1290 task_information['script_type'] = Task.CODING_PYTHON
1291 else:
1292 task_information['script_type'] = Task.CODING_SHELL
1293
1294 if task_information['outcome'] == Task.OUTCOME_NA:
1295 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1296 task_information['outcome'] = Task.OUTCOME_SUCCESS
1297 del self.internal_state['taskdata'][identifier]
1298
1299 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1300 task_information['outcome'] = Task.OUTCOME_FAILED
1301 del self.internal_state['taskdata'][identifier]
1302
1303 if not connection.features.autocommits_when_autocommit_is_off:
1304 # we force a sync point here, to get the progress bar to show
1305 if self.autocommit_step % 3 == 0:
1306 transaction.set_autocommit(True)
1307 transaction.set_autocommit(False)
1308 self.autocommit_step += 1
1309
1310 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1311
1312
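# The event payload below provides 'missed' and 'found' lists of
# (fn, taskname, taskhash, sstatefile) tuples; missed setscene objects are
# recorded with SSTATE_MISS, found ones just get their sstate path stored.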
1313 def store_missed_state_tasks(self, event):
1314 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1315
1316 # identifier = fn + taskname + "_setscene"
1317 recipe_information = self._get_recipe_information_from_taskfile(fn)
1318 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1319 mevent = MockEvent()
1320 mevent.taskname = taskname
1321 mevent.taskhash = taskhash
1322 task_information = self._get_task_information(mevent,recipe)
1323
1324 task_information['start_time'] = timezone.now()
1325 task_information['outcome'] = Task.OUTCOME_NA
1326 task_information['sstate_checksum'] = taskhash
1327 task_information['sstate_result'] = Task.SSTATE_MISS
1328 task_information['path_to_sstate_obj'] = sstatefile
1329
1330 self.orm_wrapper.get_update_task_object(task_information)
1331
1332 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1333
1334 # identifier = fn + taskname + "_setscene"
1335 recipe_information = self._get_recipe_information_from_taskfile(fn)
1336 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1337 mevent = MockEvent()
1338 mevent.taskname = taskname
1339 mevent.taskhash = taskhash
1340 task_information = self._get_task_information(mevent,recipe)
1341
1342 task_information['path_to_sstate_obj'] = sstatefile
1343
1344 self.orm_wrapper.get_update_task_object(task_information)
1345
1346
1347 def store_target_package_data(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001348 self._ensure_build()
1349
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350 # for all image targets
1351 for target in self.internal_state['targets']:
1352 if target.is_image:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001353 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001354 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1355 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001356
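# the same package data is saved twice below: once flagged as built packages
# and once as target/installed packages (the ORM wrapper is assumed to
# distinguish the two via the built_package flag)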
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001357 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001358 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1359 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001360 except KeyError as e:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001361 logger.warning("KeyError in save_target_package_information"
1362 ": %s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001363
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001364 # only try to find files in the image if the task for this
1365 # target is one which produces image files; otherwise, the old
1366 # list of files in the files-in-image.txt file will be
1367 # appended to the target even if it didn't produce any images
1368 if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1369 try:
1370 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1371 except KeyError as e:
1372 logger.warning("KeyError in save_target_file_information"
1373 ": %s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001374
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001375
1376
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001377 def cancel_cli_build(self):
1378 """
1379 If a build is currently underway, set its state to CANCELLED;
1380 note that this only gets called for command line builds which are
1381 interrupted, so it doesn't touch any BuildRequest objects
1382 """
1383 self._ensure_build()
1384 self.internal_state['build'].outcome = Build.CANCELLED
1385 self.internal_state['build'].save()
1386 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001387
1388 def store_dependency_information(self, event):
1389 assert '_depgraph' in vars(event)
1390 assert 'layer-priorities' in event._depgraph
1391 assert 'pn' in event._depgraph
1392 assert 'tdepends' in event._depgraph
1393
1394 errormsg = ""
1395
1396 # save layer version priorities
1397 if 'layer-priorities' in event._depgraph.keys():
1398 for lv in event._depgraph['layer-priorities']:
1399 (_, path, _, priority) = lv
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001400 layer_version_obj = self._get_layer_version_for_dependency(path)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001401 assert layer_version_obj is not None
1402 layer_version_obj.priority = priority
1403 layer_version_obj.save()
1404
1405 # save recipe information
1406 self.internal_state['recipes'] = {}
1407 for pn in event._depgraph['pn']:
1408
1409 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1410 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
1411 layer_version_obj = self._get_layer_version_for_path(file_name)
1412
1413 assert layer_version_obj is not None
1414
1415 recipe_info = {}
1416 recipe_info['name'] = pn
1417 recipe_info['layer_version'] = layer_version_obj
1418
1419 if 'version' in event._depgraph['pn'][pn]:
1420 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1421
1422 if 'summary' in event._depgraph['pn'][pn]:
1423 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1424
1425 if 'license' in event._depgraph['pn'][pn]:
1426 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1427
1428 if 'description' in event._depgraph['pn'][pn]:
1429 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1430
1431 if 'section' in event._depgraph['pn'][pn]:
1432 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1433
1434 if 'homepage' in event._depgraph['pn'][pn]:
1435 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1436
1437 if 'bugtracker' in event._depgraph['pn'][pn]:
1438 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1439
1440 recipe_info['file_path'] = file_name
1441 recipe_info['pathflags'] = pathflags
1442
1443 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1444 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1445 else:
1446 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1447
1448 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1449 recipe.is_image = False
1450 if 'inherits' in event._depgraph['pn'][pn].keys():
1451 for cls in event._depgraph['pn'][pn]['inherits']:
1452 if cls.endswith('/image.bbclass'):
1453 recipe.is_image = True
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001454 recipe_info['is_image'] = True
1455 # Save the is_image state to the relevant recipe objects
1456 self.orm_wrapper.get_update_recipe_object(recipe_info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001457 break
1458 if recipe.is_image:
1459 for t in self.internal_state['targets']:
1460 if pn == t.target:
1461 t.is_image = True
1462 t.save()
1463 self.internal_state['recipes'][pn] = recipe
1464
1465 # we won't get recipe objects for anything listed in ASSUME_PROVIDED, so skip those dependencies
1466
1467 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
1468
1469 # save recipe dependency
1470 # buildtime
1471 recipedeps_objects = []
1472 for recipe in event._depgraph['depends']:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001473 target = self.internal_state['recipes'][recipe]
1474 for dep in event._depgraph['depends'][recipe]:
1475 if dep in assume_provided:
1476 continue
1477 via = None
1478 if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
1479 deprecipe = event._depgraph['providermap'][dep][0]
1480 dependency = self.internal_state['recipes'][deprecipe]
1481 via = Provides.objects.get_or_create(name=dep,
1482 recipe=dependency)[0]
1483 elif dep in self.internal_state['recipes']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001484 dependency = self.internal_state['recipes'][dep]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001485 else:
1486 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
1487 continue
1488 recipe_dep = Recipe_Dependency(recipe=target,
1489 depends_on=dependency,
1490 via=via,
1491 dep_type=Recipe_Dependency.TYPE_DEPENDS)
1492 recipedeps_objects.append(recipe_dep)
1493
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001494 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1495
1496 # save all task information
1497 def _save_a_task(taskdesc):
1498 spec = re.split(r'\.', taskdesc)
1499 pn = ".".join(spec[0:-1])
1500 taskname = spec[-1]
1501 e = event
1502 e.taskname = pn
1503 recipe = self.internal_state['recipes'][pn]
1504 task_info = self._get_task_information(e, recipe)
1505 task_info['task_name'] = taskname
1506 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1507 return task_obj
1508
1509 # create tasks
1510 tasks = {}
1511 for taskdesc in event._depgraph['tdepends']:
1512 tasks[taskdesc] = _save_a_task(taskdesc)
1513
1514 # create dependencies between tasks
1515 taskdeps_objects = []
1516 for taskdesc in event._depgraph['tdepends']:
1517 target = tasks[taskdesc]
1518 for taskdep in event._depgraph['tdepends'][taskdesc]:
1519 if taskdep not in tasks:
1520 # task info was not collected previously, so save it now
1521 dep = _save_a_task(taskdep)
1522 else:
1523 dep = tasks[taskdep]
1524 taskdeps_objects.append(Task_Dependency(task=target, depends_on=dep))
1525 Task_Dependency.objects.bulk_create(taskdeps_objects)
1526
1527 if len(errormsg) > 0:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001528 logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", errormsg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529
1530
1531 def store_build_package_information(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001532 self._ensure_build()
1533
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001534 package_info = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001535 self.orm_wrapper.save_build_package_information(
1536 self.internal_state['build'],
1537 package_info,
1538 self.internal_state['recipes'],
1539 built_package=True)
1540
1541 self.orm_wrapper.save_build_package_information(
1542 self.internal_state['build'],
1543 package_info,
1544 self.internal_state['recipes'],
1545 built_package=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001546
1547 def _store_build_done(self, errorcode):
1548 logger.info("Build exited with errorcode %d", errorcode)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001549
1550 if not self.brbe:
1551 return
1552
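# brbe is a "<BuildRequest id>:<BuildEnvironment id>" string set when the
# build was triggered from Toaster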
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001553 br_id, be_id = self.brbe.split(":")
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001554
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555 br = BuildRequest.objects.get(pk = br_id)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001556
1557 # if we're 'done' because we got cancelled update the build outcome
1558 if br.state == BuildRequest.REQ_CANCELLING:
1559 logger.info("Build cancelled")
1560 br.build.outcome = Build.CANCELLED
1561 br.build.save()
1562 self.internal_state['build'] = br.build
1563 errorcode = 0
1564
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001565 if errorcode == 0:
1566 # request archival of the project artifacts
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001567 br.state = BuildRequest.REQ_COMPLETED
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 else:
1569 br.state = BuildRequest.REQ_FAILED
1570 br.save()
1571
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001572 be = BuildEnvironment.objects.get(pk = be_id)
1573 be.lock = BuildEnvironment.LOCK_FREE
1574 be.save()
1575 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001576
1577 def store_log_error(self, text):
1578 mockevent = MockEvent()
1579 mockevent.levelno = formatter.ERROR
1580 mockevent.msg = text
1581 mockevent.pathname = '-- None'
1582 mockevent.lineno = LogMessage.ERROR
1583 self.store_log_event(mockevent)
1584
1585 def store_log_exception(self, text, backtrace = ""):
1586 mockevent = MockEvent()
1587 mockevent.levelno = -1
1588 mockevent.msg = text
1589 mockevent.pathname = backtrace
1590 mockevent.lineno = -1
1591 self.store_log_event(mockevent)
1592
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001593 def store_log_event(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001594 self._ensure_build()
1595
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001596 if event.levelno < formatter.WARNING:
1597 return
1598
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001599 # early return for CLI builds
1600 if self.brbe is None:
1601 if 'backlog' not in self.internal_state:
1602 self.internal_state['backlog'] = []
1603 self.internal_state['backlog'].append(event)
1604 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001605
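# once the build request is known, drain any backlogged events first; each
# recursive store_log_event() call pops one event until the backlog is
# empty, at which point the backlog list is removed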
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001606 if 'backlog' in self.internal_state:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001607 # if we have a backlog of events, do our best to save them here
1608 if len(self.internal_state['backlog']):
1609 tempevent = self.internal_state['backlog'].pop()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001610 logger.debug(1, "buildinfohelper: Saving stored event %s "
1611 % tempevent)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612 self.store_log_event(tempevent)
1613 else:
1614 logger.info("buildinfohelper: All events saved")
1615 del self.internal_state['backlog']
1616
1617 log_information = {}
1618 log_information['build'] = self.internal_state['build']
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001619 if event.levelno == formatter.CRITICAL:
1620 log_information['level'] = LogMessage.CRITICAL
1621 elif event.levelno == formatter.ERROR:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001622 log_information['level'] = LogMessage.ERROR
1623 elif event.levelno == formatter.WARNING:
1624 log_information['level'] = LogMessage.WARNING
1625 elif event.levelno == -2: # toaster self-logging
1626 log_information['level'] = -2
1627 else:
1628 log_information['level'] = LogMessage.INFO
1629
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001630 log_information['message'] = event.getMessage()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001631 log_information['pathname'] = event.pathname
1632 log_information['lineno'] = event.lineno
1633 logger.info("Logging error 2: %s", log_information)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001634
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635 self.orm_wrapper.create_logmessage(log_information)
1636
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001637 def _get_filenames_from_image_license(self, image_license_manifest_path):
1638 """
1639 Find the FILES line in the image_license.manifest file,
1640 which has the basenames of the bzImage and modules files
1641 in this format:
1642 FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1643 """
1644 files = []
1645 with open(image_license_manifest_path) as image_license:
1646 for line in image_license:
1647 if line.startswith('FILES'):
1648 files_str = line.split(':')[1].strip()
1649 files_str = re.sub(r' {2,}', ' ', files_str)
1650
1651 # ignore lines like "FILES:" with no filenames
1652 if files_str:
1653 files += files_str.split(' ')
1654 return files
1655
1656 def _endswith(self, str_to_test, endings):
1657 """
1658 Returns True if str ends with one of the strings in the list
1659 endings, False otherwise
1660 """
1661 endswith = False
1662 for ending in endings:
1663 if str_to_test.endswith(ending):
1664 endswith = True
1665 break
1666 return endswith
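# equivalent, more idiomatic one-liner (not used, shown for reference):
#   return any(str_to_test.endswith(ending) for ending in endings)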
1667
1668 def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1669 """
1670 Find files in deploy_dir_image whose basename starts with the
1671 string image_name and ends with one of the strings in
1672 image_file_extensions.
1673
1674 Returns a list of file dictionaries like
1675
1676 [
1677 {
1678 'path': '/path/to/image/file',
1679 'size': <file size in bytes>
1680 }
1681 ]
1682 """
1683 image_files = []
1684
1685 for dirpath, _, filenames in os.walk(deploy_dir_image):
1686 for filename in filenames:
1687 if filename.startswith(image_name) and \
1688 self._endswith(filename, image_file_extensions):
1689 image_file_path = os.path.join(dirpath, filename)
1690 image_file_size = os.stat(image_file_path).st_size
1691
1692 image_files.append({
1693 'path': image_file_path,
1694 'size': image_file_size
1695 })
1696
1697 return image_files
1698
1699 def scan_image_artifacts(self):
1700 """
1701 Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1702 with a Target object in self.internal_state['targets'].
1703
1704 We have two situations to handle:
1705
1706 1. This is the first time a target + machine has been built, so
1707 add files from the DEPLOY_DIR_IMAGE to the target.
1708
1709 OR
1710
1711 2. There are no new files for the target (they were already produced by
1712 a previous build), so copy them from the most recent previous build with
1713 the same target, task and machine.
1714 """
1715 deploy_dir_image = \
1716 self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1717
1718 # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1719 # any image artifacts, so we can return immediately
1720 if not deploy_dir_image:
1721 return
1722
1723 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1724 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1725 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1726
1727 # location of the manifest files for this build;
1728 # note that this file is only produced if an image is produced
1729 license_directory = \
1730 self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1731
1732 # file name extensions for image files
1733 image_file_extensions_unique = set()
1734 image_fstypes = self.server.runCommand(
1735 ['getVariable', 'IMAGE_FSTYPES'])[0]
1736 if image_fstypes is not None:
1737 image_types_str = image_fstypes.strip()
1738 image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
1739 image_file_extensions_unique = set(image_file_extensions.split(' '))
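# e.g. IMAGE_FSTYPES = "tar.bz2 ext4 wic" would give the extension set
# {'tar.bz2', 'ext4', 'wic'} (illustrative values)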
1740
1741 targets = self.internal_state['targets']
1742
1743 # filter out anything which isn't an image target
1744 image_targets = [target for target in targets if target.is_image]
1745
1746 for image_target in image_targets:
1747 # this is set to True if we find at least one file relating to
1748 # this target; if this remains False after the scan, we copy the
1749 # files from the most-recent Target with the same target + machine
1750 # onto this Target instead
1751 has_files = False
1752
1753 # we construct this because by the time we reach
1754 # BuildCompleted, this has reset to
1755 # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1756 # we need to change it to
1757 # <TARGET>-<MACHINE>-<BUILDNAME>
1758 real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1759 image_name)
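# e.g. "defaultpkgname-qemux86-20160603165040" becomes
# "core-image-minimal-qemux86-20160603165040" (illustrative names)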
1760
1761 image_license_manifest_path = os.path.join(
1762 license_directory,
1763 real_image_name,
1764 'image_license.manifest')
1765
1766 image_package_manifest_path = os.path.join(
1767 license_directory,
1768 real_image_name,
1769 'image_license.manifest')
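# note: image_package_manifest_path duplicates image_license_manifest_path
# and is not referenced below; the package manifest actually recorded is
# derived from DEPLOY_DIR_IMAGE further down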
1770
1771 # if image_license.manifest exists, we can read the names of
1772 # bzImage, modules etc. files for this build from it, then look for
1773 # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1774 # if an image file was produced
1775 if os.path.isfile(image_license_manifest_path):
1776 has_files = True
1777
1778 basenames = self._get_filenames_from_image_license(
1779 image_license_manifest_path)
1780
1781 for basename in basenames:
1782 artifact_path = os.path.join(deploy_dir_image, basename)
1783 if not os.path.exists(artifact_path):
1784 logger.warning("artifact %s doesn't exist, skipping", artifact_path)
1785 continue
1786 artifact_size = os.stat(artifact_path).st_size
1787
1788 # note that the artifact will only be saved against this
1789 # build if it hasn't been already
1790 self.orm_wrapper.save_target_kernel_file(image_target,
1791 artifact_path, artifact_size)
1792
1793 # store the license manifest path on the target
1794 # (this file is also created any time an image file is created)
1795 license_manifest_path = os.path.join(license_directory,
1796 real_image_name, 'license.manifest')
1797
1798 self.orm_wrapper.update_target_set_license_manifest(
1799 image_target, license_manifest_path)
1800
1801 # store the package manifest path on the target (this file
1802 # is created any time an image file is created)
1803 package_manifest_path = os.path.join(deploy_dir_image,
1804 real_image_name + '.rootfs.manifest')
1805
1806 if os.path.exists(package_manifest_path):
1807 self.orm_wrapper.update_target_set_package_manifest(
1808 image_target, package_manifest_path)
1809
1810 # scan the directory for image files relating to this build
1811 # (via real_image_name); note that we don't have to set
1812 # has_files = True, as searching for the license manifest file
1813 # will already have set it to true if at least one image file was
1814 # produced; note that the real_image_name includes BUILDNAME, which
1815 # in turn includes a timestamp; so if no files were produced for
1816 # this timestamp (i.e. the build reused existing image files already
1817 # in the directory), no files will be recorded against this target
1818 image_files = self._get_image_files(deploy_dir_image,
1819 real_image_name, image_file_extensions_unique)
1820
1821 for image_file in image_files:
1822 self.orm_wrapper.save_target_image_file_information(
1823 image_target, image_file['path'], image_file['size'])
1824
1825 if not has_files:
1826 # copy image files and build artifacts from the
1827 # most-recently-built Target with the
1828 # same target + machine as this Target; also copy the license
1829 # manifest path, as that is not treated as an artifact and needs
1830 # to be set separately
1831 similar_target = \
1832 self.orm_wrapper.get_similar_target_with_image_files(
1833 image_target)
1834
1835 if similar_target:
1836 logger.info('image artifacts for target %s cloned from '
1837 'target %s', image_target.pk, similar_target.pk)
1838 self.orm_wrapper.clone_image_artifacts(similar_target,
1839 image_target)
1840
1841 def _get_sdk_targets(self):
1842 """
1843 Return targets which could generate SDK artifacts, i.e.
1844 "do_populate_sdk" and "do_populate_sdk_ext".
1845 """
1846 return [target for target in self.internal_state['targets'] \
1847 if target.task in ['populate_sdk', 'populate_sdk_ext']]
1848
1849 def scan_sdk_artifacts(self, event):
1850 """
1851 Note that we have to intercept an SDKArtifactInfo event from
1852 toaster.bbclass (via toasterui) to get hold of the SDK variables we
1853 need to be able to scan for files accurately: this is because
1854 variables like TOOLCHAIN_OUTPUTNAME have been reset to None by the time
1855 BuildCompleted is fired by bitbake, so we have to get those values
1856 while the build is still in progress.
1857
1858 For populate_sdk_ext, this runs twice, with two different
1859 TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
1860 files in the SDK output directory.
1861 """
1862 sdk_vars = BuildInfoHelper._get_data_from_event(event)
1863 toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
1864
1865 # targets which might have created SDK artifacts
1866 sdk_targets = self._get_sdk_targets()
1867
1868 # location of SDK artifacts
1869 tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
1870 sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
1871
1872 # all files in the SDK directory
1873 artifacts = []
1874 for dir_path, _, filenames in os.walk(sdk_dir):
1875 for filename in filenames:
1876 full_path = os.path.join(dir_path, filename)
1877 if not os.path.islink(full_path):
1878 artifacts.append(full_path)
1879
1880 for sdk_target in sdk_targets:
1881 # find files in the SDK directory which haven't already been
1882 # recorded against a Target and whose basename matches
1883 # TOOLCHAIN_OUTPUTNAME
1884 for artifact_path in artifacts:
1885 basename = os.path.basename(artifact_path)
1886
1887 toolchain_match = basename.startswith(toolchain_outputname)
1888
1889 # files which match the name of the target which produced them;
1890 # for example,
1891 # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
1892 target_match = re.search(sdk_target.target, basename)
1893
1894 # targets which produce "*-nativesdk-*" files
1895 is_ext_sdk_target = sdk_target.task in \
1896 ['do_populate_sdk_ext', 'populate_sdk_ext']
1897
1898 # SDK files which don't match the target name, i.e.
1899 # x86_64-nativesdk-libc.*
1900 # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
1901 is_ext_sdk_file = re.search('-nativesdk-', basename)
1902
1903 file_from_target = (toolchain_match and target_match) or \
1904 (is_ext_sdk_target and is_ext_sdk_file)
1905
1906 if file_from_target:
1907 # don't record the file if it's already been added to this
1908 # target
1909 matching_files = TargetSDKFile.objects.filter(
1910 target=sdk_target, file_name=artifact_path)
1911
1912 if matching_files.count() == 0:
1913 artifact_size = os.stat(artifact_path).st_size
1914
1915 self.orm_wrapper.save_target_sdk_file(
1916 sdk_target, artifact_path, artifact_size)
1917
1918 def clone_required_sdk_artifacts(self):
1919 """
1920 If an SDK target doesn't have any SDK artifacts, this means that
1921 the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1922 in turn means that the targets of this build didn't generate any new
1923 artifacts.
1924
1925 In this case, clone SDK artifacts for targets in the current build
1926 from existing targets for this build.
1927 """
1928 sdk_targets = self._get_sdk_targets()
1929 for sdk_target in sdk_targets:
1930 # only clone for SDK targets which have no TargetSDKFiles yet
1931 if sdk_target.targetsdkfile_set.all().count() == 0:
1932 similar_target = \
1933 self.orm_wrapper.get_similar_target_with_sdk_files(
1934 sdk_target)
1935 if similar_target:
1936 logger.info('SDK artifacts for target %s cloned from '
1937 'target %s', sdk_target.pk, similar_target.pk)
1938 self.orm_wrapper.clone_sdk_artifacts(similar_target,
1939 sdk_target)
1940
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001941 def close(self, errorcode):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001942 self._store_build_done(errorcode)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001943
1944 if 'backlog' in self.internal_state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001945 # we save missed events in the database for the current build
1946 tempevent = self.internal_state['backlog'].pop()
1947 self.store_log_event(tempevent)
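# when a build request is associated, store_log_event() drains the remaining
# backlog itself, so popping a single event here flushes everything queued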
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001948
1949 if not connection.features.autocommits_when_autocommit_is_off:
1950 transaction.set_autocommit(True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001951
1952 # unset the brbe; this is to prevent subsequent command-line builds
1953 # being incorrectly attached to the previous Toaster-triggered build;
1954 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1955 self.brbe = None
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001956
1957 # unset the internal Build object to prevent it being reused for the
1958 # next build
1959 self.internal_state['build'] = None