#
# BitBake ToasterUI Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import bb
import re
import os

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

# Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile
from orm.models import Variable, VariableHistory
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
from orm.models import Task_Dependency, Package_Dependency
from orm.models import Recipe_Dependency, Provides
from orm.models import Project, CustomImagePackage
from orm.models import signal_runbuilds

from bldcontrol.models import BuildEnvironment, BuildRequest
from bldcontrol.models import BRLayer
from bldcontrol import bbcontroller

from bb.msg import BBLogFormatter as formatter
from django.db import models
from pprint import pformat
import logging
from datetime import datetime, timedelta

from django.db import transaction, connection


# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
    pass

class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.
    """

    def __init__(self):
        self.layer_version_objects = []
        self.layer_version_built = []
        self.task_objects = {}
        self.recipe_objects = {}

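    # Build a deterministic cache key from the keyword arguments: model
    # instances contribute their database id, everything else its string value.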
    @staticmethod
    def _build_key(**kwargs):
        key = "0"
        for k in sorted(kwargs.keys()):
            if isinstance(kwargs[k], models.Model):
                key += "-%d" % kwargs[k].id
            else:
                key += "-%s" % str(kwargs[k])
        return key


    def _cached_get_or_create(self, clazz, **kwargs):
        """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
            database through any other means.
        """

        assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__
        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        created = False
        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key], created = \
                clazz.objects.get_or_create(**kwargs)

        return (vars(self)[dictname][key], created)


    def _cached_get(self, clazz, **kwargs):
        """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
        """
        assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

        key = ORMWrapper._build_key(**kwargs)
        dictname = "objects_%s" % clazz.__name__

        if not dictname in vars(self).keys():
            vars(self)[dictname] = {}

        if not key in vars(self)[dictname].keys():
            vars(self)[dictname][key] = clazz.objects.get(**kwargs)

        return vars(self)[dictname][key]

    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.
        """
        return target.get_similar_target_with_image_files()

    def get_similar_target_with_sdk_files(self, target):
        return target.get_similar_target_with_sdk_files()

    def clone_image_artifacts(self, target_from, target_to):
        target_to.clone_image_artifacts_from(target_from)

    def clone_sdk_artifacts(self, target_from, target_to):
        target_to.clone_sdk_artifacts_from(target_from)

    def _timestamp_to_datetime(self, secs):
        """
        Convert timestamp in seconds to Python datetime
        """
        return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))

    # pylint: disable=no-self-use
    # we disable detection of no self use in functions because the methods actually work on the object
    # even if they don't touch self anywhere

    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

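    # brbe is the "buildrequest:buildenvironment" id pair set for
    # Toaster-triggered builds; when it is None this is a command line build
    # recorded against the default project.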
    def get_or_create_build_object(self, brbe):
        prj = None
        buildrequest = None
        if brbe is not None:
            # Toaster-triggered build
            logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
            br, _ = brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk=br)
            prj = buildrequest.project
        else:
            # CLI build
            prj = Project.objects.get_or_create_default_project()
            logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)

        if buildrequest is not None:
            # reuse existing Build object
            build = buildrequest.build
            build.project = prj
            build.save()
        else:
            # create new Build object
            now = timezone.now()
            build = Build.objects.create(
                project=prj,
                started_on=now,
                completed_on=now,
                build_name='')

        logger.debug(1, "buildinfohelper: build is created %s" % build)

        if buildrequest is not None:
            buildrequest.build = build
            buildrequest.save()

        return build

    def update_build(self, build, data_dict):
        for key in data_dict:
            setattr(build, key, data_dict[key])
        build.save()

    @staticmethod
    def get_or_create_targets(target_info):
        """
        NB get_or_create() is used here because for Toaster-triggered builds,
        we already created the targets when the build was triggered.
        """
        result = []
        for target in target_info['targets']:
            task = ''
            if ':' in target:
                target, task = target.split(':', 1)
            if task.startswith('do_'):
                task = task[3:]
            if task == 'build':
                task = ''

            obj, _ = Target.objects.get_or_create(build=target_info['build'],
                                                  target=target,
                                                  task=task)
            result.append(obj)
        return result

    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        assert isinstance(build, Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()
        signal_runbuilds()

    def update_target_set_license_manifest(self, target, license_manifest_path):
        target.license_manifest_path = license_manifest_path
        target.save()

    def update_target_set_package_manifest(self, target, package_manifest_path):
        target.package_manifest_path = package_manifest_path
        target.save()

    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()

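    # Fetch (or create) the Task matching task_information and update any
    # fields that differ; with must_exist=True a freshly created row is an
    # error and NotExisting is raised instead.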
    def get_update_task_object(self, task_information, must_exist = False):
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                                    build=task_information['build'],
                                    recipe=task_information['recipe'],
                                    task_name=task_information['task_name'])
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                                     recipe = task_object.recipe,
                                                     task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        if object_changed:
            task_object.save()
        return task_object


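    # Fetch (or create) the Recipe for the given layer version and
    # layer-relative file path, keeping a parallel copy against the
    # build-history layer version when one exists.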
    def get_update_recipe_object(self, recipe_information, must_exist = False):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")  # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                                     file_path=recipe_information['file_path'],
                                                     pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                                                             layer_version=built_layer,
                                                             file_path=recipe_information['file_path'],
                                                             pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if recipe_information['layer_version'].build is not None and \
           recipe_information['layer_version'].build.project == \
               Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe

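    # Resolve the Layer_Version to use for this build: reuse the object
    # Toaster already knows about (snapshotting a copy for build history),
    # otherwise create one from the layer event data.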
    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
            build = build_obj,
            layer = layer_obj,
            branch = layer_version_information['branch'],
            commit = layer_version_information['commit'],
            priority = layer_version_information['priority'],
            local_path = layer_version_information['local_path'],
            project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

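    # Map the layer information from the bitbake event onto a Layer (command
    # line build, brbe is None) or onto an existing BRLayer layer version
    # (Toaster-triggered build).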
    def get_update_layer_object(self, layer_information, brbe):
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster

            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple of the same layer name or the name
                # hasn't been determined by the toaster.bbclass layer
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                        return br_layer.layer_version

                    if br_layer.local_source_dir == \
                            layer_information['local_path']:
                        return br_layer.layer_version

            # We've reached the end of our search and couldn't find the layer
            # we can continue but some data may be missing
            raise NotExisting("Unidentified layer %s" %
                              pformat(layer_information))

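    # Store the target filesystem listing: directories first (ordered by
    # depth), then regular and special files, then symlinks with their
    # resolved link targets.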
    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)
        dirs = filedata['dirs']
        files = filedata['files']
        syms = filedata['syms']

        # always create the root directory as a special case;
        # note that this is never displayed, so the owner, group,
        # size, permission are irrelevant
        tf_obj = Target_File.objects.create(target = target_obj,
                                            path = '/',
                                            size = 0,
                                            owner = '',
                                            group = '',
                                            permission = '',
                                            inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.save()

        # insert directories, ordered by name depth
        for d in sorted(dirs, key=lambda x: len(x[-1].split("/"))):
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")

            # we already created the root directory, so ignore any
            # entry for it
            if len(path) == 0:
                continue

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if len(parent_path) == 0:
                parent_path = "/"
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj = Target_File.objects.create(target = target_obj,
                                                path = path,
                                                size = size,
                                                inodetype = Target_File.ITYPE_DIRECTORY,
                                                permission = permission,
                                                owner = user,
                                                group = group,
                                                directory = parent_obj)


        # we insert files
        for d in files:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            inodetype = Target_File.ITYPE_REGULAR
            if d[0].startswith('b'):
                inodetype = Target_File.ITYPE_BLOCK
            if d[0].startswith('c'):
                inodetype = Target_File.ITYPE_CHARACTER
            if d[0].startswith('p'):
                inodetype = Target_File.ITYPE_FIFO

            tf_obj = Target_File.objects.create(target = target_obj,
                                                path = path,
                                                size = size,
                                                inodetype = inodetype,
                                                permission = permission,
                                                owner = user,
                                                group = group)
            parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
            tf_obj.directory = parent_obj
            tf_obj.save()

        # we insert symlinks
        for d in syms:
            (user, group, size) = d[1:4]
            permission = d[0][1:]
            path = d[4].lstrip(".")
            filetarget_path = d[6]

            parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
            if not filetarget_path.startswith("/"):
                # we have a relative path, get a normalized absolute one
                filetarget_path = parent_path + "/" + filetarget_path
                fcp = filetarget_path.split("/")
                fcpl = []
                for i in fcp:
                    if i == "..":
                        fcpl.pop()
                    else:
                        fcpl.append(i)
                filetarget_path = "/".join(fcpl)

            try:
                filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
            except Target_File.DoesNotExist:
                # we might have an invalid link; no way to detect this. just set it to None
                filetarget_obj = None

            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

            tf_obj = Target_File.objects.create(target = target_obj,
                                                path = path,
                                                size = size,
                                                inodetype = Target_File.ITYPE_SYMLINK,
                                                permission = permission,
                                                owner = user,
                                                group = group,
                                                directory = parent_obj,
                                                sym_target = filetarget_obj)


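    # Store per-target package data; built_package selects between Package
    # rows tied to this build and build-independent CustomImagePackage rows.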
    def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
        assert isinstance(build_obj, Build)
        assert isinstance(target_obj, Target)

        errormsg = ""
        for p in packagedict:
            # Search name switches round the installed name vs package name
            # by default installed name == package name
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but is"
                               " missing from all packages list where the"
                               " metadata comes from. Skipping...", p)
                continue

            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            built_recipe = recipes[pkgpnmap[p]['PN']]

            if built_package:
                packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
                recipe = built_recipe
            else:
                packagedict[p]['object'], created = \
                        CustomImagePackage.objects.get_or_create(name=searchname)
                # Clear the Package_Dependency objects as we're going to update
                # the CustomImagePackage with the latest dependency information
                packagedict[p]['object'].package_dependencies_target.all().delete()
                packagedict[p]['object'].package_dependencies_source.all().delete()
                try:
                    recipe = self._cached_get(
                        Recipe,
                        name=built_recipe.name,
                        layer_version__build=None,
                        layer_version__release=
                        built_recipe.layer_version.release,
                        file_path=built_recipe.file_path,
                        version=built_recipe.version
                    )
                except (Recipe.DoesNotExist,
                        Recipe.MultipleObjectsReturned) as e:
                    logger.info("We did not find one recipe for the "
                                "configuration data package %s %s" % (p, e))
                    continue

            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                # fill in everything we can from the runtime-reverse package data
                try:
                    packagedict[p]['object'].recipe = recipe
                    packagedict[p]['object'].version = pkgpnmap[p]['PV']
                    packagedict[p]['object'].installed_name = p
                    packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                    packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                    packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                    packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                    packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                    packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                    # no files recorded for this package, so save files info
                    packagefile_objects = []
                    for targetpath in pkgpnmap[p]['FILES_INFO']:
                        targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                        packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                                                                 path = targetpath,
                                                                 size = targetfilesize))
                    if len(packagefile_objects):
                        Package_File.objects.bulk_create(packagefile_objects)
                except KeyError as e:
                    errormsg += "  stpi: Key error, package %s key %s \n" % ( p, e )

            # save disk installed size
            packagedict[p]['object'].installed_size = packagedict[p]['size']
            packagedict[p]['object'].save()

            if built_package:
                Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px, deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
                    tdeptype = Package_Dependency.TYPE_TRDEPENDS
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warning("Could not add dependency to the package %s "
                                   "because %s is an unknown package", p, px)

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")

        if len(errormsg) > 0:
            logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

    def save_target_image_file_information(self, target_obj, file_name, file_size):
        Target_Image_File.objects.create(target=target_obj,
                                         file_name=file_name, file_size=file_size)

    def save_target_kernel_file(self, target_obj, file_name, file_size):
        """
        Save kernel file (bzImage, modules*) information for a Target target_obj.
        """
        TargetKernelFile.objects.create(target=target_obj,
                                        file_name=file_name, file_size=file_size)

    def save_target_sdk_file(self, target_obj, file_name, file_size):
        """
        Save SDK artifacts to the database, associating them with a
        Target object.
        """
        TargetSDKFile.objects.create(target=target_obj, file_name=file_name,
                                     file_size=file_size)

    def create_logmessage(self, log_information):
        assert 'build' in log_information
        assert 'level' in log_information
        assert 'message' in log_information

        log_object = LogMessage.objects.create(
                        build = log_information['build'],
                        level = log_information['level'],
                        message = log_information['message'])

        for v in vars(log_object):
            if v in log_information.keys():
                vars(log_object)[v] = log_information[v]

        return log_object.save()


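    # Store a single package produced by the build, including its file list
    # and runtime dependency information (RDEPENDS, RPROVIDES, RRECOMMENDS...).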
    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        # assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                logger.debug("We did not find one recipe for the configuration "
                             "data package %s" % pname)
                return

        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                                     path = path,
                                                     size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object

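    # Store the dumped bitbake configuration: help text, variable values and
    # the per-variable history of which files set them.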
    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                                                        variable_name = k,
                                                        variable_value = value,
                                                        description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                                                file_name = vh['file'],
                                                                line_number = vh['line'],
                                                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)


class MockEvent(object):
    """ This object is used to create an event, for which normal event-processing
        methods can be used, out of data that is not coming via an actual event
    """
    def __init__(self):
        self.msg = None
        self.levelno = None
        self.taskname = None
        self.taskhash = None
        self.pathname = None
        self.lineno = None

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg


class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
        towards the ORM wrapper for storing in the database
        It is instantiated once per build
        Keeps in memory all data that needs matching before writing it to the database
    """

    # tasks which produce image files; note we include '', as we set
    # the task for a target to '' (i.e. 'build') if no target is
    # explicitly defined
    IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext']

    # pylint: disable=protected-access
    # the code will look into the protected variables of the event; no easy way around this
    # pylint: disable=bad-continuation
    # we do not follow the python conventions for continuation indentation due to long lines here

    def __init__(self, server, has_build_history = False, brbe = None):
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))


    ###################
    ## methods to convert event/external info into objects that the ORM layer uses

    def _ensure_build(self):
        """
        Ensure the current build object exists and is up to date with
        data on the bitbake server
        """
        if not 'build' in self.internal_state or not self.internal_state['build']:
            # create the Build object
            self.internal_state['build'] = \
                self.orm_wrapper.get_or_create_build_object(self.brbe)

        build = self.internal_state['build']

        # update missing fields on the Build object with found data
        build_info = {}

        # set to True if at least one field is going to be set
        changed = False

        if not build.build_name:
            build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0]

            # only reset the build name if the one on the server is actually
            # a valid value for the build_name field
            if build_name != None:
                build_info['build_name'] = build_name
                changed = True

        if not build.machine:
            build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
            changed = True

        if not build.distro:
            build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
            changed = True

        if not build.distro_version:
            build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
            changed = True

        if not build.bitbake_version:
            build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
            changed = True

        if changed:
            self.orm_wrapper.update_build(self.internal_state['build'], build_info)

    def _get_task_information(self, event, recipe):
        assert 'taskname' in vars(event)
        self._ensure_build()

        task_information = {}
        task_information['build'] = self.internal_state['build']
        task_information['outcome'] = Task.OUTCOME_NA
        task_information['recipe'] = recipe
        task_information['task_name'] = event.taskname
        try:
            # some tasks don't come with a hash. and that's ok
            task_information['sstate_checksum'] = event.taskhash
        except AttributeError:
            pass
        return task_information

    def _get_layer_version_for_dependency(self, pathRE):
        """ Returns the layer in the toaster db that has a full regex
        match to the pathRE. pathRE - the layer path passed as a regex in the
        event. It is created in cooker.py as a collection for the layer
        priorities.
        """
        self._ensure_build()

        def _sort_longest_path(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Our paths don't append a trailing slash
        if pathRE.endswith("/"):
            pathRE = pathRE[:-1]

        p = re.compile(pathRE)
        path = re.sub(r'[$^]', r'', pathRE)
        # Heuristics: we always match recipe to the deepest layer path in
        # the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects,
                          reverse=True, key=_sort_longest_path):
            if p.fullmatch(os.path.abspath(lvo.local_path)):
                return lvo
            if lvo.layer.local_source_dir:
                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
                    return lvo
            if 0 == path.find(lvo.local_path):
                # sub-layer path inside existing layer
                return lvo

        # if we get here, we didn't read layers correctly;
        # dump whatever information we have on the error log
        logger.warning("Could not match layer dependency for path %s : %s",
                       pathRE,
                       self.orm_wrapper.layer_version_objects)
        return None

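    # Map a recipe file path onto a known layer version; unknown paths fall
    # back to a placeholder "Unidentified layer" entry.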
    def _get_layer_version_for_path(self, path):
        self._ensure_build()

        def _slkey_interactive(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo
            if lvo.layer.local_source_dir and \
               path.startswith(lvo.layer.local_source_dir):
                return lvo

        # if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        # mock up the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj

    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
        filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(localfilepath)

        recipe_info = {}
        recipe_info['layer_version'] = layer_version_obj
        recipe_info['file_path'] = localfilepath
        recipe_info['pathflags'] = filepath_flags

        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        return recipe_info

    def _get_path_information(self, task_object):
        self._ensure_build()

        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            pe, pv = task_object.recipe.version.split(":", 1)
            if len(pe) > 0:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                              buildname=buildname,
                                                              package=package))

        return build_stats_path


    ################################
    ## externally available methods to store information
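    # Event payloads may be carried either in the _localdata attribute or in
    # the data attribute, depending on how the event was delivered.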
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata or data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)

    def store_started_build(self):
        self._ensure_build()

    def save_build_log_file_path(self, build_log_path):
        self._ensure_build()

        if not self.internal_state['build'].cooker_log_path:
            data_dict = {'cooker_log_path': build_log_path}
            self.orm_wrapper.update_build(self.internal_state['build'], data_dict)

    def save_build_targets(self, event):
        self._ensure_build()

        # create target information
        assert '_pkgs' in vars(event)
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = self.internal_state['build']

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

    def save_build_layers_and_variables(self):
        self._ensure_build()

        build_obj = self.internal_state['build']

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x: len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                # preserve layer name in relative path
                                vh['file'] = abs_file_name[pp.rfind("/")+1:]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe

    def set_recipes_to_parse(self, num_recipes):
        """
        Set the number of recipes which need to be parsed for this build.
        This is set the first time ParseStarted is received by toasterui.
        """
        self._ensure_build()
        self.internal_state['build'].recipes_to_parse = num_recipes
        self.internal_state['build'].save()

    def set_recipes_parsed(self, num_recipes):
        """
        Set the number of recipes parsed so far for this build; this is updated
        each time a ParseProgress or ParseCompleted event is received by
        toasterui.
        """
        self._ensure_build()
        if num_recipes <= self.internal_state['build'].recipes_to_parse:
            self.internal_state['build'].recipes_parsed = num_recipes
            self.internal_state['build'].save()

    def update_target_image_file(self, event):
        evdata = BuildInfoHelper._get_data_from_event(event)

        for t in self.internal_state['targets']:
            if t.is_image == True:
                output_files = list(evdata.keys())
                for output in output_files:
                    if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
                        self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])

    def update_artifact_image_file(self, event):
        self._ensure_build()
        evdata = BuildInfoHelper._get_data_from_event(event)
        for artifact_path in evdata.keys():
            self.orm_wrapper.save_artifact_information(
                self.internal_state['build'], artifact_path,
                evdata[artifact_path])

    def update_build_information(self, event, errors, warnings, taskfailures):
        self._ensure_build()
        self.orm_wrapper.update_build_stats_and_outcome(
            self.internal_state['build'], errors, warnings, taskfailures)

    def store_started_task(self, event):
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            if 'noexec' in vars(event) and event.noexec == True:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        self.internal_state['taskdata'][identifier] = {
            'outcome': task_information['outcome'],
        }


    def store_tasks_stats(self, event):
        self._ensure_build()
        task_data = BuildInfoHelper._get_data_from_event(event)

        for (task_file, task_name, task_stats, recipe_name) in task_data:
            build = self.internal_state['build']
            self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)

1260 def update_and_store_task(self, event):
1261 assert 'taskfile' in vars(event)
1262 localfilepath = event.taskfile.split(":")[-1]
1263 assert localfilepath.startswith("/")
1264
1265 identifier = event.taskfile + ":" + event.taskname
1266        if identifier not in self.internal_state['taskdata']:
1267 if isinstance(event, bb.build.TaskBase):
1268 # we do a bit of guessing
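                # keys stored in taskdata may carry an extra class prefix
                # (illustratively, "native:/.../recipe.bb:do_compile"), so the
                # unprefixed identifier is matched by suffix below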
1269 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1270 if len(candidates) == 1:
1271 identifier = candidates[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001272 elif len(candidates) > 1 and hasattr(event,'_package'):
1273 if 'native-' in event._package:
1274 identifier = 'native:' + identifier
1275 if 'nativesdk-' in event._package:
1276 identifier = 'nativesdk:' + identifier
1277 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
1278 if len(candidates) == 1:
1279 identifier = candidates[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001280
1281 assert identifier in self.internal_state['taskdata']
1282 identifierlist = identifier.split(":")
1283 realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
1284 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
1285 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
1286 task_information = self._get_task_information(event,recipe)
1287
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001288 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
1289
1290 if 'logfile' in vars(event):
1291 task_information['logfile'] = event.logfile
1292
1293 if '_message' in vars(event):
1294 task_information['message'] = event._message
1295
1296 if 'taskflags' in vars(event):
1297 # with TaskStarted, we get even more information
1298 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
1299 task_information['script_type'] = Task.CODING_PYTHON
1300 else:
1301 task_information['script_type'] = Task.CODING_SHELL
1302
1303 if task_information['outcome'] == Task.OUTCOME_NA:
1304 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
1305 task_information['outcome'] = Task.OUTCOME_SUCCESS
1306 del self.internal_state['taskdata'][identifier]
1307
1308 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
1309 task_information['outcome'] = Task.OUTCOME_FAILED
1310 del self.internal_state['taskdata'][identifier]
1311
1312 if not connection.features.autocommits_when_autocommit_is_off:
1313 # we force a sync point here, to get the progress bar to show
1314 if self.autocommit_step % 3 == 0:
1315 transaction.set_autocommit(True)
1316 transaction.set_autocommit(False)
1317 self.autocommit_step += 1
1318
1319 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1320
1321
1322 def store_missed_state_tasks(self, event):
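        # each entry under 'missed' and 'found' below is a
        # (fn, taskname, taskhash, sstatefile) tuple, where fn is the recipe
        # file, taskname is e.g. "do_populate_sysroot", taskhash is the task
        # signature and sstatefile is the path to the sstate object
        # (illustrative summary based on how the fields are used below)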
1323 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
1324
1325 # identifier = fn + taskname + "_setscene"
1326 recipe_information = self._get_recipe_information_from_taskfile(fn)
1327 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1328 mevent = MockEvent()
1329 mevent.taskname = taskname
1330 mevent.taskhash = taskhash
1331 task_information = self._get_task_information(mevent,recipe)
1332
1333 task_information['start_time'] = timezone.now()
1334 task_information['outcome'] = Task.OUTCOME_NA
1335 task_information['sstate_checksum'] = taskhash
1336 task_information['sstate_result'] = Task.SSTATE_MISS
1337 task_information['path_to_sstate_obj'] = sstatefile
1338
1339 self.orm_wrapper.get_update_task_object(task_information)
1340
1341 for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
1342
1343 # identifier = fn + taskname + "_setscene"
1344 recipe_information = self._get_recipe_information_from_taskfile(fn)
1345 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
1346 mevent = MockEvent()
1347 mevent.taskname = taskname
1348 mevent.taskhash = taskhash
1349 task_information = self._get_task_information(mevent,recipe)
1350
1351 task_information['path_to_sstate_obj'] = sstatefile
1352
1353 self.orm_wrapper.get_update_task_object(task_information)
1354
1355
1356 def store_target_package_data(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001357 self._ensure_build()
1358
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001359 # for all image targets
1360 for target in self.internal_state['targets']:
1361 if target.is_image:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001362 pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001363 imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
1364 filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001365
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001366 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001367 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
1368 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001369 except KeyError as e:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001370                    logger.warning("KeyError in save_target_package_information: "
1371                                   "%s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001372
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001373 # only try to find files in the image if the task for this
1374 # target is one which produces image files; otherwise, the old
1375 # list of files in the files-in-image.txt file will be
1376 # appended to the target even if it didn't produce any images
1377 if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS:
1378 try:
1379 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
1380 except KeyError as e:
1381                        logger.warning("KeyError in save_target_file_information: "
1382                                       "%s", e)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001383
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001384
1385
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001386 def cancel_cli_build(self):
1387 """
1388 If a build is currently underway, set its state to CANCELLED;
1389 note that this only gets called for command line builds which are
1390 interrupted, so it doesn't touch any BuildRequest objects
1391 """
1392 self._ensure_build()
1393 self.internal_state['build'].outcome = Build.CANCELLED
1394 self.internal_state['build'].save()
1395 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001396
1397 def store_dependency_information(self, event):
1398 assert '_depgraph' in vars(event)
1399 assert 'layer-priorities' in event._depgraph
1400 assert 'pn' in event._depgraph
1401 assert 'tdepends' in event._depgraph
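        # rough shape of the depgraph data used below (illustrative, inferred
        # from the accesses made in this method):
        #   event._depgraph['pn'][<pn>] -> {'filename': ..., 'version': ...,
        #       'inherits': [...], ...}
        #   event._depgraph['depends'][<pn>] -> [<dependency pn or provider>, ...]
        #   event._depgraph['tdepends'][<pn>.<task>] -> [<pn>.<task>, ...]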
1402
1403 errormsg = ""
1404
1405 # save layer version priorities
1406 if 'layer-priorities' in event._depgraph.keys():
1407 for lv in event._depgraph['layer-priorities']:
1408 (_, path, _, priority) = lv
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001409 layer_version_obj = self._get_layer_version_for_dependency(path)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001410 if layer_version_obj:
1411 layer_version_obj.priority = priority
1412 layer_version_obj.save()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001413
1414 # save recipe information
1415 self.internal_state['recipes'] = {}
1416 for pn in event._depgraph['pn']:
1417
1418 file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
1419 pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
1420 layer_version_obj = self._get_layer_version_for_path(file_name)
1421
1422 assert layer_version_obj is not None
1423
1424 recipe_info = {}
1425 recipe_info['name'] = pn
1426 recipe_info['layer_version'] = layer_version_obj
1427
1428 if 'version' in event._depgraph['pn'][pn]:
1429 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
1430
1431 if 'summary' in event._depgraph['pn'][pn]:
1432 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
1433
1434 if 'license' in event._depgraph['pn'][pn]:
1435 recipe_info['license'] = event._depgraph['pn'][pn]['license']
1436
1437 if 'description' in event._depgraph['pn'][pn]:
1438 recipe_info['description'] = event._depgraph['pn'][pn]['description']
1439
1440 if 'section' in event._depgraph['pn'][pn]:
1441 recipe_info['section'] = event._depgraph['pn'][pn]['section']
1442
1443 if 'homepage' in event._depgraph['pn'][pn]:
1444 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
1445
1446 if 'bugtracker' in event._depgraph['pn'][pn]:
1447 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
1448
1449 recipe_info['file_path'] = file_name
1450 recipe_info['pathflags'] = pathflags
1451
1452 if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
1453 recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
1454 else:
1455 raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
1456
1457 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
1458 recipe.is_image = False
1459 if 'inherits' in event._depgraph['pn'][pn].keys():
1460 for cls in event._depgraph['pn'][pn]['inherits']:
1461 if cls.endswith('/image.bbclass'):
1462 recipe.is_image = True
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001463 recipe_info['is_image'] = True
1464 # Save the is_image state to the relevant recipe objects
1465 self.orm_wrapper.get_update_recipe_object(recipe_info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001466 break
1467 if recipe.is_image:
1468 for t in self.internal_state['targets']:
1469 if pn == t.target:
1470 t.is_image = True
1471 t.save()
1472 self.internal_state['recipes'][pn] = recipe
1473
1474        # we won't get recipes for keys whose values are listed in ASSUME_PROVIDED
1475
1476 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
1477
1478 # save recipe dependency
1479 # buildtime
1480 recipedeps_objects = []
1481 for recipe in event._depgraph['depends']:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001482 target = self.internal_state['recipes'][recipe]
1483 for dep in event._depgraph['depends'][recipe]:
1484 if dep in assume_provided:
1485 continue
1486 via = None
1487 if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
1488 deprecipe = event._depgraph['providermap'][dep][0]
1489 dependency = self.internal_state['recipes'][deprecipe]
1490 via = Provides.objects.get_or_create(name=dep,
1491 recipe=dependency)[0]
1492 elif dep in self.internal_state['recipes']:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001493 dependency = self.internal_state['recipes'][dep]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001494 else:
1495 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
1496 continue
1497 recipe_dep = Recipe_Dependency(recipe=target,
1498 depends_on=dependency,
1499 via=via,
1500 dep_type=Recipe_Dependency.TYPE_DEPENDS)
1501 recipedeps_objects.append(recipe_dep)
1502
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001503 Recipe_Dependency.objects.bulk_create(recipedeps_objects)
1504
1505 # save all task information
1506 def _save_a_task(taskdesc):
1507 spec = re.split(r'\.', taskdesc)
1508 pn = ".".join(spec[0:-1])
1509 taskname = spec[-1]
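            # e.g. (illustrative) taskdesc "gtk+3.0.do_configure" gives
            # pn "gtk+3.0" and taskname "do_configure"; joining on "." keeps
            # recipe names that themselves contain dots intact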
1510 e = event
1511 e.taskname = pn
1512 recipe = self.internal_state['recipes'][pn]
1513 task_info = self._get_task_information(e, recipe)
1514 task_info['task_name'] = taskname
1515 task_obj = self.orm_wrapper.get_update_task_object(task_info)
1516 return task_obj
1517
1518 # create tasks
1519 tasks = {}
1520 for taskdesc in event._depgraph['tdepends']:
1521 tasks[taskdesc] = _save_a_task(taskdesc)
1522
1523 # create dependencies between tasks
1524 taskdeps_objects = []
1525 for taskdesc in event._depgraph['tdepends']:
1526 target = tasks[taskdesc]
1527 for taskdep in event._depgraph['tdepends'][taskdesc]:
1528 if taskdep not in tasks:
1529                    # task info was not collected previously, so save it now
1530 dep = _save_a_task(taskdep)
1531 else:
1532 dep = tasks[taskdep]
1533 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1534 Task_Dependency.objects.bulk_create(taskdeps_objects)
1535
1536 if len(errormsg) > 0:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001537            logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", errormsg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001538
1539
1540 def store_build_package_information(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001541 self._ensure_build()
1542
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001543 package_info = BuildInfoHelper._get_data_from_event(event)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001544 self.orm_wrapper.save_build_package_information(
1545 self.internal_state['build'],
1546 package_info,
1547 self.internal_state['recipes'],
1548 built_package=True)
1549
1550 self.orm_wrapper.save_build_package_information(
1551 self.internal_state['build'],
1552 package_info,
1553 self.internal_state['recipes'],
1554 built_package=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555
1556 def _store_build_done(self, errorcode):
1557 logger.info("Build exited with errorcode %d", errorcode)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001558
1559 if not self.brbe:
1560 return
1561
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001562 br_id, be_id = self.brbe.split(":")
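        # brbe is a "<build request id>:<build environment id>" pair,
        # e.g. (illustrative) "42:1"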
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001563
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001564 br = BuildRequest.objects.get(pk = br_id)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001565
1566 # if we're 'done' because we got cancelled update the build outcome
1567 if br.state == BuildRequest.REQ_CANCELLING:
1568 logger.info("Build cancelled")
1569 br.build.outcome = Build.CANCELLED
1570 br.build.save()
1571 self.internal_state['build'] = br.build
1572 errorcode = 0
1573
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001574 if errorcode == 0:
1575 # request archival of the project artifacts
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001576 br.state = BuildRequest.REQ_COMPLETED
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001577 else:
1578 br.state = BuildRequest.REQ_FAILED
1579 br.save()
1580
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001581 be = BuildEnvironment.objects.get(pk = be_id)
1582 be.lock = BuildEnvironment.LOCK_FREE
1583 be.save()
1584 signal_runbuilds()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001585
1586 def store_log_error(self, text):
1587 mockevent = MockEvent()
1588 mockevent.levelno = formatter.ERROR
1589 mockevent.msg = text
1590 mockevent.pathname = '-- None'
1591 mockevent.lineno = LogMessage.ERROR
1592 self.store_log_event(mockevent)
1593
1594 def store_log_exception(self, text, backtrace = ""):
1595 mockevent = MockEvent()
1596 mockevent.levelno = -1
1597 mockevent.msg = text
1598 mockevent.pathname = backtrace
1599 mockevent.lineno = -1
1600 self.store_log_event(mockevent)
1601
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001602 def store_log_event(self, event):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001603 self._ensure_build()
1604
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001605 if event.levelno < formatter.WARNING:
1606 return
1607
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001608 # early return for CLI builds
1609 if self.brbe is None:
1610            if 'backlog' not in self.internal_state:
1611 self.internal_state['backlog'] = []
1612 self.internal_state['backlog'].append(event)
1613 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001614
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001615 if 'backlog' in self.internal_state:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001616 # if we have a backlog of events, do our best to save them here
1617 if len(self.internal_state['backlog']):
1618 tempevent = self.internal_state['backlog'].pop()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001619                logger.debug(1, "buildinfohelper: Saving stored event %s",
1620                             tempevent)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001621 self.store_log_event(tempevent)
1622 else:
1623 logger.info("buildinfohelper: All events saved")
1624 del self.internal_state['backlog']
1625
1626 log_information = {}
1627 log_information['build'] = self.internal_state['build']
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001628 if event.levelno == formatter.CRITICAL:
1629 log_information['level'] = LogMessage.CRITICAL
1630 elif event.levelno == formatter.ERROR:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001631 log_information['level'] = LogMessage.ERROR
1632 elif event.levelno == formatter.WARNING:
1633 log_information['level'] = LogMessage.WARNING
1634 elif event.levelno == -2: # toaster self-logging
1635 log_information['level'] = -2
1636 else:
1637 log_information['level'] = LogMessage.INFO
1638
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001639 log_information['message'] = event.getMessage()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001640 log_information['pathname'] = event.pathname
1641 log_information['lineno'] = event.lineno
1642        logger.info("Storing log message: %s", log_information)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001643
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644 self.orm_wrapper.create_logmessage(log_information)
1645
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001646 def _get_filenames_from_image_license(self, image_license_manifest_path):
1647 """
1648 Find the FILES line in the image_license.manifest file,
1649 which has the basenames of the bzImage and modules files
1650 in this format:
1651 FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
1652 """
1653 files = []
1654 with open(image_license_manifest_path) as image_license:
1655 for line in image_license:
1656 if line.startswith('FILES'):
1657 files_str = line.split(':')[1].strip()
1658 files_str = re.sub(r' {2,}', ' ', files_str)
1659
1660 # ignore lines like "FILES:" with no filenames
1661 if files_str:
1662 files += files_str.split(' ')
1663 return files
1664
1665 def _endswith(self, str_to_test, endings):
1666 """
1667 Returns True if str ends with one of the strings in the list
1668 endings, False otherwise
1669 """
1670 endswith = False
1671 for ending in endings:
1672 if str_to_test.endswith(ending):
1673 endswith = True
1674 break
1675 return endswith
1676
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001677 def scan_task_artifacts(self, event):
1678 """
1679 The 'TaskArtifacts' event passes the manifest file content for the
1680 tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
1681 'do_populate_sdk_ext'. The first two will be implemented later.
1682 """
1683 task_vars = BuildInfoHelper._get_data_from_event(event)
1684 task_name = task_vars['task'][task_vars['task'].find(':')+1:]
1685 task_artifacts = task_vars['artifacts']
1686
1687 if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
1688 targets = [target for target in self.internal_state['targets'] \
1689 if target.task == task_name[3:]]
1690 if not targets:
1691 logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name)
1692 return
1693 for artifact_path in task_artifacts:
1694 if not os.path.isfile(artifact_path):
1695 logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path)
1696 continue
1697 for target in targets:
1698 # don't record the file if it's already been added
1699 # to this target
1700 matching_files = TargetSDKFile.objects.filter(
1701 target=target, file_name=artifact_path)
1702 if matching_files.count() == 0:
1703 artifact_size = os.stat(artifact_path).st_size
1704 self.orm_wrapper.save_target_sdk_file(
1705 target, artifact_path, artifact_size)
1706
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001707 def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
1708 """
1709 Find files in deploy_dir_image whose basename starts with the
1710 string image_name and ends with one of the strings in
1711 image_file_extensions.
1712
1713 Returns a list of file dictionaries like
1714
1715 [
1716 {
1717 'path': '/path/to/image/file',
1718 'size': <file size in bytes>
1719 }
1720 ]
1721 """
1722 image_files = []
1723
1724 for dirpath, _, filenames in os.walk(deploy_dir_image):
1725 for filename in filenames:
1726 if filename.startswith(image_name) and \
1727 self._endswith(filename, image_file_extensions):
1728 image_file_path = os.path.join(dirpath, filename)
1729 image_file_size = os.stat(image_file_path).st_size
1730
1731 image_files.append({
1732 'path': image_file_path,
1733 'size': image_file_size
1734 })
1735
1736 return image_files
1737
1738 def scan_image_artifacts(self):
1739 """
1740 Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them
1741 with a Target object in self.internal_state['targets'].
1742
1743 We have two situations to handle:
1744
1745 1. This is the first time a target + machine has been built, so
1746 add files from the DEPLOY_DIR_IMAGE to the target.
1747
1748 OR
1749
1750 2. There are no new files for the target (they were already produced by
1751 a previous build), so copy them from the most recent previous build with
1752 the same target, task and machine.
1753 """
1754 deploy_dir_image = \
1755 self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]
1756
1757 # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
1758 # any image artifacts, so we can return immediately
1759 if not deploy_dir_image:
1760 return
1761
1762 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1763 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1764 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1765
1766 # location of the manifest files for this build;
1767        # note that these manifests are only produced if an image is produced
1768 license_directory = \
1769 self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]
1770
1771 # file name extensions for image files
1772 image_file_extensions_unique = {}
1773 image_fstypes = self.server.runCommand(
1774 ['getVariable', 'IMAGE_FSTYPES'])[0]
1775        if image_fstypes is not None:
1776 image_types_str = image_fstypes.strip()
1777 image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
1778 image_file_extensions_unique = set(image_file_extensions.split(' '))
1779
1780 targets = self.internal_state['targets']
1781
1782 # filter out anything which isn't an image target
1783 image_targets = [target for target in targets if target.is_image]
1784
1785 for image_target in image_targets:
1786 # this is set to True if we find at least one file relating to
1787 # this target; if this remains False after the scan, we copy the
1788 # files from the most-recent Target with the same target + machine
1789 # onto this Target instead
1790 has_files = False
1791
1792 # we construct this because by the time we reach
1793 # BuildCompleted, this has reset to
1794 # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
1795 # we need to change it to
1796 # <TARGET>-<MACHINE>-<BUILDNAME>
1797 real_image_name = re.sub(r'^defaultpkgname', image_target.target,
1798 image_name)
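            # e.g. (illustrative) "defaultpkgname-qemux86-20160603165040"
            # becomes "core-image-minimal-qemux86-20160603165040" for a
            # core-image-minimal target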
1799
1800 image_license_manifest_path = os.path.join(
1801 license_directory,
1802 real_image_name,
1803 'image_license.manifest')
1804
1805 image_package_manifest_path = os.path.join(
1806 license_directory,
1807 real_image_name,
1808 'image_license.manifest')
1809
1810 # if image_license.manifest exists, we can read the names of
1811 # bzImage, modules etc. files for this build from it, then look for
1812 # them in the DEPLOY_DIR_IMAGE; note that this file is only produced
1813 # if an image file was produced
1814 if os.path.isfile(image_license_manifest_path):
1815 has_files = True
1816
1817 basenames = self._get_filenames_from_image_license(
1818 image_license_manifest_path)
1819
1820 for basename in basenames:
1821 artifact_path = os.path.join(deploy_dir_image, basename)
1822 if not os.path.exists(artifact_path):
1823                        logger.warning("artifact %s doesn't exist, skipping", artifact_path)
1824 continue
1825 artifact_size = os.stat(artifact_path).st_size
1826
1827 # note that the artifact will only be saved against this
1828 # build if it hasn't been already
1829 self.orm_wrapper.save_target_kernel_file(image_target,
1830 artifact_path, artifact_size)
1831
1832 # store the license manifest path on the target
1833 # (this file is also created any time an image file is created)
1834 license_manifest_path = os.path.join(license_directory,
1835 real_image_name, 'license.manifest')
1836
1837 self.orm_wrapper.update_target_set_license_manifest(
1838 image_target, license_manifest_path)
1839
1840 # store the package manifest path on the target (this file
1841 # is created any time an image file is created)
1842 package_manifest_path = os.path.join(deploy_dir_image,
1843 real_image_name + '.rootfs.manifest')
1844
1845 if os.path.exists(package_manifest_path):
1846 self.orm_wrapper.update_target_set_package_manifest(
1847 image_target, package_manifest_path)
1848
1849 # scan the directory for image files relating to this build
1850 # (via real_image_name); note that we don't have to set
1851 # has_files = True, as searching for the license manifest file
1852 # will already have set it to true if at least one image file was
1853 # produced; note that the real_image_name includes BUILDNAME, which
1854 # in turn includes a timestamp; so if no files were produced for
1855 # this timestamp (i.e. the build reused existing image files already
1856 # in the directory), no files will be recorded against this target
1857 image_files = self._get_image_files(deploy_dir_image,
1858 real_image_name, image_file_extensions_unique)
1859
1860 for image_file in image_files:
1861 self.orm_wrapper.save_target_image_file_information(
1862 image_target, image_file['path'], image_file['size'])
1863
1864 if not has_files:
1865 # copy image files and build artifacts from the
1866 # most-recently-built Target with the
1867 # same target + machine as this Target; also copy the license
1868 # manifest path, as that is not treated as an artifact and needs
1869 # to be set separately
1870 similar_target = \
1871 self.orm_wrapper.get_similar_target_with_image_files(
1872 image_target)
1873
1874 if similar_target:
1875 logger.info('image artifacts for target %s cloned from ' \
1876 'target %s' % (image_target.pk, similar_target.pk))
1877 self.orm_wrapper.clone_image_artifacts(similar_target,
1878 image_target)
1879
1880 def _get_sdk_targets(self):
1881 """
1882 Return targets which could generate SDK artifacts, i.e.
1883 "do_populate_sdk" and "do_populate_sdk_ext".
1884 """
1885 return [target for target in self.internal_state['targets'] \
1886 if target.task in ['populate_sdk', 'populate_sdk_ext']]
1887
1888 def scan_sdk_artifacts(self, event):
1889 """
1890 Note that we have to intercept an SDKArtifactInfo event from
1891 toaster.bbclass (via toasterui) to get hold of the SDK variables we
1892 need to be able to scan for files accurately: this is because
1893 variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time
1894 BuildCompleted is fired by bitbake, so we have to get those values
1895 while the build is still in progress.
1896
1897 For populate_sdk_ext, this runs twice, with two different
1898 TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the
1899 files in the SDK output directory.
1900 """
1901 sdk_vars = BuildInfoHelper._get_data_from_event(event)
1902 toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME']
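        # TOOLCHAIN_OUTPUTNAME is the filename prefix of the SDK installer,
        # e.g. (illustrative)
        # "poky-glibc-x86_64-core-image-sato-i586-toolchain-2.1+snapshot"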
1903
1904 # targets which might have created SDK artifacts
1905 sdk_targets = self._get_sdk_targets()
1906
1907 # location of SDK artifacts
1908 tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0]
1909 sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk')
1910
1911 # all files in the SDK directory
1912 artifacts = []
1913 for dir_path, _, filenames in os.walk(sdk_dir):
1914 for filename in filenames:
1915 full_path = os.path.join(dir_path, filename)
1916 if not os.path.islink(full_path):
1917 artifacts.append(full_path)
1918
1919 for sdk_target in sdk_targets:
1920 # find files in the SDK directory which haven't already been
1921 # recorded against a Target and whose basename matches
1922 # TOOLCHAIN_OUTPUTNAME
1923 for artifact_path in artifacts:
1924 basename = os.path.basename(artifact_path)
1925
1926 toolchain_match = basename.startswith(toolchain_outputname)
1927
1928 # files which match the name of the target which produced them;
1929 # for example,
1930 # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh
1931 target_match = re.search(sdk_target.target, basename)
1932
1933 # targets which produce "*-nativesdk-*" files
1934 is_ext_sdk_target = sdk_target.task in \
1935 ['do_populate_sdk_ext', 'populate_sdk_ext']
1936
1937 # SDK files which don't match the target name, i.e.
1938 # x86_64-nativesdk-libc.*
1939 # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot*
1940 is_ext_sdk_file = re.search('-nativesdk-', basename)
1941
1942 file_from_target = (toolchain_match and target_match) or \
1943 (is_ext_sdk_target and is_ext_sdk_file)
1944
1945 if file_from_target:
1946 # don't record the file if it's already been added to this
1947 # target
1948 matching_files = TargetSDKFile.objects.filter(
1949 target=sdk_target, file_name=artifact_path)
1950
1951 if matching_files.count() == 0:
1952 artifact_size = os.stat(artifact_path).st_size
1953
1954 self.orm_wrapper.save_target_sdk_file(
1955 sdk_target, artifact_path, artifact_size)
1956
1957 def clone_required_sdk_artifacts(self):
1958 """
1959 If an SDK target doesn't have any SDK artifacts, this means that
1960 the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which
1961 in turn means that the targets of this build didn't generate any new
1962 artifacts.
1963
1964 In this case, clone SDK artifacts for targets in the current build
1965 from existing targets for this build.
1966 """
1967 sdk_targets = self._get_sdk_targets()
1968 for sdk_target in sdk_targets:
1969 # only clone for SDK targets which have no TargetSDKFiles yet
1970 if sdk_target.targetsdkfile_set.all().count() == 0:
1971 similar_target = \
1972 self.orm_wrapper.get_similar_target_with_sdk_files(
1973 sdk_target)
1974 if similar_target:
1975 logger.info('SDK artifacts for target %s cloned from ' \
1976 'target %s' % (sdk_target.pk, similar_target.pk))
1977 self.orm_wrapper.clone_sdk_artifacts(similar_target,
1978 sdk_target)
1979
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001980 def close(self, errorcode):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001981 self._store_build_done(errorcode)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001982
1983 if 'backlog' in self.internal_state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001984 # we save missed events in the database for the current build
1985 tempevent = self.internal_state['backlog'].pop()
1986 self.store_log_event(tempevent)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001987
1988 if not connection.features.autocommits_when_autocommit_is_off:
1989 transaction.set_autocommit(True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001990
1991 # unset the brbe; this is to prevent subsequent command-line builds
1992 # being incorrectly attached to the previous Toaster-triggered build;
1993 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
1994 self.brbe = None
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001995
1996 # unset the internal Build object to prevent it being reused for the
1997 # next build
1998 self.internal_state['build'] = None