| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1 | # | 
|  | 2 | # BitBake ToasterUI Implementation | 
|  | 3 | # | 
|  | 4 | # Copyright (C) 2013        Intel Corporation | 
|  | 5 | # | 
| Brad Bishop | c342db3 | 2019-05-15 21:57:59 -0400 | [diff] [blame] | 6 | # SPDX-License-Identifier: GPL-2.0-only | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 7 | # | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 8 |  | 
|  | 9 | import sys | 
|  | 10 | import bb | 
|  | 11 | import re | 
|  | 12 | import os | 
|  | 13 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 14 | import django | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 15 | from django.utils import timezone | 
|  | 16 |  | 
import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

# Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
    os.environ.get("DJANGO_SETTINGS_MODULE",
                   "toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
django.setup()
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 28 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 29 | from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 30 | from orm.models import Target_Image_File, TargetKernelFile, TargetSDKFile | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 31 | from orm.models import Variable, VariableHistory | 
|  | 32 | from orm.models import Package, Package_File, Target_Installed_Package, Target_File | 
|  | 33 | from orm.models import Task_Dependency, Package_Dependency | 
|  | 34 | from orm.models import Recipe_Dependency, Provides | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 35 | from orm.models import Project, CustomImagePackage | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 36 | from orm.models import signal_runbuilds | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 37 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 38 | from bldcontrol.models import BuildEnvironment, BuildRequest | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 39 | from bldcontrol.models import BRLayer | 
|  | 40 | from bldcontrol import bbcontroller | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 41 |  | 
|  | 42 | from bb.msg import BBLogFormatter as formatter | 
|  | 43 | from django.db import models | 
|  | 44 | from pprint import pformat | 
|  | 45 | import logging | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 46 | from datetime import datetime, timedelta | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 47 |  | 
| Patrick Williams | 03907ee | 2022-05-01 06:28:52 -0500 | [diff] [blame] | 48 | from django.db import transaction | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 49 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 50 |  | 
# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
# NOTE: all ToasterUI modules share this logger so build events are
# collected in one stream
logger = logging.getLogger("ToasterLogger")
|  | 54 |  | 
class NotExisting(Exception):
    """Raised when a database object that was expected to exist is missing
    (or, conversely, was created when it should already have been there)."""
|  | 57 |  | 
|  | 58 | class ORMWrapper(object): | 
|  | 59 | """ This class creates the dictionaries needed to store information in the database | 
|  | 60 | following the format defined by the Django models. It is also used to save this | 
|  | 61 | information in the database. | 
|  | 62 | """ | 
|  | 63 |  | 
|  | 64 | def __init__(self): | 
|  | 65 | self.layer_version_objects = [] | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 66 | self.layer_version_built = [] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 67 | self.task_objects = {} | 
|  | 68 | self.recipe_objects = {} | 
|  | 69 |  | 
|  | 70 | @staticmethod | 
|  | 71 | def _build_key(**kwargs): | 
|  | 72 | key = "0" | 
|  | 73 | for k in sorted(kwargs.keys()): | 
|  | 74 | if isinstance(kwargs[k], models.Model): | 
|  | 75 | key += "-%d" % kwargs[k].id | 
|  | 76 | else: | 
|  | 77 | key += "-%s" % str(kwargs[k]) | 
|  | 78 | return key | 
|  | 79 |  | 
|  | 80 |  | 
|  | 81 | def _cached_get_or_create(self, clazz, **kwargs): | 
|  | 82 | """ This is a memory-cached get_or_create. We assume that the objects will not be created in the | 
|  | 83 | database through any other means. | 
|  | 84 | """ | 
|  | 85 |  | 
|  | 86 | assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument" | 
|  | 87 |  | 
|  | 88 | key = ORMWrapper._build_key(**kwargs) | 
|  | 89 | dictname = "objects_%s" % clazz.__name__ | 
|  | 90 | if not dictname in vars(self).keys(): | 
|  | 91 | vars(self)[dictname] = {} | 
|  | 92 |  | 
|  | 93 | created = False | 
|  | 94 | if not key in vars(self)[dictname].keys(): | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 95 | vars(self)[dictname][key], created = \ | 
|  | 96 | clazz.objects.get_or_create(**kwargs) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 97 |  | 
|  | 98 | return (vars(self)[dictname][key], created) | 
|  | 99 |  | 
|  | 100 |  | 
|  | 101 | def _cached_get(self, clazz, **kwargs): | 
|  | 102 | """ This is a memory-cached get. We assume that the objects will not change  in the database between gets. | 
|  | 103 | """ | 
|  | 104 | assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument" | 
|  | 105 |  | 
|  | 106 | key = ORMWrapper._build_key(**kwargs) | 
|  | 107 | dictname = "objects_%s" % clazz.__name__ | 
|  | 108 |  | 
|  | 109 | if not dictname in vars(self).keys(): | 
|  | 110 | vars(self)[dictname] = {} | 
|  | 111 |  | 
|  | 112 | if not key in vars(self)[dictname].keys(): | 
|  | 113 | vars(self)[dictname][key] = clazz.objects.get(**kwargs) | 
|  | 114 |  | 
|  | 115 | return vars(self)[dictname][key] | 
|  | 116 |  | 
    def get_similar_target_with_image_files(self, target):
        """
        Get a Target object "similar" to target; i.e. with the same target
        name ('core-image-minimal' etc.) and machine.

        Thin delegation to the Target model, which owns the lookup logic;
        presumably returns None when no similar target exists — confirm in
        orm.models.Target.
        """
        return target.get_similar_target_with_image_files()
|  | 123 |  | 
    def get_similar_target_with_sdk_files(self, target):
        """Get a Target "similar" to target that has SDK files attached,
        delegating to Target.get_similar_target_with_sdk_files()."""
        return target.get_similar_target_with_sdk_files()
|  | 126 |  | 
    def clone_image_artifacts(self, target_from, target_to):
        """Copy the image artifact records of target_from onto target_to;
        the Target model owns the actual cloning logic."""
        target_to.clone_image_artifacts_from(target_from)
|  | 129 |  | 
    def clone_sdk_artifacts(self, target_from, target_to):
        """Copy the SDK artifact records of target_from onto target_to;
        the Target model owns the actual cloning logic."""
        target_to.clone_sdk_artifacts_from(target_from)
|  | 132 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 133 | def _timestamp_to_datetime(self, secs): | 
|  | 134 | """ | 
|  | 135 | Convert timestamp in seconds to Python datetime | 
|  | 136 | """ | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 137 | return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs)) | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 138 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 139 | # pylint: disable=no-self-use | 
|  | 140 | # we disable detection of no self use in functions because the methods actually work on the object | 
|  | 141 | # even if they don't touch self anywhere | 
|  | 142 |  | 
|  | 143 | # pylint: disable=bad-continuation | 
|  | 144 | # we do not follow the python conventions for continuation indentation due to long lines here | 
|  | 145 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 146 | def get_or_create_build_object(self, brbe): | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 147 | prj = None | 
|  | 148 | buildrequest = None | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 149 | if brbe is not None: | 
|  | 150 | # Toaster-triggered build | 
| Andrew Geissler | d1e8949 | 2021-02-12 15:35:20 -0600 | [diff] [blame] | 151 | logger.debug("buildinfohelper: brbe is %s" % brbe) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 152 | br, _ = brbe.split(":") | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 153 | buildrequest = BuildRequest.objects.get(pk=br) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 154 | prj = buildrequest.project | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 155 | else: | 
|  | 156 | # CLI build | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 157 | prj = Project.objects.get_or_create_default_project() | 
| Andrew Geissler | d1e8949 | 2021-02-12 15:35:20 -0600 | [diff] [blame] | 158 | logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 159 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 160 | if buildrequest is not None: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 161 | # reuse existing Build object | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 162 | build = buildrequest.build | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 163 | build.project = prj | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 164 | build.save() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 165 | else: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 166 | # create new Build object | 
|  | 167 | now = timezone.now() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 168 | build = Build.objects.create( | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 169 | project=prj, | 
|  | 170 | started_on=now, | 
|  | 171 | completed_on=now, | 
|  | 172 | build_name='') | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 173 |  | 
| Andrew Geissler | d1e8949 | 2021-02-12 15:35:20 -0600 | [diff] [blame] | 174 | logger.debug("buildinfohelper: build is created %s" % build) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 175 |  | 
|  | 176 | if buildrequest is not None: | 
|  | 177 | buildrequest.build = build | 
|  | 178 | buildrequest.save() | 
|  | 179 |  | 
|  | 180 | return build | 
|  | 181 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 182 | def update_build(self, build, data_dict): | 
|  | 183 | for key in data_dict: | 
|  | 184 | setattr(build, key, data_dict[key]) | 
|  | 185 | build.save() | 
|  | 186 |  | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 187 | @staticmethod | 
|  | 188 | def get_or_create_targets(target_info): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 189 | """ | 
|  | 190 | NB get_or_create() is used here because for Toaster-triggered builds, | 
|  | 191 | we already created the targets when the build was triggered. | 
|  | 192 | """ | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 193 | result = [] | 
|  | 194 | for target in target_info['targets']: | 
|  | 195 | task = '' | 
|  | 196 | if ':' in target: | 
|  | 197 | target, task = target.split(':', 1) | 
|  | 198 | if task.startswith('do_'): | 
|  | 199 | task = task[3:] | 
|  | 200 | if task == 'build': | 
|  | 201 | task = '' | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 202 |  | 
|  | 203 | obj, _ = Target.objects.get_or_create(build=target_info['build'], | 
|  | 204 | target=target, | 
|  | 205 | task=task) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 206 | result.append(obj) | 
|  | 207 | return result | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 208 |  | 
    def update_build_stats_and_outcome(self, build, errors, warnings, taskfailures):
        """
        Mark build as finished: stamp its completion time and set the
        outcome to SUCCEEDED or FAILED based on the error/failure counts.

        A build that was cancelled (or whose build request is still being
        cancelled) is left untouched so the cancellation outcome survives.
        """
        assert isinstance(build,Build)
        assert isinstance(errors, int)
        assert isinstance(warnings, int)

        # never overwrite a cancellation outcome
        if build.outcome == Build.CANCELLED:
            return
        try:
            if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
                return
        except AttributeError:
            # We may not have a buildrequest if this is a command line build
            pass

        # warnings alone do not fail a build
        outcome = Build.SUCCEEDED
        if errors or taskfailures:
            outcome = Build.FAILED

        build.completed_on = timezone.now()
        build.outcome = outcome
        build.save()

        # We force a sync point here to force the outcome status commit,
        # which resolves a race condition with the build completion takedown
        transaction.set_autocommit(True)
        transaction.set_autocommit(False)

        signal_runbuilds()
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 237 |  | 
    def update_target_set_license_manifest(self, target, license_manifest_path):
        """Record the path of the generated license manifest on target."""
        target.license_manifest_path = license_manifest_path
        target.save()
|  | 241 |  | 
    def update_target_set_package_manifest(self, target, package_manifest_path):
        """Record the path of the generated package manifest on target."""
        target.package_manifest_path = package_manifest_path
        target.save()
|  | 245 |  | 
    def update_task_object(self, build, task_name, recipe_name, task_stats):
        """
        Find the task for build which matches the recipe and task name
        to be stored, and update its timing/resource statistics from
        the task_stats dict (as reported by the build event stream).
        """
        task_to_update = Task.objects.get(
            build = build,
            task_name = task_name,
            recipe__name = recipe_name
        )

        # timing fields are only meaningful when both endpoints were reported
        if 'started' in task_stats and 'ended' in task_stats:
            task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
            task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
            task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
        # cpu times may be missing from the stats; .get() stores None then
        task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
        task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
        if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
            task_to_update.disk_io_read = task_stats['disk_io_read']
            task_to_update.disk_io_write = task_stats['disk_io_write']
            task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

        task_to_update.save()
|  | 269 |  | 
    def get_update_task_object(self, task_information, must_exist = False):
        """
        Fetch (or create) the Task described by task_information and update
        any of its fields that differ from the supplied values.

        With must_exist=True a freshly-created object is treated as an
        error and NotExisting is raised.
        """
        assert 'build' in task_information
        assert 'recipe' in task_information
        assert 'task_name' in task_information

        # we use must_exist info for database look-up optimization
        task_object, created = self._cached_get_or_create(Task,
                        build=task_information['build'],
                        recipe=task_information['recipe'],
                        task_name=task_information['task_name']
                        )
        if created and must_exist:
            task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
            raise NotExisting("Task object created when expected to exist", task_information)

        # copy matching fields from task_information; writing through
        # vars() updates the instance __dict__ directly (bypasses setattr)
        object_changed = False
        for v in vars(task_object):
            if v in task_information.keys():
                if vars(task_object)[v] != task_information[v]:
                    vars(task_object)[v] = task_information[v]
                    object_changed = True

        # update setscene-related information if the task has a setscene
        if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
            task_object.outcome = Task.OUTCOME_CACHED
            object_changed = True

            # propagate the setscene task's outcome into the sstate result
            outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                                    recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
            if outcome_task_setscene == Task.OUTCOME_SUCCESS:
                task_object.sstate_result = Task.SSTATE_RESTORED
                object_changed = True
            elif outcome_task_setscene == Task.OUTCOME_FAILED:
                task_object.sstate_result = Task.SSTATE_FAILED
                object_changed = True

        # only touch the database when something actually changed
        if object_changed:
            task_object.save()
        return task_object
|  | 309 |  | 
|  | 310 |  | 
    def get_update_recipe_object(self, recipe_information, must_exist = False):
        """
        Fetch (or create) the Recipe described by recipe_information and
        refresh its fields, also maintaining a build-history snapshot copy
        for the built layer version.

        Returns the 'real' recipe for default-project (CLI/analysis) builds,
        otherwise the build-history copy when one was made.

        NOTE(review): must_exist is accepted but not used here — kept for
        signature symmetry with get_update_task_object, presumably.
        """
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information
        assert 'pathflags' in recipe_information

        assert not recipe_information['file_path'].startswith("/")      # we should have layer-relative paths at all times


        def update_recipe_obj(recipe_object):
            # copy matching fields through the instance __dict__
            object_changed = False
            for v in vars(recipe_object):
                if v in recipe_information.keys():
                    object_changed = True
                    vars(recipe_object)[v] = recipe_information[v]

            if object_changed:
                recipe_object.save()

        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                     file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

        update_recipe_obj(recipe)

        built_recipe = None
        # Create a copy of the recipe for historical purposes and update it
        for built_layer in self.layer_version_built:
            if built_layer.layer == recipe_information['layer_version'].layer:
                built_recipe, c = self._cached_get_or_create(Recipe,
                        layer_version=built_layer,
                        file_path=recipe_information['file_path'],
                        pathflags = recipe_information['pathflags'])
                update_recipe_obj(built_recipe)
                break


        # If we're in analysis mode or if this is a custom recipe
        # then we are wholly responsible for the data
        # and therefore we return the 'real' recipe rather than the build
        # history copy of the recipe.
        if  recipe_information['layer_version'].build is not None and \
            recipe_information['layer_version'].build.project == \
            Project.objects.get_or_create_default_project():
            return recipe

        if built_recipe is None:
            return recipe

        return built_recipe
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 359 |  | 
    def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
        """
        Return the Layer_Version to use for this build.

        When layer_obj is already a Layer_Version (a Toaster-triggered
        build matched it earlier), refresh it and record a snapshot copy
        for build history. Otherwise create/fetch a Layer_Version tied to
        this build from the event information.
        """
        if isinstance(layer_obj, Layer_Version):
            # We already found our layer version for this build so just
            # update it with the new build information
            logger.debug("We found our layer from toaster")
            layer_obj.local_path = layer_version_information['local_path']
            layer_obj.save()
            self.layer_version_objects.append(layer_obj)

            # create a new copy of this layer version as a snapshot for
            # historical purposes
            layer_copy, c = Layer_Version.objects.get_or_create(
                build=build_obj,
                layer=layer_obj.layer,
                release=layer_obj.release,
                branch=layer_version_information['branch'],
                commit=layer_version_information['commit'],
                local_path=layer_version_information['local_path'],
            )

            logger.debug("Created new layer version %s for build history",
                         layer_copy.layer.name)

            self.layer_version_built.append(layer_copy)

            return layer_obj

        assert isinstance(build_obj, Build)
        assert isinstance(layer_obj, Layer)
        assert 'branch' in layer_version_information
        assert 'commit' in layer_version_information
        assert 'priority' in layer_version_information
        assert 'local_path' in layer_version_information

        # If we're doing a command line build then associate this new layer with the
        # project to avoid it 'contaminating' toaster data
        project = None
        if build_obj.project == Project.objects.get_or_create_default_project():
            project = build_obj.project

        layer_version_object, _ = Layer_Version.objects.get_or_create(
                                      build = build_obj,
                                      layer = layer_obj,
                                      branch = layer_version_information['branch'],
                                      commit = layer_version_information['commit'],
                                      priority = layer_version_information['priority'],
                                      local_path = layer_version_information['local_path'],
                                      project=project)

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object
|  | 412 |  | 
    def get_update_layer_object(self, layer_information, brbe):
        """
        Resolve the layer described by layer_information to a database
        object.

        For CLI builds (brbe is None) this returns a Layer. For
        Toaster-triggered builds it returns the matching BRLayer's
        layer_version, falling back to a path-based scan, and raises
        NotExisting if no match can be found.
        """
        assert 'name' in layer_information
        assert 'layer_index_url' in layer_information

        # From command line builds we have no brbe as the request is directly
        # from bitbake
        if brbe is None:
            # If we don't have git commit sha then we're using a non-git
            # layer so set the layer_source_dir to identify it as such
            if not layer_information['version']['commit']:
                local_source_dir = layer_information["local_path"]
            else:
                local_source_dir = None

            layer_object, _ = \
                Layer.objects.get_or_create(
                    name=layer_information['name'],
                    local_source_dir=local_source_dir,
                    layer_index_url=layer_information['layer_index_url'])

            return layer_object
        else:
            br_id, be_id = brbe.split(":")

            # Find the layer version by matching the layer event information
            # against the metadata we have in Toaster
            try:
                br_layer = BRLayer.objects.get(req=br_id,
                                               name=layer_information['name'])
                return br_layer.layer_version
            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
                # There are multiple of the same layer name or the name
                # hasn't been determined by the toaster.bbclass layer
                # so let's filter by the local_path
                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
                for br_layer in BRLayer.objects.filter(req=br_id):
                    # git layers: match by the clone directory suffix
                    if br_layer.giturl and \
                       layer_information['local_path'].endswith(
                           bc.getGitCloneDirectory(br_layer.giturl,
                                                   br_layer.commit)):
                        return br_layer.layer_version

                    # non-git layers: match by the exact local directory
                    if br_layer.local_source_dir == \
                       layer_information['local_path']:
                        return br_layer.layer_version

            # We've reached the end of our search and couldn't find the layer
            # we can continue but some data may be missing
            raise NotExisting("Unidentified layer %s" %
                              pformat(layer_information))
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 464 |  | 
|  | 465 | def save_target_file_information(self, build_obj, target_obj, filedata): | 
|  | 466 | assert isinstance(build_obj, Build) | 
|  | 467 | assert isinstance(target_obj, Target) | 
|  | 468 | dirs = filedata['dirs'] | 
|  | 469 | files = filedata['files'] | 
|  | 470 | syms = filedata['syms'] | 
|  | 471 |  | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 472 | # always create the root directory as a special case; | 
|  | 473 | # note that this is never displayed, so the owner, group, | 
|  | 474 | # size, permission are irrelevant | 
|  | 475 | tf_obj = Target_File.objects.create(target = target_obj, | 
|  | 476 | path = '/', | 
|  | 477 | size = 0, | 
|  | 478 | owner = '', | 
|  | 479 | group = '', | 
|  | 480 | permission = '', | 
|  | 481 | inodetype = Target_File.ITYPE_DIRECTORY) | 
|  | 482 | tf_obj.save() | 
|  | 483 |  | 
|  | 484 | # insert directories, ordered by name depth | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 485 | for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))): | 
|  | 486 | (user, group, size) = d[1:4] | 
|  | 487 | permission = d[0][1:] | 
|  | 488 | path = d[4].lstrip(".") | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 489 |  | 
|  | 490 | # we already created the root directory, so ignore any | 
|  | 491 | # entry for it | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 492 | if not path: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 493 | continue | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 494 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 495 | parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 496 | if not parent_path: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 497 | parent_path = "/" | 
|  | 498 | parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 
| Patrick Williams | 03907ee | 2022-05-01 06:28:52 -0500 | [diff] [blame] | 499 | Target_File.objects.create( | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 500 | target = target_obj, | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 501 | path = path, | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 502 | size = size, | 
|  | 503 | inodetype = Target_File.ITYPE_DIRECTORY, | 
|  | 504 | permission = permission, | 
|  | 505 | owner = user, | 
|  | 506 | group = group, | 
|  | 507 | directory = parent_obj) | 
|  | 508 |  | 
|  | 509 |  | 
|  | 510 | # we insert files | 
|  | 511 | for d in files: | 
|  | 512 | (user, group, size) = d[1:4] | 
|  | 513 | permission = d[0][1:] | 
|  | 514 | path = d[4].lstrip(".") | 
|  | 515 | parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) | 
|  | 516 | inodetype = Target_File.ITYPE_REGULAR | 
|  | 517 | if d[0].startswith('b'): | 
|  | 518 | inodetype = Target_File.ITYPE_BLOCK | 
|  | 519 | if d[0].startswith('c'): | 
|  | 520 | inodetype = Target_File.ITYPE_CHARACTER | 
|  | 521 | if d[0].startswith('p'): | 
|  | 522 | inodetype = Target_File.ITYPE_FIFO | 
|  | 523 |  | 
|  | 524 | tf_obj = Target_File.objects.create( | 
|  | 525 | target = target_obj, | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 526 | path = path, | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 527 | size = size, | 
|  | 528 | inodetype = inodetype, | 
|  | 529 | permission = permission, | 
|  | 530 | owner = user, | 
|  | 531 | group = group) | 
|  | 532 | parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 
|  | 533 | tf_obj.directory = parent_obj | 
|  | 534 | tf_obj.save() | 
|  | 535 |  | 
|  | 536 | # we insert symlinks | 
|  | 537 | for d in syms: | 
|  | 538 | (user, group, size) = d[1:4] | 
|  | 539 | permission = d[0][1:] | 
|  | 540 | path = d[4].lstrip(".") | 
|  | 541 | filetarget_path = d[6] | 
|  | 542 |  | 
|  | 543 | parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) | 
|  | 544 | if not filetarget_path.startswith("/"): | 
|  | 545 | # we have a relative path, get a normalized absolute one | 
|  | 546 | filetarget_path = parent_path + "/" + filetarget_path | 
|  | 547 | fcp = filetarget_path.split("/") | 
|  | 548 | fcpl = [] | 
|  | 549 | for i in fcp: | 
|  | 550 | if i == "..": | 
|  | 551 | fcpl.pop() | 
|  | 552 | else: | 
|  | 553 | fcpl.append(i) | 
|  | 554 | filetarget_path = "/".join(fcpl) | 
|  | 555 |  | 
|  | 556 | try: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 557 | filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 558 | except Target_File.DoesNotExist: | 
|  | 559 | # we might have an invalid link; no way to detect this. just set it to None | 
|  | 560 | filetarget_obj = None | 
|  | 561 |  | 
|  | 562 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 
|  | 563 |  | 
| Patrick Williams | 03907ee | 2022-05-01 06:28:52 -0500 | [diff] [blame] | 564 | Target_File.objects.create( | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 565 | target = target_obj, | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 566 | path = path, | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 567 | size = size, | 
|  | 568 | inodetype = Target_File.ITYPE_SYMLINK, | 
|  | 569 | permission = permission, | 
|  | 570 | owner = user, | 
|  | 571 | group = group, | 
|  | 572 | directory = parent_obj, | 
|  | 573 | sym_target = filetarget_obj) | 
|  | 574 |  | 
|  | 575 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 576 | def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False): | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 577 | assert isinstance(build_obj, Build) | 
|  | 578 | assert isinstance(target_obj, Target) | 
|  | 579 |  | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 580 | errormsg = [] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 581 | for p in packagedict: | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 582 | # Search name swtiches round the installed name vs package name | 
|  | 583 | # by default installed name == package name | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 584 | searchname = p | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 585 | if p not in pkgpnmap: | 
|  | 586 | logger.warning("Image packages list contains %p, but is" | 
|  | 587 | " missing from all packages list where the" | 
|  | 588 | " metadata comes from. Skipping...", p) | 
|  | 589 | continue | 
|  | 590 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 591 | if 'OPKGN' in pkgpnmap[p].keys(): | 
|  | 592 | searchname = pkgpnmap[p]['OPKGN'] | 
|  | 593 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 594 | built_recipe = recipes[pkgpnmap[p]['PN']] | 
|  | 595 |  | 
|  | 596 | if built_package: | 
|  | 597 | packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname ) | 
|  | 598 | recipe = built_recipe | 
|  | 599 | else: | 
|  | 600 | packagedict[p]['object'], created = \ | 
|  | 601 | CustomImagePackage.objects.get_or_create(name=searchname) | 
|  | 602 | # Clear the Package_Dependency objects as we're going to update | 
|  | 603 | # the CustomImagePackage with the latest dependency information | 
|  | 604 | packagedict[p]['object'].package_dependencies_target.all().delete() | 
|  | 605 | packagedict[p]['object'].package_dependencies_source.all().delete() | 
|  | 606 | try: | 
|  | 607 | recipe = self._cached_get( | 
|  | 608 | Recipe, | 
|  | 609 | name=built_recipe.name, | 
|  | 610 | layer_version__build=None, | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 611 | layer_version__release= | 
|  | 612 | built_recipe.layer_version.release, | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 613 | file_path=built_recipe.file_path, | 
|  | 614 | version=built_recipe.version | 
|  | 615 | ) | 
|  | 616 | except (Recipe.DoesNotExist, | 
|  | 617 | Recipe.MultipleObjectsReturned) as e: | 
|  | 618 | logger.info("We did not find one recipe for the" | 
|  | 619 | "configuration data package %s %s" % (p, e)) | 
|  | 620 | continue | 
|  | 621 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 622 | if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887] | 
|  | 623 | # fill in everything we can from the runtime-reverse package data | 
|  | 624 | try: | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 625 | packagedict[p]['object'].recipe = recipe | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 626 | packagedict[p]['object'].version = pkgpnmap[p]['PV'] | 
|  | 627 | packagedict[p]['object'].installed_name = p | 
|  | 628 | packagedict[p]['object'].revision = pkgpnmap[p]['PR'] | 
|  | 629 | packagedict[p]['object'].license = pkgpnmap[p]['LICENSE'] | 
|  | 630 | packagedict[p]['object'].section = pkgpnmap[p]['SECTION'] | 
|  | 631 | packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY'] | 
|  | 632 | packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION'] | 
|  | 633 | packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE']) | 
|  | 634 |  | 
|  | 635 | # no files recorded for this package, so save files info | 
|  | 636 | packagefile_objects = [] | 
|  | 637 | for targetpath in pkgpnmap[p]['FILES_INFO']: | 
|  | 638 | targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath] | 
|  | 639 | packagefile_objects.append(Package_File( package = packagedict[p]['object'], | 
|  | 640 | path = targetpath, | 
|  | 641 | size = targetfilesize)) | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 642 | if packagefile_objects: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 643 | Package_File.objects.bulk_create(packagefile_objects) | 
|  | 644 | except KeyError as e: | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 645 | errormsg.append("  stpi: Key error, package %s key %s \n" % (p, e)) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 646 |  | 
|  | 647 | # save disk installed size | 
|  | 648 | packagedict[p]['object'].installed_size = packagedict[p]['size'] | 
|  | 649 | packagedict[p]['object'].save() | 
|  | 650 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 651 | if built_package: | 
|  | 652 | Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object']) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 653 |  | 
|  | 654 | packagedeps_objs = [] | 
| Brad Bishop | 00e122a | 2019-10-05 11:10:57 -0400 | [diff] [blame] | 655 | pattern_so = re.compile(r'.*\.so(\.\d*)?$') | 
|  | 656 | pattern_lib = re.compile(r'.*\-suffix(\d*)?$') | 
|  | 657 | pattern_ko = re.compile(r'^kernel-module-.*') | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 658 | for p in packagedict: | 
|  | 659 | for (px,deptype) in packagedict[p]['depends']: | 
|  | 660 | if deptype == 'depends': | 
|  | 661 | tdeptype = Package_Dependency.TYPE_TRDEPENDS | 
|  | 662 | elif deptype == 'recommends': | 
|  | 663 | tdeptype = Package_Dependency.TYPE_TRECOMMENDS | 
|  | 664 |  | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 665 | try: | 
| Brad Bishop | 00e122a | 2019-10-05 11:10:57 -0400 | [diff] [blame] | 666 | # Skip known non-package objects like libraries and kernel modules | 
|  | 667 | if pattern_so.match(px) or pattern_lib.match(px): | 
|  | 668 | logger.info("Toaster does not add library file dependencies to packages (%s,%s)", p, px) | 
|  | 669 | continue | 
|  | 670 | if pattern_ko.match(px): | 
|  | 671 | logger.info("Toaster does not add kernel module dependencies to packages (%s,%s)", p, px) | 
|  | 672 | continue | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 673 | packagedeps_objs.append(Package_Dependency( | 
|  | 674 | package = packagedict[p]['object'], | 
|  | 675 | depends_on = packagedict[px]['object'], | 
|  | 676 | dep_type = tdeptype, | 
|  | 677 | target = target_obj)) | 
|  | 678 | except KeyError as e: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 679 | logger.warning("Could not add dependency to the package %s " | 
|  | 680 | "because %s is an unknown package", p, px) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 681 |  | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 682 | if packagedeps_objs: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 683 | Package_Dependency.objects.bulk_create(packagedeps_objs) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 684 | else: | 
|  | 685 | logger.info("No package dependencies created") | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 686 |  | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 687 | if errormsg: | 
|  | 688 | logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg)) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 689 |  | 
|  | 690 | def save_target_image_file_information(self, target_obj, file_name, file_size): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 691 | Target_Image_File.objects.create(target=target_obj, | 
|  | 692 | file_name=file_name, file_size=file_size) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 693 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 694 | def save_target_kernel_file(self, target_obj, file_name, file_size): | 
|  | 695 | """ | 
|  | 696 | Save kernel file (bzImage, modules*) information for a Target target_obj. | 
|  | 697 | """ | 
|  | 698 | TargetKernelFile.objects.create(target=target_obj, | 
|  | 699 | file_name=file_name, file_size=file_size) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 700 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 701 | def save_target_sdk_file(self, target_obj, file_name, file_size): | 
|  | 702 | """ | 
|  | 703 | Save SDK artifacts to the database, associating them with a | 
|  | 704 | Target object. | 
|  | 705 | """ | 
|  | 706 | TargetSDKFile.objects.create(target=target_obj, file_name=file_name, | 
|  | 707 | file_size=file_size) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 708 |  | 
|  | 709 | def create_logmessage(self, log_information): | 
|  | 710 | assert 'build' in log_information | 
|  | 711 | assert 'level' in log_information | 
|  | 712 | assert 'message' in log_information | 
|  | 713 |  | 
|  | 714 | log_object = LogMessage.objects.create( | 
|  | 715 | build = log_information['build'], | 
|  | 716 | level = log_information['level'], | 
|  | 717 | message = log_information['message']) | 
|  | 718 |  | 
|  | 719 | for v in vars(log_object): | 
|  | 720 | if v in log_information.keys(): | 
|  | 721 | vars(log_object)[v] = log_information[v] | 
|  | 722 |  | 
|  | 723 | return log_object.save() | 
|  | 724 |  | 
|  | 725 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 726 | def save_build_package_information(self, build_obj, package_info, recipes, | 
|  | 727 | built_package): | 
| Brad Bishop | d7bf8c1 | 2018-02-25 22:55:05 -0500 | [diff] [blame] | 728 | # assert isinstance(build_obj, Build) | 
|  | 729 |  | 
|  | 730 | if not 'PN' in package_info.keys(): | 
|  | 731 | # no package data to save (e.g. 'OPKGN'="lib64-*"|"lib32-*") | 
|  | 732 | return None | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 733 |  | 
|  | 734 | # create and save the object | 
|  | 735 | pname = package_info['PKG'] | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 736 | built_recipe = recipes[package_info['PN']] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 737 | if 'OPKGN' in package_info.keys(): | 
|  | 738 | pname = package_info['OPKGN'] | 
|  | 739 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 740 | if built_package: | 
|  | 741 | bp_object, _ = Package.objects.get_or_create( build = build_obj, | 
|  | 742 | name = pname ) | 
|  | 743 | recipe = built_recipe | 
|  | 744 | else: | 
|  | 745 | bp_object, created = \ | 
|  | 746 | CustomImagePackage.objects.get_or_create(name=pname) | 
|  | 747 | try: | 
|  | 748 | recipe = self._cached_get(Recipe, | 
|  | 749 | name=built_recipe.name, | 
|  | 750 | layer_version__build=None, | 
|  | 751 | file_path=built_recipe.file_path, | 
|  | 752 | version=built_recipe.version) | 
|  | 753 |  | 
|  | 754 | except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned): | 
|  | 755 | logger.debug("We did not find one recipe for the configuration" | 
|  | 756 | "data package %s" % pname) | 
|  | 757 | return | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 758 |  | 
|  | 759 | bp_object.installed_name = package_info['PKG'] | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 760 | bp_object.recipe = recipe | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 761 | bp_object.version = package_info['PKGV'] | 
|  | 762 | bp_object.revision = package_info['PKGR'] | 
|  | 763 | bp_object.summary = package_info['SUMMARY'] | 
|  | 764 | bp_object.description = package_info['DESCRIPTION'] | 
|  | 765 | bp_object.size = int(package_info['PKGSIZE']) | 
|  | 766 | bp_object.section = package_info['SECTION'] | 
|  | 767 | bp_object.license = package_info['LICENSE'] | 
|  | 768 | bp_object.save() | 
|  | 769 |  | 
|  | 770 | # save any attached file information | 
|  | 771 | packagefile_objects = [] | 
|  | 772 | for path in package_info['FILES_INFO']: | 
|  | 773 | packagefile_objects.append(Package_File( package = bp_object, | 
|  | 774 | path = path, | 
|  | 775 | size = package_info['FILES_INFO'][path] )) | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 776 | if packagefile_objects: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 777 | Package_File.objects.bulk_create(packagefile_objects) | 
|  | 778 |  | 
|  | 779 | def _po_byname(p): | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 780 | if built_package: | 
|  | 781 | pkg, created = Package.objects.get_or_create(build=build_obj, | 
|  | 782 | name=p) | 
|  | 783 | else: | 
|  | 784 | pkg, created = CustomImagePackage.objects.get_or_create(name=p) | 
|  | 785 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 786 | if created: | 
|  | 787 | pkg.size = -1 | 
|  | 788 | pkg.save() | 
|  | 789 | return pkg | 
|  | 790 |  | 
|  | 791 | packagedeps_objs = [] | 
|  | 792 | # save soft dependency information | 
|  | 793 | if 'RDEPENDS' in package_info and package_info['RDEPENDS']: | 
|  | 794 | for p in bb.utils.explode_deps(package_info['RDEPENDS']): | 
|  | 795 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 796 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)) | 
|  | 797 | if 'RPROVIDES' in package_info and package_info['RPROVIDES']: | 
|  | 798 | for p in bb.utils.explode_deps(package_info['RPROVIDES']): | 
|  | 799 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 800 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)) | 
|  | 801 | if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']: | 
|  | 802 | for p in bb.utils.explode_deps(package_info['RRECOMMENDS']): | 
|  | 803 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 804 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)) | 
|  | 805 | if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']: | 
|  | 806 | for p in bb.utils.explode_deps(package_info['RSUGGESTS']): | 
|  | 807 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 808 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)) | 
|  | 809 | if 'RREPLACES' in package_info and package_info['RREPLACES']: | 
|  | 810 | for p in bb.utils.explode_deps(package_info['RREPLACES']): | 
|  | 811 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 812 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)) | 
|  | 813 | if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']: | 
|  | 814 | for p in bb.utils.explode_deps(package_info['RCONFLICTS']): | 
|  | 815 | packagedeps_objs.append(Package_Dependency(  package = bp_object, | 
|  | 816 | depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) | 
|  | 817 |  | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 818 | if packagedeps_objs: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 819 | Package_Dependency.objects.bulk_create(packagedeps_objs) | 
|  | 820 |  | 
|  | 821 | return bp_object | 
|  | 822 |  | 
|  | 823 | def save_build_variables(self, build_obj, vardump): | 
|  | 824 | assert isinstance(build_obj, Build) | 
|  | 825 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 826 | for k in vardump: | 
|  | 827 | desc = vardump[k]['doc'] | 
|  | 828 | if desc is None: | 
|  | 829 | var_words = [word for word in k.split('_')] | 
|  | 830 | root_var = "_".join([word for word in var_words if word.isupper()]) | 
|  | 831 | if root_var and root_var != k and root_var in vardump: | 
|  | 832 | desc = vardump[root_var]['doc'] | 
|  | 833 | if desc is None: | 
|  | 834 | desc = '' | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 835 | if desc: | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 836 | HelpText.objects.get_or_create(build=build_obj, | 
|  | 837 | area=HelpText.VARIABLE, | 
|  | 838 | key=k, text=desc) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 839 | if not bool(vardump[k]['func']): | 
|  | 840 | value = vardump[k]['v'] | 
|  | 841 | if value is None: | 
|  | 842 | value = '' | 
|  | 843 | variable_obj = Variable.objects.create( build = build_obj, | 
|  | 844 | variable_name = k, | 
|  | 845 | variable_value = value, | 
|  | 846 | description = desc) | 
|  | 847 |  | 
|  | 848 | varhist_objects = [] | 
|  | 849 | for vh in vardump[k]['history']: | 
|  | 850 | if not 'documentation.conf' in vh['file']: | 
|  | 851 | varhist_objects.append(VariableHistory( variable = variable_obj, | 
|  | 852 | file_name = vh['file'], | 
|  | 853 | line_number = vh['line'], | 
|  | 854 | operation = vh['op'])) | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 855 | if varhist_objects: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 856 | VariableHistory.objects.bulk_create(varhist_objects) | 
|  | 857 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 858 |  | 
class MockEvent(object):
    """ This object is used to create event, for which normal event-processing methods can
    be used, out of data that is not coming via an actual event
    """

    # attribute names the event handlers expect on a LogRecord-like event
    _FIELDS = ('msg', 'levelno', 'taskname', 'taskhash', 'pathname', 'lineno')

    def __init__(self):
        # initialise every expected attribute to None; callers fill in
        # whichever fields they need before dispatching the mock event
        for field in self._FIELDS:
            setattr(self, field, None)

    def getMessage(self):
        """
        Simulate LogRecord message return
        """
        return self.msg
|  | 876 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 877 |  | 
|  | 878 | class BuildInfoHelper(object): | 
|  | 879 | """ This class gathers the build information from the server and sends it | 
|  | 880 | towards the ORM wrapper for storing in the database | 
|  | 881 | It is instantiated once per build | 
|  | 882 | Keeps in memory all data that needs matching before writing it to the database | 
|  | 883 | """ | 
|  | 884 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 885 | # tasks which produce image files; note we include '', as we set | 
|  | 886 | # the task for a target to '' (i.e. 'build') if no target is | 
|  | 887 | # explicitly defined | 
|  | 888 | IMAGE_GENERATING_TASKS = ['', 'build', 'image', 'populate_sdk_ext'] | 
|  | 889 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 890 | # pylint: disable=protected-access | 
|  | 891 | # the code will look into the protected variables of the event; no easy way around this | 
|  | 892 | # pylint: disable=bad-continuation | 
|  | 893 | # we do not follow the python conventions for continuation indentation due to long lines here | 
|  | 894 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 895 | def __init__(self, server, has_build_history = False, brbe = None): | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 896 | self.internal_state = {} | 
|  | 897 | self.internal_state['taskdata'] = {} | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 898 | self.internal_state['targets'] = [] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 899 | self.task_order = 0 | 
|  | 900 | self.autocommit_step = 1 | 
|  | 901 | self.server = server | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 902 | self.orm_wrapper = ORMWrapper() | 
|  | 903 | self.has_build_history = has_build_history | 
|  | 904 | self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 905 |  | 
|  | 906 | # this is set for Toaster-triggered builds by localhostbecontroller | 
|  | 907 | # via toasterui | 
|  | 908 | self.brbe = brbe | 
|  | 909 |  | 
|  | 910 | self.project = None | 
|  | 911 |  | 
| Andrew Geissler | d1e8949 | 2021-02-12 15:35:20 -0600 | [diff] [blame] | 912 | logger.debug("buildinfohelper: Build info helper inited %s" % vars(self)) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 913 |  | 
|  | 914 |  | 
|  | 915 | ################### | 
|  | 916 | ## methods to convert event/external info into objects that the ORM layer uses | 
|  | 917 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 918 | def _ensure_build(self): | 
|  | 919 | """ | 
|  | 920 | Ensure the current build object exists and is up to date with | 
|  | 921 | data on the bitbake server | 
|  | 922 | """ | 
|  | 923 | if not 'build' in self.internal_state or not self.internal_state['build']: | 
|  | 924 | # create the Build object | 
|  | 925 | self.internal_state['build'] = \ | 
|  | 926 | self.orm_wrapper.get_or_create_build_object(self.brbe) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 927 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 928 | build = self.internal_state['build'] | 
|  | 929 |  | 
|  | 930 | # update missing fields on the Build object with found data | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 931 | build_info = {} | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 932 |  | 
|  | 933 | # set to True if at least one field is going to be set | 
|  | 934 | changed = False | 
|  | 935 |  | 
|  | 936 | if not build.build_name: | 
|  | 937 | build_name = self.server.runCommand(["getVariable", "BUILDNAME"])[0] | 
|  | 938 |  | 
|  | 939 | # only reset the build name if the one on the server is actually | 
|  | 940 | # a valid value for the build_name field | 
| Andrew Geissler | 82c905d | 2020-04-13 13:39:40 -0500 | [diff] [blame] | 941 | if build_name is not None: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 942 | build_info['build_name'] = build_name | 
|  | 943 | changed = True | 
|  | 944 |  | 
|  | 945 | if not build.machine: | 
|  | 946 | build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0] | 
|  | 947 | changed = True | 
|  | 948 |  | 
|  | 949 | if not build.distro: | 
|  | 950 | build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0] | 
|  | 951 | changed = True | 
|  | 952 |  | 
|  | 953 | if not build.distro_version: | 
|  | 954 | build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0] | 
|  | 955 | changed = True | 
|  | 956 |  | 
|  | 957 | if not build.bitbake_version: | 
|  | 958 | build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0] | 
|  | 959 | changed = True | 
|  | 960 |  | 
|  | 961 | if changed: | 
|  | 962 | self.orm_wrapper.update_build(self.internal_state['build'], build_info) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 963 |  | 
|  | 964 | def _get_task_information(self, event, recipe): | 
|  | 965 | assert 'taskname' in vars(event) | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 966 | self._ensure_build() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 967 |  | 
|  | 968 | task_information = {} | 
|  | 969 | task_information['build'] = self.internal_state['build'] | 
|  | 970 | task_information['outcome'] = Task.OUTCOME_NA | 
|  | 971 | task_information['recipe'] = recipe | 
|  | 972 | task_information['task_name'] = event.taskname | 
|  | 973 | try: | 
|  | 974 | # some tasks don't come with a hash. and that's ok | 
|  | 975 | task_information['sstate_checksum'] = event.taskhash | 
|  | 976 | except AttributeError: | 
|  | 977 | pass | 
|  | 978 | return task_information | 
|  | 979 |  | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 980 | def _get_layer_version_for_dependency(self, pathRE): | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 981 | """ Returns the layer in the toaster db that has a full regex | 
|  | 982 | match to the pathRE. pathRE - the layer path passed as a regex in the | 
|  | 983 | event. It is created in cooker.py as a collection for the layer | 
|  | 984 | priorities. | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 985 | """ | 
|  | 986 | self._ensure_build() | 
|  | 987 |  | 
|  | 988 | def _sort_longest_path(layer_version): | 
|  | 989 | assert isinstance(layer_version, Layer_Version) | 
|  | 990 | return len(layer_version.local_path) | 
|  | 991 |  | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 992 | # Our paths don't append a trailing slash | 
|  | 993 | if pathRE.endswith("/"): | 
|  | 994 | pathRE = pathRE[:-1] | 
|  | 995 |  | 
|  | 996 | p = re.compile(pathRE) | 
|  | 997 | path=re.sub(r'[$^]',r'',pathRE) | 
|  | 998 | # Heuristics: we always match recipe to the deepest layer path in | 
|  | 999 | # the discovered layers | 
|  | 1000 | for lvo in sorted(self.orm_wrapper.layer_version_objects, | 
|  | 1001 | reverse=True, key=_sort_longest_path): | 
|  | 1002 | if p.fullmatch(os.path.abspath(lvo.local_path)): | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 1003 | return lvo | 
|  | 1004 | if lvo.layer.local_source_dir: | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 1005 | if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)): | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 1006 | return lvo | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 1007 | if 0 == path.find(lvo.local_path): | 
|  | 1008 | # sub-layer path inside existing layer | 
|  | 1009 | return lvo | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 1010 |  | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 1011 | # if we get here, we didn't read layers correctly; | 
|  | 1012 | # dump whatever information we have on the error log | 
|  | 1013 | logger.warning("Could not match layer dependency for path %s : %s", | 
|  | 1014 | pathRE, | 
|  | 1015 | self.orm_wrapper.layer_version_objects) | 
|  | 1016 | return None | 
| Brad Bishop | 37a0e4d | 2017-12-04 01:01:44 -0500 | [diff] [blame] | 1017 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1018 | def _get_layer_version_for_path(self, path): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1019 | self._ensure_build() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1020 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1021 | def _slkey_interactive(layer_version): | 
|  | 1022 | assert isinstance(layer_version, Layer_Version) | 
|  | 1023 | return len(layer_version.local_path) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1024 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1025 | # Heuristics: we always match recipe to the deepest layer path in the discovered layers | 
|  | 1026 | for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive): | 
|  | 1027 | # we can match to the recipe file path | 
|  | 1028 | if path.startswith(lvo.local_path): | 
|  | 1029 | return lvo | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1030 | if lvo.layer.local_source_dir and \ | 
|  | 1031 | path.startswith(lvo.layer.local_source_dir): | 
|  | 1032 | return lvo | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1033 |  | 
|  | 1034 | #if we get here, we didn't read layers correctly; dump whatever information we have on the error log | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1035 | logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1036 |  | 
|  | 1037 | #mockup the new layer | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1038 | unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="") | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1039 | unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build']) | 
|  | 1040 |  | 
|  | 1041 | # append it so we don't run into this error again and again | 
|  | 1042 | self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj) | 
|  | 1043 |  | 
|  | 1044 | return unknown_layer_version_obj | 
|  | 1045 |  | 
|  | 1046 | def _get_recipe_information_from_taskfile(self, taskfile): | 
|  | 1047 | localfilepath = taskfile.split(":")[-1] | 
|  | 1048 | filepath_flags = ":".join(sorted(taskfile.split(":")[:-1])) | 
|  | 1049 | layer_version_obj = self._get_layer_version_for_path(localfilepath) | 
|  | 1050 |  | 
|  | 1051 |  | 
|  | 1052 |  | 
|  | 1053 | recipe_info = {} | 
|  | 1054 | recipe_info['layer_version'] = layer_version_obj | 
|  | 1055 | recipe_info['file_path'] = localfilepath | 
|  | 1056 | recipe_info['pathflags'] = filepath_flags | 
|  | 1057 |  | 
|  | 1058 | if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path): | 
|  | 1059 | recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/") | 
|  | 1060 | else: | 
|  | 1061 | raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path)) | 
|  | 1062 |  | 
|  | 1063 | return recipe_info | 
|  | 1064 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1065 |  | 
|  | 1066 | ################################ | 
|  | 1067 | ## external available methods to store information | 
|  | 1068 | @staticmethod | 
|  | 1069 | def _get_data_from_event(event): | 
|  | 1070 | evdata = None | 
|  | 1071 | if '_localdata' in vars(event): | 
|  | 1072 | evdata = event._localdata | 
|  | 1073 | elif 'data' in vars(event): | 
|  | 1074 | evdata = event.data | 
|  | 1075 | else: | 
|  | 1076 | raise Exception("Event with neither _localdata or data properties") | 
|  | 1077 | return evdata | 
|  | 1078 |  | 
|  | 1079 | def store_layer_info(self, event): | 
|  | 1080 | layerinfos = BuildInfoHelper._get_data_from_event(event) | 
|  | 1081 | self.internal_state['lvs'] = {} | 
|  | 1082 | for layer in layerinfos: | 
|  | 1083 | try: | 
|  | 1084 | self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version'] | 
|  | 1085 | self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path'] | 
|  | 1086 | except NotExisting as nee: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1087 | logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1088 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1089 | def store_started_build(self): | 
|  | 1090 | self._ensure_build() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1091 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1092 | def save_build_log_file_path(self, build_log_path): | 
|  | 1093 | self._ensure_build() | 
|  | 1094 |  | 
|  | 1095 | if not self.internal_state['build'].cooker_log_path: | 
|  | 1096 | data_dict = {'cooker_log_path': build_log_path} | 
|  | 1097 | self.orm_wrapper.update_build(self.internal_state['build'], data_dict) | 
|  | 1098 |  | 
|  | 1099 | def save_build_targets(self, event): | 
|  | 1100 | self._ensure_build() | 
|  | 1101 |  | 
|  | 1102 | # create target information | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1103 | assert '_pkgs' in vars(event) | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1104 | target_information = {} | 
|  | 1105 | target_information['targets'] = event._pkgs | 
|  | 1106 | target_information['build'] = self.internal_state['build'] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1107 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1108 | self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information) | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1109 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1110 | def save_build_layers_and_variables(self): | 
|  | 1111 | self._ensure_build() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1112 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1113 | build_obj = self.internal_state['build'] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1114 |  | 
|  | 1115 | # save layer version information for this build | 
|  | 1116 | if not 'lvs' in self.internal_state: | 
|  | 1117 | logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.") | 
|  | 1118 | else: | 
|  | 1119 | for layer_obj in self.internal_state['lvs']: | 
|  | 1120 | self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj]) | 
|  | 1121 |  | 
|  | 1122 | del self.internal_state['lvs'] | 
|  | 1123 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1124 | # Save build configuration | 
|  | 1125 | data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0] | 
|  | 1126 |  | 
|  | 1127 | # convert the paths from absolute to relative to either the build directory or layer checkouts | 
|  | 1128 | path_prefixes = [] | 
|  | 1129 |  | 
|  | 1130 | if self.brbe is not None: | 
|  | 1131 | _, be_id = self.brbe.split(":") | 
|  | 1132 | be = BuildEnvironment.objects.get(pk = be_id) | 
|  | 1133 | path_prefixes.append(be.builddir) | 
|  | 1134 |  | 
|  | 1135 | for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True): | 
|  | 1136 | path_prefixes.append(layer.local_path) | 
|  | 1137 |  | 
|  | 1138 | # we strip the prefixes | 
|  | 1139 | for k in data: | 
|  | 1140 | if not bool(data[k]['func']): | 
|  | 1141 | for vh in data[k]['history']: | 
|  | 1142 | if not 'documentation.conf' in vh['file']: | 
|  | 1143 | abs_file_name = vh['file'] | 
|  | 1144 | for pp in path_prefixes: | 
|  | 1145 | if abs_file_name.startswith(pp + "/"): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1146 | # preserve layer name in relative path | 
|  | 1147 | vh['file']=abs_file_name[pp.rfind("/")+1:] | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1148 | break | 
|  | 1149 |  | 
|  | 1150 | # save the variables | 
|  | 1151 | self.orm_wrapper.save_build_variables(build_obj, data) | 
|  | 1152 |  | 
|  | 1153 | return self.brbe | 
|  | 1154 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1155 | def set_recipes_to_parse(self, num_recipes): | 
|  | 1156 | """ | 
|  | 1157 | Set the number of recipes which need to be parsed for this build. | 
|  | 1158 | This is set the first time ParseStarted is received by toasterui. | 
|  | 1159 | """ | 
|  | 1160 | self._ensure_build() | 
|  | 1161 | self.internal_state['build'].recipes_to_parse = num_recipes | 
|  | 1162 | self.internal_state['build'].save() | 
|  | 1163 |  | 
|  | 1164 | def set_recipes_parsed(self, num_recipes): | 
|  | 1165 | """ | 
|  | 1166 | Set the number of recipes parsed so far for this build; this is updated | 
|  | 1167 | each time a ParseProgress or ParseCompleted event is received by | 
|  | 1168 | toasterui. | 
|  | 1169 | """ | 
|  | 1170 | self._ensure_build() | 
|  | 1171 | if num_recipes <= self.internal_state['build'].recipes_to_parse: | 
|  | 1172 | self.internal_state['build'].recipes_parsed = num_recipes | 
|  | 1173 | self.internal_state['build'].save() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1174 |  | 
|  | 1175 | def update_target_image_file(self, event): | 
|  | 1176 | evdata = BuildInfoHelper._get_data_from_event(event) | 
|  | 1177 |  | 
|  | 1178 | for t in self.internal_state['targets']: | 
| Andrew Geissler | 82c905d | 2020-04-13 13:39:40 -0500 | [diff] [blame] | 1179 | if t.is_image: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1180 | output_files = list(evdata.keys()) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1181 | for output in output_files: | 
|  | 1182 | if t.target in output and 'rootfs' in output and not output.endswith(".manifest"): | 
|  | 1183 | self.orm_wrapper.save_target_image_file_information(t, output, evdata[output]) | 
|  | 1184 |  | 
|  | 1185 | def update_artifact_image_file(self, event): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1186 | self._ensure_build() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1187 | evdata = BuildInfoHelper._get_data_from_event(event) | 
|  | 1188 | for artifact_path in evdata.keys(): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1189 | self.orm_wrapper.save_artifact_information( | 
|  | 1190 | self.internal_state['build'], artifact_path, | 
|  | 1191 | evdata[artifact_path]) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1192 |  | 
|  | 1193 | def update_build_information(self, event, errors, warnings, taskfailures): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1194 | self._ensure_build() | 
|  | 1195 | self.orm_wrapper.update_build_stats_and_outcome( | 
|  | 1196 | self.internal_state['build'], errors, warnings, taskfailures) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1197 |  | 
    def store_started_task(self, event):
        """Create a Task record for a task that has just started (or been
        skipped), and remember its outcome for update_and_store_task().

        Accepts runqueue Started/Skipped events. Skipped tasks record
        COVERED or PREBUILT outcomes; executed tasks start as OUTCOME_NA
        and are finalized when the completion event arrives.
        """
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        # taskfile may carry "virtual:..." prefixes; the real path is last
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        # key used to find this task again when its completion event arrives
        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            # "covered" -> satisfied by another task; "existing" -> prebuilt
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            # noexec tasks run but have an empty body
            if 'noexec' in vars(event) and event.noexec:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        # cache the outcome so the completion handler can look it up
        self.internal_state['taskdata'][identifier] = {
            'outcome': task_information['outcome'],
        }
|  | 1236 |  | 
|  | 1237 |  | 
|  | 1238 | def store_tasks_stats(self, event): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1239 | self._ensure_build() | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1240 | task_data = BuildInfoHelper._get_data_from_event(event) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1241 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1242 | for (task_file, task_name, task_stats, recipe_name) in task_data: | 
|  | 1243 | build = self.internal_state['build'] | 
|  | 1244 | self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1245 |  | 
    def update_and_store_task(self, event):
        """Update the Task record for a task that produced a new event
        (TaskStarted/TaskSucceeded/TaskFailed or runqueue completion).

        Looks the task up by "taskfile:taskname"; if that key is unknown
        (TaskBase events may lack the virtual: prefix), tries to guess the
        right entry from the cached taskdata keys. Finalizes the outcome
        on completion/failure events and periodically forces a DB commit.
        """
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]
                elif len(candidates) > 1 and hasattr(event,'_package'):
                    # disambiguate via the package's native/nativesdk prefix
                    if 'native-' in event._package:
                        identifier = 'native:' + identifier
                    if 'nativesdk-' in event._package:
                        identifier = 'nativesdk:' + identifier
                    candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                    if len(candidates) == 1:
                        identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        # everything before the final ":taskname" is the task file spec
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        # outcome recorded when the task started (NA unless skipped/noexec)
        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # finalize outcome on completion/failure and drop the cache entry
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

            if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
                task_information['outcome'] = Task.OUTCOME_FAILED
                del self.internal_state['taskdata'][identifier]

        # we force a sync point here, to get the progress bar to show
        if self.autocommit_step % 3 == 0:
            transaction.set_autocommit(True)
            transaction.set_autocommit(False)
        self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
|  | 1305 |  | 
|  | 1306 |  | 
|  | 1307 | def store_missed_state_tasks(self, event): | 
|  | 1308 | for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']: | 
|  | 1309 |  | 
|  | 1310 | # identifier = fn + taskname + "_setscene" | 
|  | 1311 | recipe_information = self._get_recipe_information_from_taskfile(fn) | 
|  | 1312 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | 
|  | 1313 | mevent = MockEvent() | 
|  | 1314 | mevent.taskname = taskname | 
|  | 1315 | mevent.taskhash = taskhash | 
|  | 1316 | task_information = self._get_task_information(mevent,recipe) | 
|  | 1317 |  | 
|  | 1318 | task_information['start_time'] = timezone.now() | 
|  | 1319 | task_information['outcome'] = Task.OUTCOME_NA | 
|  | 1320 | task_information['sstate_checksum'] = taskhash | 
|  | 1321 | task_information['sstate_result'] = Task.SSTATE_MISS | 
|  | 1322 | task_information['path_to_sstate_obj'] = sstatefile | 
|  | 1323 |  | 
|  | 1324 | self.orm_wrapper.get_update_task_object(task_information) | 
|  | 1325 |  | 
|  | 1326 | for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']: | 
|  | 1327 |  | 
|  | 1328 | # identifier = fn + taskname + "_setscene" | 
|  | 1329 | recipe_information = self._get_recipe_information_from_taskfile(fn) | 
|  | 1330 | recipe = self.orm_wrapper.get_update_recipe_object(recipe_information) | 
|  | 1331 | mevent = MockEvent() | 
|  | 1332 | mevent.taskname = taskname | 
|  | 1333 | mevent.taskhash = taskhash | 
|  | 1334 | task_information = self._get_task_information(mevent,recipe) | 
|  | 1335 |  | 
|  | 1336 | task_information['path_to_sstate_obj'] = sstatefile | 
|  | 1337 |  | 
|  | 1338 | self.orm_wrapper.get_update_task_object(task_information) | 
|  | 1339 |  | 
|  | 1340 |  | 
|  | 1341 | def store_target_package_data(self, event): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1342 | self._ensure_build() | 
|  | 1343 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1344 | # for all image targets | 
|  | 1345 | for target in self.internal_state['targets']: | 
|  | 1346 | if target.is_image: | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1347 | pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata'] | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1348 | imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {}) | 
|  | 1349 | filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {}) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1350 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1351 | try: | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1352 | self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True) | 
|  | 1353 | self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1354 | except KeyError as e: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1355 | logger.warning("KeyError in save_target_package_information" | 
|  | 1356 | "%s ", e) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1357 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1358 | # only try to find files in the image if the task for this | 
|  | 1359 | # target is one which produces image files; otherwise, the old | 
|  | 1360 | # list of files in the files-in-image.txt file will be | 
|  | 1361 | # appended to the target even if it didn't produce any images | 
|  | 1362 | if target.task in BuildInfoHelper.IMAGE_GENERATING_TASKS: | 
|  | 1363 | try: | 
|  | 1364 | self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata) | 
|  | 1365 | except KeyError as e: | 
|  | 1366 | logger.warning("KeyError in save_target_file_information" | 
|  | 1367 | "%s ", e) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1368 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1369 |  | 
|  | 1370 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1371 | def cancel_cli_build(self): | 
|  | 1372 | """ | 
|  | 1373 | If a build is currently underway, set its state to CANCELLED; | 
|  | 1374 | note that this only gets called for command line builds which are | 
|  | 1375 | interrupted, so it doesn't touch any BuildRequest objects | 
|  | 1376 | """ | 
|  | 1377 | self._ensure_build() | 
|  | 1378 | self.internal_state['build'].outcome = Build.CANCELLED | 
|  | 1379 | self.internal_state['build'].save() | 
|  | 1380 | signal_runbuilds() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1381 |  | 
    def store_dependency_information(self, event):
        """Store the dependency graph sent in a DepTreeGenerated event.

        Saves, in order: layer priorities, every recipe (with metadata and
        image-class detection), recipe-to-recipe DEPENDS edges, then tasks
        and task-to-task dependency edges. Unresolvable recipe deps are
        collected into errormsg and logged once at the end.
        """
        assert '_depgraph' in vars(event)
        assert 'layer-priorities' in event._depgraph
        assert 'pn' in event._depgraph
        assert 'tdepends' in event._depgraph

        # accumulates non-fatal problems; logged in one batch at the end
        errormsg = []

        # save layer version priorities
        if 'layer-priorities' in event._depgraph.keys():
            for lv in event._depgraph['layer-priorities']:
                (_, path, _, priority) = lv
                layer_version_obj = self._get_layer_version_for_dependency(path)
                if layer_version_obj:
                    layer_version_obj.priority = priority
                    layer_version_obj.save()

        # save recipe information
        self.internal_state['recipes'] = {}
        for pn in event._depgraph['pn']:

            # strip any "virtual:" prefixes; keep them sorted as pathflags
            file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
            pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
            layer_version_obj = self._get_layer_version_for_path(file_name)

            assert layer_version_obj is not None

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['layer_version'] = layer_version_obj

            # copy optional recipe metadata when the depgraph provides it
            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['file_path'] = file_name
            recipe_info['pathflags'] = pathflags

            # store the recipe path relative to its layer checkout
            if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
                recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
            else:
                raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
            # a recipe inheriting image.bbclass is an image recipe
            if 'inherits' in event._depgraph['pn'][pn].keys():
                for cls in event._depgraph['pn'][pn]['inherits']:
                    if cls.endswith('/image.bbclass'):
                        recipe.is_image = True
                        recipe_info['is_image'] = True
                        # Save the is_image state to the relevant recipe objects
                        self.orm_wrapper.get_update_recipe_object(recipe_info)
                        break
            if recipe.is_image:
                # propagate the image flag to matching build targets
                for t in self.internal_state['targets']:
                    if pn == t.target:
                        t.is_image = True
                        t.save()
            self.internal_state['recipes'][pn] = recipe

        # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

        assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

        # save recipe dependency
        # buildtime
        recipedeps_objects = []
        for recipe in event._depgraph['depends']:
            target = self.internal_state['recipes'][recipe]
            for dep in event._depgraph['depends'][recipe]:
                if dep in assume_provided:
                    continue
                via = None
                # resolve virtual providers through the providermap
                if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
                    deprecipe = event._depgraph['providermap'][dep][0]
                    dependency = self.internal_state['recipes'][deprecipe]
                    via = Provides.objects.get_or_create(name=dep,
                                                         recipe=dependency)[0]
                elif dep in self.internal_state['recipes']:
                    dependency = self.internal_state['recipes'][dep]
                else:
                    errormsg.append("  stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
                    continue
                recipe_dep = Recipe_Dependency(recipe=target,
                                               depends_on=dependency,
                                               via=via,
                                               dep_type=Recipe_Dependency.TYPE_DEPENDS)
                recipedeps_objects.append(recipe_dep)

        # single bulk insert instead of one query per dependency
        Recipe_Dependency.objects.bulk_create(recipedeps_objects)

        # save all task information
        def _save_a_task(taskdesc):
            # taskdesc is "pn.taskname" (pn itself may contain dots)
            spec = re.split(r'\.', taskdesc)
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            # NOTE(review): reuses (and mutates) the incoming event as a
            # carrier for _get_task_information; taskname is set to pn here
            # while the real task name goes in via task_info['task_name']
            e = event
            e.taskname = pn
            recipe = self.internal_state['recipes'][pn]
            task_info = self._get_task_information(e, recipe)
            task_info['task_name'] = taskname
            task_obj = self.orm_wrapper.get_update_task_object(task_info)
            return task_obj

        # create tasks
        tasks = {}
        for taskdesc in event._depgraph['tdepends']:
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        taskdeps_objects = []
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
                if taskdep not in tasks:
                    # Fetch tasks info is not collected previously
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
        Task_Dependency.objects.bulk_create(taskdeps_objects)

        if errormsg:
            logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1523 |  | 
|  | 1524 |  | 
|  | 1525 | def store_build_package_information(self, event): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1526 | self._ensure_build() | 
|  | 1527 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1528 | package_info = BuildInfoHelper._get_data_from_event(event) | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1529 | self.orm_wrapper.save_build_package_information( | 
|  | 1530 | self.internal_state['build'], | 
|  | 1531 | package_info, | 
|  | 1532 | self.internal_state['recipes'], | 
|  | 1533 | built_package=True) | 
|  | 1534 |  | 
|  | 1535 | self.orm_wrapper.save_build_package_information( | 
|  | 1536 | self.internal_state['build'], | 
|  | 1537 | package_info, | 
|  | 1538 | self.internal_state['recipes'], | 
|  | 1539 | built_package=False) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1540 |  | 
    def _store_build_done(self, errorcode):
        """Finalize BuildRequest/BuildEnvironment state when bitbake
        reports the build finished with *errorcode*.

        Does nothing for command-line builds (no brbe). A build whose
        request was being cancelled is marked CANCELLED and treated as a
        clean completion. Finally the build environment is unlocked and
        the build scheduler signalled.
        """
        logger.info("Build exited with errorcode %d", errorcode)

        # command-line build: no BuildRequest/BuildEnvironment to update
        if not self.brbe:
            return

        # brbe is "buildrequest_id:buildenvironment_id"
        br_id, be_id = self.brbe.split(":")

        br = BuildRequest.objects.get(pk = br_id)

        # if we're 'done' because we got cancelled update the build outcome
        if br.state == BuildRequest.REQ_CANCELLING:
            logger.info("Build cancelled")
            br.build.outcome = Build.CANCELLED
            br.build.save()
            self.internal_state['build'] = br.build
            # treat the cancellation as a non-error completion below
            errorcode = 0

        if errorcode == 0:
            # request archival of the project artifacts
            br.state = BuildRequest.REQ_COMPLETED
        else:
            br.state = BuildRequest.REQ_FAILED
        br.save()

        # release the build environment and wake the runbuilds scheduler
        be = BuildEnvironment.objects.get(pk = be_id)
        be.lock = BuildEnvironment.LOCK_FREE
        be.save()
        signal_runbuilds()
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1570 |  | 
|  | 1571 | def store_log_error(self, text): | 
|  | 1572 | mockevent = MockEvent() | 
|  | 1573 | mockevent.levelno = formatter.ERROR | 
|  | 1574 | mockevent.msg = text | 
|  | 1575 | mockevent.pathname = '-- None' | 
|  | 1576 | mockevent.lineno = LogMessage.ERROR | 
|  | 1577 | self.store_log_event(mockevent) | 
|  | 1578 |  | 
|  | 1579 | def store_log_exception(self, text, backtrace = ""): | 
|  | 1580 | mockevent = MockEvent() | 
|  | 1581 | mockevent.levelno = -1 | 
|  | 1582 | mockevent.msg = text | 
|  | 1583 | mockevent.pathname = backtrace | 
|  | 1584 | mockevent.lineno = -1 | 
|  | 1585 | self.store_log_event(mockevent) | 
|  | 1586 |  | 
| Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 1587 | def store_log_event(self, event,cli_backlog=True): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1588 | self._ensure_build() | 
|  | 1589 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1590 | if event.levelno < formatter.WARNING: | 
|  | 1591 | return | 
|  | 1592 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1593 | # early return for CLI builds | 
| Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 1594 | if cli_backlog and self.brbe is None: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1595 | if not 'backlog' in self.internal_state: | 
|  | 1596 | self.internal_state['backlog'] = [] | 
|  | 1597 | self.internal_state['backlog'].append(event) | 
|  | 1598 | return | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1599 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1600 | if 'backlog' in self.internal_state: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1601 | # if we have a backlog of events, do our best to save them here | 
| Andrew Geissler | 595f630 | 2022-01-24 19:11:47 +0000 | [diff] [blame] | 1602 | if self.internal_state['backlog']: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1603 | tempevent = self.internal_state['backlog'].pop() | 
| Andrew Geissler | d1e8949 | 2021-02-12 15:35:20 -0600 | [diff] [blame] | 1604 | logger.debug("buildinfohelper: Saving stored event %s " | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1605 | % tempevent) | 
| Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 1606 | self.store_log_event(tempevent,cli_backlog) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1607 | else: | 
|  | 1608 | logger.info("buildinfohelper: All events saved") | 
|  | 1609 | del self.internal_state['backlog'] | 
|  | 1610 |  | 
|  | 1611 | log_information = {} | 
|  | 1612 | log_information['build'] = self.internal_state['build'] | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1613 | if event.levelno == formatter.CRITICAL: | 
|  | 1614 | log_information['level'] = LogMessage.CRITICAL | 
|  | 1615 | elif event.levelno == formatter.ERROR: | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1616 | log_information['level'] = LogMessage.ERROR | 
|  | 1617 | elif event.levelno == formatter.WARNING: | 
|  | 1618 | log_information['level'] = LogMessage.WARNING | 
|  | 1619 | elif event.levelno == -2:   # toaster self-logging | 
|  | 1620 | log_information['level'] = -2 | 
|  | 1621 | else: | 
|  | 1622 | log_information['level'] = LogMessage.INFO | 
|  | 1623 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1624 | log_information['message'] = event.getMessage() | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1625 | log_information['pathname'] = event.pathname | 
|  | 1626 | log_information['lineno'] = event.lineno | 
|  | 1627 | logger.info("Logging error 2: %s", log_information) | 
| Patrick Williams | f1e5d69 | 2016-03-30 15:21:19 -0500 | [diff] [blame] | 1628 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1629 | self.orm_wrapper.create_logmessage(log_information) | 
|  | 1630 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1631 | def _get_filenames_from_image_license(self, image_license_manifest_path): | 
|  | 1632 | """ | 
|  | 1633 | Find the FILES line in the image_license.manifest file, | 
|  | 1634 | which has the basenames of the bzImage and modules files | 
|  | 1635 | in this format: | 
|  | 1636 | FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz | 
|  | 1637 | """ | 
|  | 1638 | files = [] | 
|  | 1639 | with open(image_license_manifest_path) as image_license: | 
|  | 1640 | for line in image_license: | 
|  | 1641 | if line.startswith('FILES'): | 
|  | 1642 | files_str = line.split(':')[1].strip() | 
|  | 1643 | files_str = re.sub(r' {2,}', ' ', files_str) | 
|  | 1644 |  | 
|  | 1645 | # ignore lines like "FILES:" with no filenames | 
|  | 1646 | if files_str: | 
|  | 1647 | files += files_str.split(' ') | 
|  | 1648 | return files | 
|  | 1649 |  | 
|  | 1650 | def _endswith(self, str_to_test, endings): | 
|  | 1651 | """ | 
|  | 1652 | Returns True if str ends with one of the strings in the list | 
|  | 1653 | endings, False otherwise | 
|  | 1654 | """ | 
|  | 1655 | endswith = False | 
|  | 1656 | for ending in endings: | 
|  | 1657 | if str_to_test.endswith(ending): | 
|  | 1658 | endswith = True | 
|  | 1659 | break | 
|  | 1660 | return endswith | 
|  | 1661 |  | 
| Brad Bishop | 6e60e8b | 2018-02-01 10:27:11 -0500 | [diff] [blame] | 1662 | def scan_task_artifacts(self, event): | 
|  | 1663 | """ | 
|  | 1664 | The 'TaskArtifacts' event passes the manifest file content for the | 
|  | 1665 | tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and | 
|  | 1666 | 'do_populate_sdk_ext'. The first two will be implemented later. | 
|  | 1667 | """ | 
|  | 1668 | task_vars = BuildInfoHelper._get_data_from_event(event) | 
|  | 1669 | task_name = task_vars['task'][task_vars['task'].find(':')+1:] | 
|  | 1670 | task_artifacts = task_vars['artifacts'] | 
|  | 1671 |  | 
|  | 1672 | if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']: | 
|  | 1673 | targets = [target for target in self.internal_state['targets'] \ | 
|  | 1674 | if target.task == task_name[3:]] | 
|  | 1675 | if not targets: | 
|  | 1676 | logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name) | 
|  | 1677 | return | 
|  | 1678 | for artifact_path in task_artifacts: | 
|  | 1679 | if not os.path.isfile(artifact_path): | 
|  | 1680 | logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path) | 
|  | 1681 | continue | 
|  | 1682 | for target in targets: | 
|  | 1683 | # don't record the file if it's already been added | 
|  | 1684 | # to this target | 
|  | 1685 | matching_files = TargetSDKFile.objects.filter( | 
|  | 1686 | target=target, file_name=artifact_path) | 
|  | 1687 | if matching_files.count() == 0: | 
|  | 1688 | artifact_size = os.stat(artifact_path).st_size | 
|  | 1689 | self.orm_wrapper.save_target_sdk_file( | 
|  | 1690 | target, artifact_path, artifact_size) | 
|  | 1691 |  | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1692 | def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions): | 
|  | 1693 | """ | 
|  | 1694 | Find files in deploy_dir_image whose basename starts with the | 
|  | 1695 | string image_name and ends with one of the strings in | 
|  | 1696 | image_file_extensions. | 
|  | 1697 |  | 
|  | 1698 | Returns a list of file dictionaries like | 
|  | 1699 |  | 
|  | 1700 | [ | 
|  | 1701 | { | 
|  | 1702 | 'path': '/path/to/image/file', | 
|  | 1703 | 'size': <file size in bytes> | 
|  | 1704 | } | 
|  | 1705 | ] | 
|  | 1706 | """ | 
|  | 1707 | image_files = [] | 
|  | 1708 |  | 
|  | 1709 | for dirpath, _, filenames in os.walk(deploy_dir_image): | 
|  | 1710 | for filename in filenames: | 
|  | 1711 | if filename.startswith(image_name) and \ | 
|  | 1712 | self._endswith(filename, image_file_extensions): | 
|  | 1713 | image_file_path = os.path.join(dirpath, filename) | 
|  | 1714 | image_file_size = os.stat(image_file_path).st_size | 
|  | 1715 |  | 
|  | 1716 | image_files.append({ | 
|  | 1717 | 'path': image_file_path, | 
|  | 1718 | 'size': image_file_size | 
|  | 1719 | }) | 
|  | 1720 |  | 
|  | 1721 | return image_files | 
|  | 1722 |  | 
|  | 1723 | def scan_image_artifacts(self): | 
|  | 1724 | """ | 
|  | 1725 | Scan for built image artifacts in DEPLOY_DIR_IMAGE and associate them | 
|  | 1726 | with a Target object in self.internal_state['targets']. | 
|  | 1727 |  | 
|  | 1728 | We have two situations to handle: | 
|  | 1729 |  | 
|  | 1730 | 1. This is the first time a target + machine has been built, so | 
|  | 1731 | add files from the DEPLOY_DIR_IMAGE to the target. | 
|  | 1732 |  | 
|  | 1733 | OR | 
|  | 1734 |  | 
|  | 1735 | 2. There are no new files for the target (they were already produced by | 
|  | 1736 | a previous build), so copy them from the most recent previous build with | 
|  | 1737 | the same target, task and machine. | 
|  | 1738 | """ | 
|  | 1739 | deploy_dir_image = \ | 
|  | 1740 | self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0] | 
|  | 1741 |  | 
|  | 1742 | # if there's no DEPLOY_DIR_IMAGE, there aren't going to be | 
|  | 1743 | # any image artifacts, so we can return immediately | 
|  | 1744 | if not deploy_dir_image: | 
|  | 1745 | return | 
|  | 1746 |  | 
|  | 1747 | buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] | 
|  | 1748 | machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] | 
|  | 1749 | image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0] | 
|  | 1750 |  | 
|  | 1751 | # location of the manifest files for this build; | 
|  | 1752 | # note that this file is only produced if an image is produced | 
|  | 1753 | license_directory = \ | 
|  | 1754 | self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0] | 
|  | 1755 |  | 
|  | 1756 | # file name extensions for image files | 
|  | 1757 | image_file_extensions_unique = {} | 
|  | 1758 | image_fstypes = self.server.runCommand( | 
|  | 1759 | ['getVariable', 'IMAGE_FSTYPES'])[0] | 
| Andrew Geissler | 82c905d | 2020-04-13 13:39:40 -0500 | [diff] [blame] | 1760 | if image_fstypes is not None: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1761 | image_types_str = image_fstypes.strip() | 
|  | 1762 | image_file_extensions = re.sub(r' {2,}', ' ', image_types_str) | 
|  | 1763 | image_file_extensions_unique = set(image_file_extensions.split(' ')) | 
|  | 1764 |  | 
|  | 1765 | targets = self.internal_state['targets'] | 
|  | 1766 |  | 
|  | 1767 | # filter out anything which isn't an image target | 
|  | 1768 | image_targets = [target for target in targets if target.is_image] | 
|  | 1769 |  | 
|  | 1770 | for image_target in image_targets: | 
|  | 1771 | # this is set to True if we find at least one file relating to | 
|  | 1772 | # this target; if this remains False after the scan, we copy the | 
|  | 1773 | # files from the most-recent Target with the same target + machine | 
|  | 1774 | # onto this Target instead | 
|  | 1775 | has_files = False | 
|  | 1776 |  | 
|  | 1777 | # we construct this because by the time we reach | 
|  | 1778 | # BuildCompleted, this has reset to | 
|  | 1779 | # 'defaultpkgname-<MACHINE>-<BUILDNAME>'; | 
|  | 1780 | # we need to change it to | 
|  | 1781 | # <TARGET>-<MACHINE>-<BUILDNAME> | 
|  | 1782 | real_image_name = re.sub(r'^defaultpkgname', image_target.target, | 
|  | 1783 | image_name) | 
|  | 1784 |  | 
|  | 1785 | image_license_manifest_path = os.path.join( | 
|  | 1786 | license_directory, | 
|  | 1787 | real_image_name, | 
|  | 1788 | 'image_license.manifest') | 
|  | 1789 |  | 
|  | 1790 | image_package_manifest_path = os.path.join( | 
|  | 1791 | license_directory, | 
|  | 1792 | real_image_name, | 
|  | 1793 | 'image_license.manifest') | 
|  | 1794 |  | 
|  | 1795 | # if image_license.manifest exists, we can read the names of | 
|  | 1796 | # bzImage, modules etc. files for this build from it, then look for | 
|  | 1797 | # them in the DEPLOY_DIR_IMAGE; note that this file is only produced | 
|  | 1798 | # if an image file was produced | 
|  | 1799 | if os.path.isfile(image_license_manifest_path): | 
|  | 1800 | has_files = True | 
|  | 1801 |  | 
|  | 1802 | basenames = self._get_filenames_from_image_license( | 
|  | 1803 | image_license_manifest_path) | 
|  | 1804 |  | 
|  | 1805 | for basename in basenames: | 
|  | 1806 | artifact_path = os.path.join(deploy_dir_image, basename) | 
|  | 1807 | if not os.path.exists(artifact_path): | 
|  | 1808 | logger.warning("artifact %s doesn't exist, skipping" % artifact_path) | 
|  | 1809 | continue | 
|  | 1810 | artifact_size = os.stat(artifact_path).st_size | 
|  | 1811 |  | 
|  | 1812 | # note that the artifact will only be saved against this | 
|  | 1813 | # build if it hasn't been already | 
|  | 1814 | self.orm_wrapper.save_target_kernel_file(image_target, | 
|  | 1815 | artifact_path, artifact_size) | 
|  | 1816 |  | 
|  | 1817 | # store the license manifest path on the target | 
|  | 1818 | # (this file is also created any time an image file is created) | 
|  | 1819 | license_manifest_path = os.path.join(license_directory, | 
|  | 1820 | real_image_name, 'license.manifest') | 
|  | 1821 |  | 
|  | 1822 | self.orm_wrapper.update_target_set_license_manifest( | 
|  | 1823 | image_target, license_manifest_path) | 
|  | 1824 |  | 
|  | 1825 | # store the package manifest path on the target (this file | 
|  | 1826 | # is created any time an image file is created) | 
|  | 1827 | package_manifest_path = os.path.join(deploy_dir_image, | 
|  | 1828 | real_image_name + '.rootfs.manifest') | 
|  | 1829 |  | 
|  | 1830 | if os.path.exists(package_manifest_path): | 
|  | 1831 | self.orm_wrapper.update_target_set_package_manifest( | 
|  | 1832 | image_target, package_manifest_path) | 
|  | 1833 |  | 
|  | 1834 | # scan the directory for image files relating to this build | 
|  | 1835 | # (via real_image_name); note that we don't have to set | 
|  | 1836 | # has_files = True, as searching for the license manifest file | 
|  | 1837 | # will already have set it to true if at least one image file was | 
|  | 1838 | # produced; note that the real_image_name includes BUILDNAME, which | 
|  | 1839 | # in turn includes a timestamp; so if no files were produced for | 
|  | 1840 | # this timestamp (i.e. the build reused existing image files already | 
|  | 1841 | # in the directory), no files will be recorded against this target | 
|  | 1842 | image_files = self._get_image_files(deploy_dir_image, | 
|  | 1843 | real_image_name, image_file_extensions_unique) | 
|  | 1844 |  | 
|  | 1845 | for image_file in image_files: | 
|  | 1846 | self.orm_wrapper.save_target_image_file_information( | 
|  | 1847 | image_target, image_file['path'], image_file['size']) | 
|  | 1848 |  | 
|  | 1849 | if not has_files: | 
|  | 1850 | # copy image files and build artifacts from the | 
|  | 1851 | # most-recently-built Target with the | 
|  | 1852 | # same target + machine as this Target; also copy the license | 
|  | 1853 | # manifest path, as that is not treated as an artifact and needs | 
|  | 1854 | # to be set separately | 
|  | 1855 | similar_target = \ | 
|  | 1856 | self.orm_wrapper.get_similar_target_with_image_files( | 
|  | 1857 | image_target) | 
|  | 1858 |  | 
|  | 1859 | if similar_target: | 
|  | 1860 | logger.info('image artifacts for target %s cloned from ' \ | 
|  | 1861 | 'target %s' % (image_target.pk, similar_target.pk)) | 
|  | 1862 | self.orm_wrapper.clone_image_artifacts(similar_target, | 
|  | 1863 | image_target) | 
|  | 1864 |  | 
|  | 1865 | def _get_sdk_targets(self): | 
|  | 1866 | """ | 
|  | 1867 | Return targets which could generate SDK artifacts, i.e. | 
|  | 1868 | "do_populate_sdk" and "do_populate_sdk_ext". | 
|  | 1869 | """ | 
|  | 1870 | return [target for target in self.internal_state['targets'] \ | 
|  | 1871 | if target.task in ['populate_sdk', 'populate_sdk_ext']] | 
|  | 1872 |  | 
|  | 1873 | def scan_sdk_artifacts(self, event): | 
|  | 1874 | """ | 
|  | 1875 | Note that we have to intercept an SDKArtifactInfo event from | 
|  | 1876 | toaster.bbclass (via toasterui) to get hold of the SDK variables we | 
|  | 1877 | need to be able to scan for files accurately: this is because | 
|  | 1878 | variables like TOOLCHAIN_OUTPUTNAME have reset to None by the time | 
|  | 1879 | BuildCompleted is fired by bitbake, so we have to get those values | 
|  | 1880 | while the build is still in progress. | 
|  | 1881 |  | 
|  | 1882 | For populate_sdk_ext, this runs twice, with two different | 
|  | 1883 | TOOLCHAIN_OUTPUTNAME settings, each of which will capture some of the | 
|  | 1884 | files in the SDK output directory. | 
|  | 1885 | """ | 
|  | 1886 | sdk_vars = BuildInfoHelper._get_data_from_event(event) | 
|  | 1887 | toolchain_outputname = sdk_vars['TOOLCHAIN_OUTPUTNAME'] | 
|  | 1888 |  | 
|  | 1889 | # targets which might have created SDK artifacts | 
|  | 1890 | sdk_targets = self._get_sdk_targets() | 
|  | 1891 |  | 
|  | 1892 | # location of SDK artifacts | 
|  | 1893 | tmpdir = self.server.runCommand(['getVariable', 'TMPDIR'])[0] | 
|  | 1894 | sdk_dir = os.path.join(tmpdir, 'deploy', 'sdk') | 
|  | 1895 |  | 
|  | 1896 | # all files in the SDK directory | 
|  | 1897 | artifacts = [] | 
|  | 1898 | for dir_path, _, filenames in os.walk(sdk_dir): | 
|  | 1899 | for filename in filenames: | 
|  | 1900 | full_path = os.path.join(dir_path, filename) | 
|  | 1901 | if not os.path.islink(full_path): | 
|  | 1902 | artifacts.append(full_path) | 
|  | 1903 |  | 
|  | 1904 | for sdk_target in sdk_targets: | 
|  | 1905 | # find files in the SDK directory which haven't already been | 
|  | 1906 | # recorded against a Target and whose basename matches | 
|  | 1907 | # TOOLCHAIN_OUTPUTNAME | 
|  | 1908 | for artifact_path in artifacts: | 
|  | 1909 | basename = os.path.basename(artifact_path) | 
|  | 1910 |  | 
|  | 1911 | toolchain_match = basename.startswith(toolchain_outputname) | 
|  | 1912 |  | 
|  | 1913 | # files which match the name of the target which produced them; | 
|  | 1914 | # for example, | 
|  | 1915 | # poky-glibc-x86_64-core-image-sato-i586-toolchain-ext-2.1+snapshot.sh | 
|  | 1916 | target_match = re.search(sdk_target.target, basename) | 
|  | 1917 |  | 
|  | 1918 | # targets which produce "*-nativesdk-*" files | 
|  | 1919 | is_ext_sdk_target = sdk_target.task in \ | 
|  | 1920 | ['do_populate_sdk_ext', 'populate_sdk_ext'] | 
|  | 1921 |  | 
|  | 1922 | # SDK files which don't match the target name, i.e. | 
|  | 1923 | # x86_64-nativesdk-libc.* | 
|  | 1924 | # poky-glibc-x86_64-buildtools-tarball-i586-buildtools-nativesdk-standalone-2.1+snapshot* | 
|  | 1925 | is_ext_sdk_file = re.search('-nativesdk-', basename) | 
|  | 1926 |  | 
|  | 1927 | file_from_target = (toolchain_match and target_match) or \ | 
|  | 1928 | (is_ext_sdk_target and is_ext_sdk_file) | 
|  | 1929 |  | 
|  | 1930 | if file_from_target: | 
|  | 1931 | # don't record the file if it's already been added to this | 
|  | 1932 | # target | 
|  | 1933 | matching_files = TargetSDKFile.objects.filter( | 
|  | 1934 | target=sdk_target, file_name=artifact_path) | 
|  | 1935 |  | 
|  | 1936 | if matching_files.count() == 0: | 
|  | 1937 | artifact_size = os.stat(artifact_path).st_size | 
|  | 1938 |  | 
|  | 1939 | self.orm_wrapper.save_target_sdk_file( | 
|  | 1940 | sdk_target, artifact_path, artifact_size) | 
|  | 1941 |  | 
|  | 1942 | def clone_required_sdk_artifacts(self): | 
|  | 1943 | """ | 
|  | 1944 | If an SDK target doesn't have any SDK artifacts, this means that | 
|  | 1945 | the postfuncs of populate_sdk or populate_sdk_ext didn't fire, which | 
|  | 1946 | in turn means that the targets of this build didn't generate any new | 
|  | 1947 | artifacts. | 
|  | 1948 |  | 
|  | 1949 | In this case, clone SDK artifacts for targets in the current build | 
|  | 1950 | from existing targets for this build. | 
|  | 1951 | """ | 
|  | 1952 | sdk_targets = self._get_sdk_targets() | 
|  | 1953 | for sdk_target in sdk_targets: | 
|  | 1954 | # only clone for SDK targets which have no TargetSDKFiles yet | 
|  | 1955 | if sdk_target.targetsdkfile_set.all().count() == 0: | 
|  | 1956 | similar_target = \ | 
|  | 1957 | self.orm_wrapper.get_similar_target_with_sdk_files( | 
|  | 1958 | sdk_target) | 
|  | 1959 | if similar_target: | 
|  | 1960 | logger.info('SDK artifacts for target %s cloned from ' \ | 
|  | 1961 | 'target %s' % (sdk_target.pk, similar_target.pk)) | 
|  | 1962 | self.orm_wrapper.clone_sdk_artifacts(similar_target, | 
|  | 1963 | sdk_target) | 
|  | 1964 |  | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1965 | def close(self, errorcode): | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1966 | self._store_build_done(errorcode) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1967 |  | 
|  | 1968 | if 'backlog' in self.internal_state: | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1969 | # we save missed events in the database for the current build | 
|  | 1970 | tempevent = self.internal_state['backlog'].pop() | 
| Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 1971 | # Do not skip command line build events | 
|  | 1972 | self.store_log_event(tempevent,False) | 
| Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1973 |  | 
| Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 1974 |  | 
|  | 1975 | # unset the brbe; this is to prevent subsequent command-line builds | 
|  | 1976 | # being incorrectly attached to the previous Toaster-triggered build; | 
|  | 1977 | # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021 | 
|  | 1978 | self.brbe = None | 
| Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 1979 |  | 
|  | 1980 | # unset the internal Build object to prevent it being reused for the | 
|  | 1981 | # next build | 
|  | 1982 | self.internal_state['build'] = None |