| 1 | #
| 2 | # ex:ts=4:sw=4:sts=4:et |
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- |
| 4 | # |
| 5 | # BitBake Toaster Implementation |
| 6 | # |
| 7 | # Copyright (C) 2013 Intel Corporation |
| 8 | # |
| 9 | # This program is free software; you can redistribute it and/or modify |
| 10 | # it under the terms of the GNU General Public License version 2 as |
| 11 | # published by the Free Software Foundation. |
| 12 | # |
| 13 | # This program is distributed in the hope that it will be useful, |
| 14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 16 | # GNU General Public License for more details. |
| 17 | # |
| 18 | # You should have received a copy of the GNU General Public License along |
| 19 | # with this program; if not, write to the Free Software Foundation, Inc., |
| 20 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
| 21 | |
| 22 | from django.db import models, IntegrityError |
| 23 | from django.db.models import F, Q, Avg, Max |
| 24 | from django.utils import timezone |
| 25 | |
| 26 | from django.core.urlresolvers import reverse |
| 27 | |
| 28 | from django.core import validators |
| 29 | from django.conf import settings |
| 30 | import django.db.models.signals |
| 31 | |
| 32 | |
| 33 | import logging |
| 34 | logger = logging.getLogger("toaster") |
| 35 | |
| 36 | |
| 37 | class GitURLValidator(validators.URLValidator): |
| 38 | import re |
| 39 | regex = re.compile( |
| 40 | r'^(?:ssh|git|http|ftp)s?://' # http:// or https:// |
| 41 | r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... |
| 42 | r'localhost|' # localhost... |
| 43 | r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4 |
| 44 | r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6 |
| 45 | r'(?::\d+)?' # optional port |
| 46 | r'(?:/?|[/?]\S+)$', re.IGNORECASE) |
| 47 | |
| 48 | def GitURLField(**kwargs): |
| 49 | r = models.URLField(**kwargs) |
| 50 | for i in xrange(len(r.validators)): |
| 51 | if isinstance(r.validators[i], validators.URLValidator): |
| 52 | r.validators[i] = GitURLValidator() |
| 53 | return r |
| 54 | |
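# Illustrative note (not part of the original file): GitURLField behaves like a
# standard URLField but accepts git/ssh repository URLs that the stock Django
# URLValidator rejects, e.g. "git://git.yoctoproject.org/poky" or
# "ssh://git.example.com/repo.git".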
| 55 | |
| 56 | class ToasterSetting(models.Model): |
| 57 | name = models.CharField(max_length=63) |
| 58 | helptext = models.TextField() |
| 59 | value = models.CharField(max_length=255) |
| 60 | |
| 61 | def __unicode__(self): |
| 62 | return "Setting %s = %s" % (self.name, self.value) |
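    # Illustrative example (not part of the original file): settings whose name
    # starts with "DEFCONF_" seed every newly created project with a matching
    # ProjectVariable (see ProjectManager.create_project below). The variable
    # name and value here are assumptions for the example.
    #
    #     ToasterSetting.objects.create(name="DEFCONF_IMAGE_FSTYPES",
    #                                   helptext="default image types",
    #                                   value="ext4 tar.bz2")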
| 63 | |
| 64 | class ProjectManager(models.Manager): |
| 65 | def create_project(self, name, release): |
| 66 | if release is not None: |
| 67 | prj = self.model(name = name, bitbake_version = release.bitbake_version, release = release) |
| 68 | else: |
| 69 | prj = self.model(name = name, bitbake_version = None, release = None) |
| 70 | |
| 71 | prj.save() |
| 72 | |
| 73 | for defaultconf in ToasterSetting.objects.filter(name__startswith="DEFCONF_"): |
| 74 | name = defaultconf.name[8:] |
| 75 | ProjectVariable.objects.create( project = prj, |
| 76 | name = name, |
| 77 | value = defaultconf.value) |
| 78 | |
| 79 | if release is None: |
| 80 | return prj |
| 81 | |
| 82 | for rdl in release.releasedefaultlayer_set.all(): |
| 83 | try: |
| 84 | lv = Layer_Version.objects.filter(layer__name = rdl.layer_name, up_branch__name = release.branch_name)[0].get_equivalents_wpriority(prj)[0] |
| 85 | ProjectLayer.objects.create( project = prj, |
| 86 | layercommit = lv, |
| 87 | optional = False ) |
| 88 | except IndexError: |
| 89 | # we may have no valid layer version objects, and that's ok |
| 90 | pass |
| 91 | |
| 92 | return prj |
| 93 | |
| 94 | def create(self, *args, **kwargs): |
| 95 | raise Exception("Invalid call to Project.objects.create. Use Project.objects.create_project() to create a project") |
| 96 | |
| 97 | # return single object with is_default = True |
| 98 | def get_default_project(self): |
| 99 | projects = super(ProjectManager, self).filter(is_default = True) |
| 100 | if len(projects) > 1: |
| 101 | raise Exception("Inconsistent project data: multiple " + |
| 102 | "default projects (i.e. with is_default=True)") |
| 103 | elif len(projects) < 1: |
| 104 | raise Exception("Inconsistent project data: no default project found") |
| 105 | return projects[0] |
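    # Illustrative usage sketch (not part of the original file): projects must be
    # created through this manager; a plain Project.objects.create() raises the
    # exception above. The release name is an assumption for the example.
    #
    #     release = Release.objects.get(name="master")
    #     prj = Project.objects.create_project(name="my-project", release=release)
    #     default_prj = Project.objects.get_default_project()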
| 106 | |
| 107 | class Project(models.Model): |
| 108 | search_allowed_fields = ['name', 'short_description', 'release__name', 'release__branch_name'] |
| 109 | name = models.CharField(max_length=100) |
| 110 | short_description = models.CharField(max_length=50, blank=True) |
| 111 | bitbake_version = models.ForeignKey('BitbakeVersion', null=True) |
| 112 | release = models.ForeignKey("Release", null=True) |
| 113 | created = models.DateTimeField(auto_now_add = True) |
| 114 | updated = models.DateTimeField(auto_now = True) |
| 115 |     # This is a horrible hack; since Toaster has no "User" model available when
| 116 |     # running in interactive mode, we can't reference the field here directly.
| 117 |     # Instead, we keep a possibly-null reference to the User id, so as not to force
| 118 |     # hard links to possibly missing models.
| 119 | user_id = models.IntegerField(null = True) |
| 120 | objects = ProjectManager() |
| 121 | |
| 122 | # set to True for the project which is the default container |
| 123 | # for builds initiated by the command line etc. |
| 124 | is_default = models.BooleanField(default = False) |
| 125 | |
| 126 | def __unicode__(self): |
| 127 | return "%s (Release %s, BBV %s)" % (self.name, self.release, self.bitbake_version) |
| 128 | |
| 129 | def get_current_machine_name(self): |
| 130 | try: |
| 131 | return self.projectvariable_set.get(name="MACHINE").value |
| 132 | except (ProjectVariable.DoesNotExist,IndexError): |
| 133 | return( "None" ); |
| 134 | |
| 135 | def get_number_of_builds(self): |
| 136 | try: |
| 137 | return len(Build.objects.filter( project = self.id )) |
| 138 | except (Build.DoesNotExist,IndexError): |
| 139 | return( 0 ) |
| 140 | |
| 141 | def get_last_build_id(self): |
| 142 | try: |
| 143 | return Build.objects.filter( project = self.id ).order_by('-completed_on')[0].id |
| 144 | except (Build.DoesNotExist,IndexError): |
| 145 | return( -1 ) |
| 146 | |
| 147 |     def get_last_outcome(self):
| 148 |         build_id = self.get_last_build_id()
| 149 |         if (-1 == build_id):
| 150 |             return( "" )
| 151 |         try:
| 152 |             return Build.objects.filter( id = build_id )[ 0 ].outcome
| 153 |         except (Build.DoesNotExist,IndexError):
| 154 |             return( "not_found" )
| 155 | 
| 156 |     def get_last_target(self):
| 157 |         build_id = self.get_last_build_id()
| 158 |         if (-1 == build_id):
| 159 |             return( "" )
| 160 |         try:
| 161 |             return Target.objects.filter(build = build_id)[0].target
| 162 |         except (Target.DoesNotExist,IndexError):
| 163 |             return( "not_found" )
| 164 | 
| 165 |     def get_last_errors(self):
| 166 |         build_id = self.get_last_build_id()
| 167 |         if (-1 == build_id):
| 168 |             return( 0 )
| 169 |         try:
| 170 |             return Build.objects.filter(id = build_id)[ 0 ].errors.count()
| 171 |         except (Build.DoesNotExist,IndexError):
| 172 |             return( "not_found" )
| 173 | 
| 174 |     def get_last_warnings(self):
| 175 |         build_id = self.get_last_build_id()
| 176 |         if (-1 == build_id):
| 177 |             return( 0 )
| 178 |         try:
| 179 |             return Build.objects.filter(id = build_id)[ 0 ].warnings.count()
| 180 |         except (Build.DoesNotExist,IndexError):
| 181 |             return( "not_found" )
| 182 | 
| 183 |     def get_last_imgfiles(self):
| 184 |         build_id = self.get_last_build_id()
| 185 |         if (-1 == build_id):
| 186 |             return( "" )
| 187 |         try:
| 188 |             return Variable.objects.filter(build = build_id, variable_name = "IMAGE_FSTYPES")[ 0 ].variable_value
| 189 |         except (Variable.DoesNotExist,IndexError):
| 190 |             return( "not_found" )
| 191 | |
| 192 | # returns a queryset of compatible layers for a project |
| 193 | def compatible_layerversions(self, release = None, layer_name = None): |
| 194 | if release == None: |
| 195 | release = self.release |
| 196 | # layers on the same branch or layers specifically set for this project |
| 197 | queryset = Layer_Version.objects.filter((Q(up_branch__name = release.branch_name) & Q(project = None)) | Q(project = self) | Q(build__project = self)) |
| 198 | |
| 199 | if layer_name is not None: |
| 200 | # we select only a layer name |
| 201 | queryset = queryset.filter(layer__name = layer_name) |
| 202 | |
| 203 | # order by layer version priority |
| 204 | queryset = queryset.filter(Q(layer_source=None) | Q(layer_source__releaselayersourcepriority__release = release)).select_related('layer_source', 'layer', 'up_branch', "layer_source__releaselayersourcepriority__priority").order_by("-layer_source__releaselayersourcepriority__priority") |
| 205 | |
| 206 | return queryset |
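    # Example (illustrative, not part of the original file): listing the layer
    # versions compatible with a project, optionally restricted to one layer.
    # The layer name is an assumption for the example.
    #
    #     prj = Project.objects.get_default_project()
    #     all_compatible = prj.compatible_layerversions()
    #     core_only = prj.compatible_layerversions(layer_name="openembedded-core")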
| 207 | |
| 208 | def projectlayer_equivalent_set(self): |
| 209 | return self.compatible_layerversions().filter(layer__name__in = [x.layercommit.layer.name for x in self.projectlayer_set.all()]).select_related("up_branch") |
| 210 | |
| 211 | def get_available_machines(self): |
| 212 | """ Returns QuerySet of all Machines which are provided by the |
| 213 | Layers currently added to the Project """ |
| 214 |         queryset = Machine.objects.filter(layer_version__in=self.projectlayer_equivalent_set())
| 215 | return queryset |
| 216 | |
| 217 | def get_all_compatible_machines(self): |
| 218 | """ Returns QuerySet of all the compatible machines available to the |
| 219 | project including ones from Layers not currently added """ |
| 220 | compatible_layers = self.compatible_layerversions() |
| 221 | |
| 222 | queryset = Machine.objects.filter(layer_version__in=compatible_layers) |
| 223 | return queryset |
| 224 | |
| 225 | def get_available_recipes(self): |
| 226 | """ Returns QuerySet of all Recipes which are provided by the Layers |
| 227 | currently added to the Project """ |
| 228 | project_layers = self.projectlayer_equivalent_set() |
| 229 | queryset = Recipe.objects.filter(layer_version__in = project_layers) |
| 230 | |
| 231 | # Copied from get_all_compatible_recipes |
| 232 | search_maxids = map(lambda i: i[0], list(queryset.values('name').distinct().annotate(max_id=Max('id')).values_list('max_id'))) |
| 233 | queryset = queryset.filter(id__in=search_maxids).select_related('layer_version', 'layer_version__layer', 'layer_version__up_branch', 'layer_source') |
| 234 | # End copy |
| 235 | |
| 236 | return queryset |
| 237 | |
| 238 | def get_all_compatible_recipes(self): |
| 239 | """ Returns QuerySet of all the compatible Recipes available to the |
| 240 | project including ones from Layers not currently added """ |
| 241 | compatible_layerversions = self.compatible_layerversions() |
| 242 | queryset = Recipe.objects.filter(layer_version__in = compatible_layerversions) |
| 243 | |
| 244 | search_maxids = map(lambda i: i[0], list(queryset.values('name').distinct().annotate(max_id=Max('id')).values_list('max_id'))) |
| 245 | |
| 246 | queryset = queryset.filter(id__in=search_maxids).select_related('layer_version', 'layer_version__layer', 'layer_version__up_branch', 'layer_source') |
| 247 | return queryset |
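    # Illustrative usage of the query helpers above (not part of the original
    # file). All of them return plain Django QuerySets, so callers can filter,
    # slice or count them further.
    #
    #     prj = Project.objects.get_default_project()
    #     machine_names = prj.get_all_compatible_machines().values_list("name", flat=True)
    #     image_recipes = prj.get_available_recipes().filter(is_image=True)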
| 248 | |
| 249 | |
| 250 | def schedule_build(self): |
| 251 | from bldcontrol.models import BuildRequest, BRTarget, BRLayer, BRVariable, BRBitbake |
| 252 | br = BuildRequest.objects.create(project = self) |
| 253 | try: |
| 254 | |
| 255 | BRBitbake.objects.create(req = br, |
| 256 | giturl = self.bitbake_version.giturl, |
| 257 | commit = self.bitbake_version.branch, |
| 258 | dirpath = self.bitbake_version.dirpath) |
| 259 | |
| 260 | for l in self.projectlayer_set.all().order_by("pk"): |
| 261 | commit = l.layercommit.get_vcs_reference() |
| 262 |                 logger.debug("Building layer %s at vcs point %s", l.layercommit.layer.name, commit)
| 263 | BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath) |
| 264 | |
| 265 | br.state = BuildRequest.REQ_QUEUED |
| 266 | now = timezone.now() |
| 267 | br.build = Build.objects.create(project = self, |
| 268 | completed_on=now, |
| 269 | started_on=now, |
| 270 | ) |
| 271 | for t in self.projecttarget_set.all(): |
| 272 | BRTarget.objects.create(req = br, target = t.target, task = t.task) |
| 273 | Target.objects.create(build = br.build, target = t.target) |
| 274 | |
| 275 | for v in self.projectvariable_set.all(): |
| 276 | BRVariable.objects.create(req = br, name = v.name, value = v.value) |
| 277 | |
| 278 | |
| 279 | try: |
| 280 | br.build.machine = self.projectvariable_set.get(name = 'MACHINE').value |
| 281 | br.build.save() |
| 282 | except ProjectVariable.DoesNotExist: |
| 283 | pass |
| 284 | br.save() |
| 285 | except Exception: |
| 286 | # revert the build request creation since we're not done cleanly |
| 287 | br.delete() |
| 288 | raise |
| 289 | return br |
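    # Illustrative usage sketch (not part of the original file): queuing a build
    # for a project. The target name is an assumption for the example.
    #
    #     prj = Project.objects.get_default_project()
    #     ProjectTarget.objects.create(project=prj, target="core-image-minimal")
    #     brq = prj.schedule_build()   # brq.state is now BuildRequest.REQ_QUEUED
    #
    # If anything fails while queuing, schedule_build() deletes the partially
    # created BuildRequest and re-raises the exception.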
| 290 | |
| 291 | class Build(models.Model): |
| 292 | SUCCEEDED = 0 |
| 293 | FAILED = 1 |
| 294 | IN_PROGRESS = 2 |
| 295 | |
| 296 | BUILD_OUTCOME = ( |
| 297 | (SUCCEEDED, 'Succeeded'), |
| 298 | (FAILED, 'Failed'), |
| 299 | (IN_PROGRESS, 'In Progress'), |
| 300 | ) |
| 301 | |
| 302 | search_allowed_fields = ['machine', 'cooker_log_path', "target__target", "target__target_image_file__file_name"] |
| 303 | |
| 304 | project = models.ForeignKey(Project) # must have a project |
| 305 | machine = models.CharField(max_length=100) |
| 306 | distro = models.CharField(max_length=100) |
| 307 | distro_version = models.CharField(max_length=100) |
| 308 | started_on = models.DateTimeField() |
| 309 | completed_on = models.DateTimeField() |
| 310 | outcome = models.IntegerField(choices=BUILD_OUTCOME, default=IN_PROGRESS) |
| 311 | cooker_log_path = models.CharField(max_length=500) |
| 312 | build_name = models.CharField(max_length=100) |
| 313 | bitbake_version = models.CharField(max_length=50) |
| 314 | |
| 315 | def completeper(self): |
| 316 | tf = Task.objects.filter(build = self) |
| 317 | tfc = tf.count() |
| 318 | if tfc > 0: |
| 319 | completeper = tf.exclude(order__isnull=True).count()*100/tf.count() |
| 320 | else: |
| 321 | completeper = 0 |
| 322 | return completeper |
| 323 | |
| 324 | def eta(self): |
| 325 | eta = timezone.now() |
| 326 | completeper = self.completeper() |
| 327 | if self.completeper() > 0: |
| 328 | eta += ((eta - self.started_on)*(100-completeper))/completeper |
| 329 | return eta |
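    # Worked example (illustrative): with 200 tasks of which 50 have an order
    # assigned, completeper() is 50*100/200 = 25; if the build started 10
    # minutes ago, eta() adds (10 minutes * (100-25))/25 = 30 more minutes to
    # the current time.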
| 330 | |
| 331 | |
| 332 | def get_sorted_target_list(self): |
| 333 | tgts = Target.objects.filter(build_id = self.id).order_by( 'target' ); |
| 334 | return( tgts ); |
| 335 | |
| 336 | @property |
| 337 | def toaster_exceptions(self): |
| 338 | return self.logmessage_set.filter(level=LogMessage.EXCEPTION) |
| 339 | |
| 340 | @property |
| 341 | def errors(self): |
| 342 | return (self.logmessage_set.filter(level=LogMessage.ERROR)|self.logmessage_set.filter(level=LogMessage.EXCEPTION)) |
| 343 | |
| 344 | @property |
| 345 | def warnings(self): |
| 346 | return self.logmessage_set.filter(level=LogMessage.WARNING) |
| 347 | |
| 348 | @property |
| 349 | def timespent_seconds(self): |
| 350 | return (self.completed_on - self.started_on).total_seconds() |
| 351 | |
| 352 | def get_current_status(self): |
| 353 | from bldcontrol.models import BuildRequest |
| 354 | if self.outcome == Build.IN_PROGRESS and self.buildrequest.state != BuildRequest.REQ_INPROGRESS: |
| 355 | return self.buildrequest.get_state_display() |
| 356 | return self.get_outcome_display() |
| 357 | |
| 358 | def __str__(self): |
| 359 | return "%d %s %s" % (self.id, self.project, ",".join([t.target for t in self.target_set.all()])) |
| 360 | |
| 361 | |
| 362 | # An Artifact is anything that results from a Build, may be of interest to the user, and is not stored elsewhere.
| 363 | class BuildArtifact(models.Model): |
| 364 | build = models.ForeignKey(Build) |
| 365 | file_name = models.FilePathField() |
| 366 | file_size = models.IntegerField() |
| 367 | |
| 368 |     def get_local_file_name(self):
| 369 |         deploydir = Variable.objects.get(build = self.build, variable_name="DEPLOY_DIR").variable_value
| 370 |         return self.file_name[len(deploydir)+1:]
| 376 | |
| 377 | |
| 378 | def is_available(self): |
| 379 | return self.build.buildrequest.environment.has_artifact(self.file_name) |
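    # Example (illustrative, with assumed paths): if the build recorded
    # DEPLOY_DIR as "/home/user/build/tmp/deploy" and file_name is
    # "/home/user/build/tmp/deploy/images/qemux86/bzImage", then
    # get_local_file_name() returns "images/qemux86/bzImage", while
    # is_available() asks the build environment whether the file can be served.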
| 380 | |
| 381 | class ProjectTarget(models.Model): |
| 382 | project = models.ForeignKey(Project) |
| 383 | target = models.CharField(max_length=100) |
| 384 | task = models.CharField(max_length=100, null=True) |
| 385 | |
| 386 | class Target(models.Model): |
| 387 | search_allowed_fields = ['target', 'file_name'] |
| 388 | build = models.ForeignKey(Build) |
| 389 | target = models.CharField(max_length=100) |
| 390 | task = models.CharField(max_length=100, null=True) |
| 391 | is_image = models.BooleanField(default = False) |
| 392 | image_size = models.IntegerField(default=0) |
| 393 | license_manifest_path = models.CharField(max_length=500, null=True) |
| 394 | |
| 395 | def package_count(self): |
| 396 | return Target_Installed_Package.objects.filter(target_id__exact=self.id).count() |
| 397 | |
| 398 | def __unicode__(self): |
| 399 | return self.target |
| 400 | |
| 401 | class Target_Image_File(models.Model): |
| 402 | target = models.ForeignKey(Target) |
| 403 | file_name = models.FilePathField(max_length=254) |
| 404 | file_size = models.IntegerField() |
| 405 | |
| 406 | class Target_File(models.Model): |
| 407 | ITYPE_REGULAR = 1 |
| 408 | ITYPE_DIRECTORY = 2 |
| 409 | ITYPE_SYMLINK = 3 |
| 410 | ITYPE_SOCKET = 4 |
| 411 | ITYPE_FIFO = 5 |
| 412 | ITYPE_CHARACTER = 6 |
| 413 | ITYPE_BLOCK = 7 |
| 414 | ITYPES = ( (ITYPE_REGULAR ,'regular'), |
| 415 | ( ITYPE_DIRECTORY ,'directory'), |
| 416 | ( ITYPE_SYMLINK ,'symlink'), |
| 417 | ( ITYPE_SOCKET ,'socket'), |
| 418 | ( ITYPE_FIFO ,'fifo'), |
| 419 | ( ITYPE_CHARACTER ,'character'), |
| 420 | ( ITYPE_BLOCK ,'block'), |
| 421 | ) |
| 422 | |
| 423 | target = models.ForeignKey(Target) |
| 424 | path = models.FilePathField() |
| 425 | size = models.IntegerField() |
| 426 | inodetype = models.IntegerField(choices = ITYPES) |
| 427 | permission = models.CharField(max_length=16) |
| 428 | owner = models.CharField(max_length=128) |
| 429 | group = models.CharField(max_length=128) |
| 430 | directory = models.ForeignKey('Target_File', related_name="directory_set", null=True) |
| 431 | sym_target = models.ForeignKey('Target_File', related_name="symlink_set", null=True) |
| 432 | |
| 433 | |
| 434 | class Task(models.Model): |
| 435 | |
| 436 | SSTATE_NA = 0 |
| 437 | SSTATE_MISS = 1 |
| 438 | SSTATE_FAILED = 2 |
| 439 | SSTATE_RESTORED = 3 |
| 440 | |
| 441 | SSTATE_RESULT = ( |
| 442 |         (SSTATE_NA, 'Not Applicable'), # used for tasks that do not use shared state, but they still need checking
| 443 | (SSTATE_MISS, 'File not in cache'), # the sstate object was not found |
| 444 | (SSTATE_FAILED, 'Failed'), # there was a pkg, but the script failed |
| 445 | (SSTATE_RESTORED, 'Succeeded'), # successfully restored |
| 446 | ) |
| 447 | |
| 448 | CODING_NA = 0 |
| 449 | CODING_PYTHON = 2 |
| 450 | CODING_SHELL = 3 |
| 451 | |
| 452 | TASK_CODING = ( |
| 453 | (CODING_NA, 'N/A'), |
| 454 | (CODING_PYTHON, 'Python'), |
| 455 | (CODING_SHELL, 'Shell'), |
| 456 | ) |
| 457 | |
| 458 | OUTCOME_NA = -1 |
| 459 | OUTCOME_SUCCESS = 0 |
| 460 | OUTCOME_COVERED = 1 |
| 461 | OUTCOME_CACHED = 2 |
| 462 | OUTCOME_PREBUILT = 3 |
| 463 | OUTCOME_FAILED = 4 |
| 464 | OUTCOME_EMPTY = 5 |
| 465 | |
| 466 | TASK_OUTCOME = ( |
| 467 | (OUTCOME_NA, 'Not Available'), |
| 468 | (OUTCOME_SUCCESS, 'Succeeded'), |
| 469 | (OUTCOME_COVERED, 'Covered'), |
| 470 | (OUTCOME_CACHED, 'Cached'), |
| 471 | (OUTCOME_PREBUILT, 'Prebuilt'), |
| 472 | (OUTCOME_FAILED, 'Failed'), |
| 473 | (OUTCOME_EMPTY, 'Empty'), |
| 474 | ) |
| 475 | |
| 476 | TASK_OUTCOME_HELP = ( |
| 477 | (OUTCOME_SUCCESS, 'This task successfully completed'), |
| 478 | (OUTCOME_COVERED, 'This task did not run because its output is provided by another task'), |
| 479 | (OUTCOME_CACHED, 'This task restored output from the sstate-cache directory or mirrors'), |
| 480 | (OUTCOME_PREBUILT, 'This task did not run because its outcome was reused from a previous build'), |
| 481 | (OUTCOME_FAILED, 'This task did not complete'), |
| 482 | (OUTCOME_EMPTY, 'This task has no executable content'), |
| 483 | (OUTCOME_NA, ''), |
| 484 | ) |
| 485 | |
| 486 | search_allowed_fields = [ "recipe__name", "recipe__version", "task_name", "logfile" ] |
| 487 | |
| 488 | def __init__(self, *args, **kwargs): |
| 489 | super(Task, self).__init__(*args, **kwargs) |
| 490 | try: |
| 491 | self._helptext = HelpText.objects.get(key=self.task_name, area=HelpText.VARIABLE, build=self.build).text |
| 492 | except HelpText.DoesNotExist: |
| 493 | self._helptext = None |
| 494 | |
| 495 | def get_related_setscene(self): |
| 496 | return Task.objects.filter(task_executed=True, build = self.build, recipe = self.recipe, task_name=self.task_name+"_setscene") |
| 497 | |
| 498 | def get_outcome_text(self): |
| 499 | return Task.TASK_OUTCOME[int(self.outcome) + 1][1] |
| 500 | |
| 501 | def get_outcome_help(self): |
| 502 | return Task.TASK_OUTCOME_HELP[int(self.outcome)][1] |
| 503 | |
| 504 | def get_sstate_text(self): |
| 505 | if self.sstate_result==Task.SSTATE_NA: |
| 506 | return '' |
| 507 | else: |
| 508 | return Task.SSTATE_RESULT[int(self.sstate_result)][1] |
| 509 | |
| 510 | def get_executed_display(self): |
| 511 | if self.task_executed: |
| 512 | return "Executed" |
| 513 | return "Not Executed" |
| 514 | |
| 515 | def get_description(self): |
| 516 | return self._helptext |
| 517 | |
| 518 | build = models.ForeignKey(Build, related_name='task_build') |
| 519 | order = models.IntegerField(null=True) |
| 520 |     task_executed = models.BooleanField(default=False) # True means Executed, False means Not Executed
| 521 | outcome = models.IntegerField(choices=TASK_OUTCOME, default=OUTCOME_NA) |
| 522 | sstate_checksum = models.CharField(max_length=100, blank=True) |
| 523 | path_to_sstate_obj = models.FilePathField(max_length=500, blank=True) |
| 524 | recipe = models.ForeignKey('Recipe', related_name='tasks') |
| 525 | task_name = models.CharField(max_length=100) |
| 526 | source_url = models.FilePathField(max_length=255, blank=True) |
| 527 | work_directory = models.FilePathField(max_length=255, blank=True) |
| 528 | script_type = models.IntegerField(choices=TASK_CODING, default=CODING_NA) |
| 529 | line_number = models.IntegerField(default=0) |
| 530 | disk_io = models.IntegerField(null=True) |
| 531 | cpu_usage = models.DecimalField(max_digits=8, decimal_places=2, null=True) |
| 532 | elapsed_time = models.DecimalField(max_digits=8, decimal_places=2, null=True) |
| 533 | sstate_result = models.IntegerField(choices=SSTATE_RESULT, default=SSTATE_NA) |
| 534 | message = models.CharField(max_length=240) |
| 535 | logfile = models.FilePathField(max_length=255, blank=True) |
| 536 | |
| 537 | outcome_text = property(get_outcome_text) |
| 538 | sstate_text = property(get_sstate_text) |
| 539 | |
| 540 | def __unicode__(self): |
| 541 | return "%d(%d) %s:%s" % (self.pk, self.build.pk, self.recipe.name, self.task_name) |
| 542 | |
| 543 | class Meta: |
| 544 | ordering = ('order', 'recipe' ,) |
| 545 | unique_together = ('build', 'recipe', 'task_name', ) |
| 546 | |
| 547 | |
| 548 | class Task_Dependency(models.Model): |
| 549 | task = models.ForeignKey(Task, related_name='task_dependencies_task') |
| 550 | depends_on = models.ForeignKey(Task, related_name='task_dependencies_depends') |
| 551 | |
| 552 | class Package(models.Model): |
| 553 | search_allowed_fields = ['name', 'version', 'revision', 'recipe__name', 'recipe__version', 'recipe__license', 'recipe__layer_version__layer__name', 'recipe__layer_version__branch', 'recipe__layer_version__commit', 'recipe__layer_version__local_path', 'installed_name'] |
| 554 | build = models.ForeignKey('Build') |
| 555 | recipe = models.ForeignKey('Recipe', null=True) |
| 556 | name = models.CharField(max_length=100) |
| 557 | installed_name = models.CharField(max_length=100, default='') |
| 558 | version = models.CharField(max_length=100, blank=True) |
| 559 | revision = models.CharField(max_length=32, blank=True) |
| 560 | summary = models.TextField(blank=True) |
| 561 | description = models.TextField(blank=True) |
| 562 | size = models.IntegerField(default=0) |
| 563 | installed_size = models.IntegerField(default=0) |
| 564 | section = models.CharField(max_length=80, blank=True) |
| 565 | license = models.CharField(max_length=80, blank=True) |
| 566 | |
| 567 | class Package_DependencyManager(models.Manager): |
| 568 | use_for_related_fields = True |
| 569 | |
| 570 | def get_query_set(self): |
| 571 | return super(Package_DependencyManager, self).get_query_set().exclude(package_id = F('depends_on__id')) |
| 572 | |
| 573 | class Package_Dependency(models.Model): |
| 574 | TYPE_RDEPENDS = 0 |
| 575 | TYPE_TRDEPENDS = 1 |
| 576 | TYPE_RRECOMMENDS = 2 |
| 577 | TYPE_TRECOMMENDS = 3 |
| 578 | TYPE_RSUGGESTS = 4 |
| 579 | TYPE_RPROVIDES = 5 |
| 580 | TYPE_RREPLACES = 6 |
| 581 | TYPE_RCONFLICTS = 7 |
| 582 |     # TODO: bpackage should be changed to remove the DEPENDS_TYPE access
| 583 | DEPENDS_TYPE = ( |
| 584 | (TYPE_RDEPENDS, "depends"), |
| 585 | (TYPE_TRDEPENDS, "depends"), |
| 586 | (TYPE_TRECOMMENDS, "recommends"), |
| 587 | (TYPE_RRECOMMENDS, "recommends"), |
| 588 | (TYPE_RSUGGESTS, "suggests"), |
| 589 | (TYPE_RPROVIDES, "provides"), |
| 590 | (TYPE_RREPLACES, "replaces"), |
| 591 | (TYPE_RCONFLICTS, "conflicts"), |
| 592 | ) |
| 593 |     """ Dictionary indexed by dep_type, in view order, mapping each dependency
| 594 |         type to a short name and a help-text format string; the format string
| 595 |         is filled in with the package names when displayed.
| 596 |     """
| 597 | DEPENDS_DICT = { |
| 598 | TYPE_RDEPENDS : ("depends", "%s is required to run %s"), |
| 599 | TYPE_TRDEPENDS : ("depends", "%s is required to run %s"), |
| 600 | TYPE_TRECOMMENDS : ("recommends", "%s extends the usability of %s"), |
| 601 | TYPE_RRECOMMENDS : ("recommends", "%s extends the usability of %s"), |
| 602 | TYPE_RSUGGESTS : ("suggests", "%s is suggested for installation with %s"), |
| 603 | TYPE_RPROVIDES : ("provides", "%s is provided by %s"), |
| 604 | TYPE_RREPLACES : ("replaces", "%s is replaced by %s"), |
| 605 | TYPE_RCONFLICTS : ("conflicts", "%s conflicts with %s, which will not be installed if this package is not first removed"), |
| 606 | } |
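    # Example (illustrative, with hypothetical package names): rendering the
    # help text for a dependency row.
    #
    #     short_name, help_tpl = Package_Dependency.DEPENDS_DICT[Package_Dependency.TYPE_RDEPENDS]
    #     help_tpl % ("libfoo", "bar")   # -> "libfoo is required to run bar"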
| 607 | |
| 608 | package = models.ForeignKey(Package, related_name='package_dependencies_source') |
| 609 | depends_on = models.ForeignKey(Package, related_name='package_dependencies_target') # soft dependency |
| 610 | dep_type = models.IntegerField(choices=DEPENDS_TYPE) |
| 611 | target = models.ForeignKey(Target, null=True) |
| 612 | objects = Package_DependencyManager() |
| 613 | |
| 614 | class Target_Installed_Package(models.Model): |
| 615 | target = models.ForeignKey(Target) |
| 616 | package = models.ForeignKey(Package, related_name='buildtargetlist_package') |
| 617 | |
| 618 | class Package_File(models.Model): |
| 619 | package = models.ForeignKey(Package, related_name='buildfilelist_package') |
| 620 | path = models.FilePathField(max_length=255, blank=True) |
| 621 | size = models.IntegerField() |
| 622 | |
| 623 | class Recipe(models.Model): |
| 624 | search_allowed_fields = ['name', 'version', 'file_path', 'section', 'summary', 'description', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__local_path', 'layer_version__layer_source__name'] |
| 625 | |
| 626 | layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this recipe |
| 627 | up_id = models.IntegerField(null = True, default = None) # id of entry in the source |
| 628 | up_date = models.DateTimeField(null = True, default = None) |
| 629 | |
| 630 | name = models.CharField(max_length=100, blank=True) # pn |
| 631 | version = models.CharField(max_length=100, blank=True) # pv |
| 632 | layer_version = models.ForeignKey('Layer_Version', related_name='recipe_layer_version') |
| 633 | summary = models.TextField(blank=True) |
| 634 | description = models.TextField(blank=True) |
| 635 | section = models.CharField(max_length=100, blank=True) |
| 636 | license = models.CharField(max_length=200, blank=True) |
| 637 | homepage = models.URLField(blank=True) |
| 638 | bugtracker = models.URLField(blank=True) |
| 639 | file_path = models.FilePathField(max_length=255) |
| 640 | pathflags = models.CharField(max_length=200, blank=True) |
| 641 | is_image = models.BooleanField(default=False) |
| 642 | |
| 643 | def get_layersource_view_url(self): |
| 644 | if self.layer_source is None: |
| 645 | return "" |
| 646 | |
| 647 | url = self.layer_source.get_object_view(self.layer_version.up_branch, "recipes", self.name) |
| 648 | return url |
| 649 | |
| 650 | def __unicode__(self): |
| 651 | return "Recipe " + self.name + ":" + self.version |
| 652 | |
| 653 | def get_vcs_recipe_file_link_url(self): |
| 654 | return self.layer_version.get_vcs_file_link_url(self.file_path) |
| 655 | |
| 656 | def get_description_or_summary(self): |
| 657 | if self.description: |
| 658 | return self.description |
| 659 | elif self.summary: |
| 660 | return self.summary |
| 661 | else: |
| 662 | return "" |
| 663 | |
| 664 | class Meta: |
| 665 | unique_together = (("layer_version", "file_path", "pathflags"), ) |
| 666 | |
| 667 | |
| 668 | class Recipe_DependencyManager(models.Manager): |
| 669 | use_for_related_fields = True |
| 670 | |
| 671 | def get_query_set(self): |
| 672 | return super(Recipe_DependencyManager, self).get_query_set().exclude(recipe_id = F('depends_on__id')) |
| 673 | |
| 674 | class Recipe_Dependency(models.Model): |
| 675 | TYPE_DEPENDS = 0 |
| 676 | TYPE_RDEPENDS = 1 |
| 677 | |
| 678 | DEPENDS_TYPE = ( |
| 679 | (TYPE_DEPENDS, "depends"), |
| 680 | (TYPE_RDEPENDS, "rdepends"), |
| 681 | ) |
| 682 | recipe = models.ForeignKey(Recipe, related_name='r_dependencies_recipe') |
| 683 | depends_on = models.ForeignKey(Recipe, related_name='r_dependencies_depends') |
| 684 | dep_type = models.IntegerField(choices=DEPENDS_TYPE) |
| 685 | objects = Recipe_DependencyManager() |
| 686 | |
| 687 | |
| 688 | class Machine(models.Model): |
| 689 | search_allowed_fields = ["name", "description", "layer_version__layer__name"] |
| 690 | layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this machine |
| 691 | up_id = models.IntegerField(null = True, default = None) # id of entry in the source |
| 692 | up_date = models.DateTimeField(null = True, default = None) |
| 693 | |
| 694 | layer_version = models.ForeignKey('Layer_Version') |
| 695 | name = models.CharField(max_length=255) |
| 696 | description = models.CharField(max_length=255) |
| 697 | |
| 698 | def get_vcs_machine_file_link_url(self): |
| 699 | path = 'conf/machine/'+self.name+'.conf' |
| 700 | |
| 701 | return self.layer_version.get_vcs_file_link_url(path) |
| 702 | |
| 703 | def __unicode__(self): |
| 704 | return "Machine " + self.name + "(" + self.description + ")" |
| 705 | |
| 706 | class Meta: |
| 707 | unique_together = ("layer_source", "up_id") |
| 708 | |
| 709 | |
| 710 | from django.db.models.base import ModelBase |
| 711 | |
| 712 | class InheritanceMetaclass(ModelBase): |
| 713 | def __call__(cls, *args, **kwargs): |
| 714 | obj = super(InheritanceMetaclass, cls).__call__(*args, **kwargs) |
| 715 | return obj.get_object() |
| 716 | |
| 717 | |
| 718 | class LayerSource(models.Model): |
| 719 | __metaclass__ = InheritanceMetaclass |
| 720 | |
| 721 | class Meta: |
| 722 | unique_together = (('sourcetype', 'apiurl'), ) |
| 723 | |
| 724 | TYPE_LOCAL = 0 |
| 725 | TYPE_LAYERINDEX = 1 |
| 726 | TYPE_IMPORTED = 2 |
| 727 | SOURCE_TYPE = ( |
| 728 | (TYPE_LOCAL, "local"), |
| 729 | (TYPE_LAYERINDEX, "layerindex"), |
| 730 | (TYPE_IMPORTED, "imported"), |
| 731 | ) |
| 732 | |
| 733 | name = models.CharField(max_length=63, unique = True) |
| 734 | sourcetype = models.IntegerField(choices=SOURCE_TYPE) |
| 735 | apiurl = models.CharField(max_length=255, null=True, default=None) |
| 736 | |
| 737 | def __init__(self, *args, **kwargs): |
| 738 | super(LayerSource, self).__init__(*args, **kwargs) |
| 739 | if self.sourcetype == LayerSource.TYPE_LOCAL: |
| 740 | self.__class__ = LocalLayerSource |
| 741 | elif self.sourcetype == LayerSource.TYPE_LAYERINDEX: |
| 742 | self.__class__ = LayerIndexLayerSource |
| 743 | elif self.sourcetype == LayerSource.TYPE_IMPORTED: |
| 744 | self.__class__ = ImportedLayerSource |
| 745 | elif self.sourcetype == None: |
| 746 | raise Exception("Unknown LayerSource-derived class. If you added a new layer source type, fill out all code stubs.") |
| 747 | |
| 748 | |
| 749 | def update(self): |
| 750 | """ |
| 751 | Updates the local database information from the upstream layer source |
| 752 | """ |
| 753 | raise Exception("Abstract, update() must be implemented by all LayerSource-derived classes (object is %s)" % str(vars(self))) |
| 754 | |
| 755 | def save(self, *args, **kwargs): |
| 756 | return super(LayerSource, self).save(*args, **kwargs) |
| 757 | |
| 758 | def get_object(self): |
| 759 |         # preset an uninitialized object
| 760 | if None == self.name: |
| 761 | self.name="" |
| 762 | if None == self.apiurl: |
| 763 | self.apiurl="" |
| 764 | if None == self.sourcetype: |
| 765 | self.sourcetype=LayerSource.TYPE_LOCAL |
| 766 | |
| 767 | if self.sourcetype == LayerSource.TYPE_LOCAL: |
| 768 | self.__class__ = LocalLayerSource |
| 769 | elif self.sourcetype == LayerSource.TYPE_LAYERINDEX: |
| 770 | self.__class__ = LayerIndexLayerSource |
| 771 | elif self.sourcetype == LayerSource.TYPE_IMPORTED: |
| 772 | self.__class__ = ImportedLayerSource |
| 773 | else: |
| 774 | raise Exception("Unknown LayerSource type. If you added a new layer source type, fill out all code stubs.") |
| 775 | return self |
| 776 | |
| 777 | def __unicode__(self): |
| 778 | return "%s (%s)" % (self.name, self.sourcetype) |
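    # Illustrative note (not part of the original file): because of
    # InheritanceMetaclass.__call__ and get_object() above, instantiating or
    # fetching a LayerSource yields the matching proxy subclass, so update()
    # dispatches to the subclass implementation.
    #
    #     ls = LayerSource.objects.filter(sourcetype=LayerSource.TYPE_LAYERINDEX).first()
    #     if ls is not None:
    #         ls.update()   # runs LayerIndexLayerSource.update()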
| 779 | |
| 780 | |
| 781 | class LocalLayerSource(LayerSource): |
| 782 | class Meta(LayerSource._meta.__class__): |
| 783 | proxy = True |
| 784 | |
| 785 | def __init__(self, *args, **kwargs): |
| 786 |         super(LocalLayerSource, self).__init__(*args, **kwargs)
| 787 | self.sourcetype = LayerSource.TYPE_LOCAL |
| 788 | |
| 789 | def update(self): |
| 790 | """ |
| 791 | Fetches layer, recipe and machine information from local repository |
| 792 | """ |
| 793 | pass |
| 794 | |
| 795 | class ImportedLayerSource(LayerSource): |
| 796 | class Meta(LayerSource._meta.__class__): |
| 797 | proxy = True |
| 798 | |
| 799 | def __init__(self, *args, **kwargs): |
| 800 |         super(ImportedLayerSource, self).__init__(*args, **kwargs)
| 801 | self.sourcetype = LayerSource.TYPE_IMPORTED |
| 802 | |
| 803 | def update(self): |
| 804 | """ |
| 805 | Fetches layer, recipe and machine information from local repository |
| 806 | """ |
| 807 | pass |
| 808 | |
| 809 | |
| 810 | class LayerIndexLayerSource(LayerSource): |
| 811 | class Meta(LayerSource._meta.__class__): |
| 812 | proxy = True |
| 813 | |
| 814 | def __init__(self, *args, **kwargs): |
| 815 |         super(LayerIndexLayerSource, self).__init__(*args, **kwargs)
| 816 | self.sourcetype = LayerSource.TYPE_LAYERINDEX |
| 817 | |
| 818 | def get_object_view(self, branch, objectype, upid): |
| 819 | return self.apiurl + "../branch/" + branch.name + "/" + objectype + "/?q=" + str(upid) |
| 820 | |
| 821 | def update(self): |
| 822 | """ |
| 823 | Fetches layer, recipe and machine information from remote repository |
| 824 | """ |
| 825 | assert self.apiurl is not None |
| 826 | from django.db import transaction, connection |
| 827 | |
| 828 | import urllib2, urlparse, json |
| 829 | import os |
| 830 | proxy_settings = os.environ.get("http_proxy", None) |
| 831 | |
| 832 | def _get_json_response(apiurl = self.apiurl): |
| 833 | _parsedurl = urlparse.urlparse(apiurl) |
| 834 | path = _parsedurl.path |
| 835 | |
| 836 | try: |
| 837 | res = urllib2.urlopen(apiurl) |
| 838 | except urllib2.URLError as e: |
| 839 | raise Exception("Failed to read %s: %s" % (path, e.reason)) |
| 840 | |
| 841 | return json.loads(res.read()) |
| 842 | |
| 843 | # verify we can get the basic api |
| 844 | try: |
| 845 | apilinks = _get_json_response() |
| 846 | except Exception as e: |
| 847 | import traceback |
| 848 | if proxy_settings is not None: |
| 849 | logger.info("EE: Using proxy %s" % proxy_settings) |
| 850 |             logger.warning("EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc()))
| 851 | return |
| 852 | |
| 853 |         # update branches; only those whose names are already listed in the
| 854 |         # Releases table
| 855 | whitelist_branch_names = map(lambda x: x.branch_name, Release.objects.all()) |
| 856 | if len(whitelist_branch_names) == 0: |
| 857 | raise Exception("Failed to make list of branches to fetch") |
| 858 | |
| 859 | logger.debug("Fetching branches") |
| 860 | branches_info = _get_json_response(apilinks['branches'] |
| 861 | + "?filter=name:%s" % "OR".join(whitelist_branch_names)) |
| 862 | for bi in branches_info: |
| 863 | b, created = Branch.objects.get_or_create(layer_source = self, name = bi['name']) |
| 864 | b.up_id = bi['id'] |
| 865 | b.up_date = bi['updated'] |
| 866 | b.name = bi['name'] |
| 867 | b.short_description = bi['short_description'] |
| 868 | b.save() |
| 869 | |
| 870 | # update layers |
| 871 | layers_info = _get_json_response(apilinks['layerItems']) |
| 872 | if not connection.features.autocommits_when_autocommit_is_off: |
| 873 | transaction.set_autocommit(False) |
| 874 | for li in layers_info: |
| 875 | l, created = Layer.objects.get_or_create(layer_source = self, name = li['name']) |
| 876 | l.up_id = li['id'] |
| 877 | l.up_date = li['updated'] |
| 878 | l.vcs_url = li['vcs_url'] |
| 879 | l.vcs_web_url = li['vcs_web_url'] |
| 880 | l.vcs_web_tree_base_url = li['vcs_web_tree_base_url'] |
| 881 | l.vcs_web_file_base_url = li['vcs_web_file_base_url'] |
| 882 | l.summary = li['summary'] |
| 883 | l.description = li['description'] |
| 884 | l.save() |
| 885 | if not connection.features.autocommits_when_autocommit_is_off: |
| 886 | transaction.set_autocommit(True) |
| 887 | |
| 888 | # update layerbranches/layer_versions |
| 889 | logger.debug("Fetching layer information") |
| 890 | layerbranches_info = _get_json_response(apilinks['layerBranches'] |
| 891 | + "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), [i for i in Branch.objects.filter(layer_source = self) if i.up_id is not None] )) |
| 892 | ) |
| 893 | |
| 894 | if not connection.features.autocommits_when_autocommit_is_off: |
| 895 | transaction.set_autocommit(False) |
| 896 | for lbi in layerbranches_info: |
| 897 | lv, created = Layer_Version.objects.get_or_create(layer_source = self, |
| 898 | up_id = lbi['id'], |
| 899 | layer=Layer.objects.get(layer_source = self, up_id = lbi['layer']) |
| 900 | ) |
| 901 | |
| 902 | lv.up_date = lbi['updated'] |
| 903 | lv.up_branch = Branch.objects.get(layer_source = self, up_id = lbi['branch']) |
| 904 | lv.branch = lbi['actual_branch'] |
| 905 | lv.commit = lbi['actual_branch'] |
| 906 | lv.dirpath = lbi['vcs_subdir'] |
| 907 | lv.save() |
| 908 | if not connection.features.autocommits_when_autocommit_is_off: |
| 909 | transaction.set_autocommit(True) |
| 910 | |
| 911 | # update layer dependencies |
| 912 | layerdependencies_info = _get_json_response(apilinks['layerDependencies']) |
| 913 | dependlist = {} |
| 914 | if not connection.features.autocommits_when_autocommit_is_off: |
| 915 | transaction.set_autocommit(False) |
| 916 | for ldi in layerdependencies_info: |
| 917 | try: |
| 918 | lv = Layer_Version.objects.get(layer_source = self, up_id = ldi['layerbranch']) |
| 919 | except Layer_Version.DoesNotExist as e: |
| 920 | continue |
| 921 | |
| 922 | if lv not in dependlist: |
| 923 | dependlist[lv] = [] |
| 924 | try: |
| 925 | dependlist[lv].append(Layer_Version.objects.get(layer_source = self, layer__up_id = ldi['dependency'], up_branch = lv.up_branch)) |
| 926 | except Layer_Version.DoesNotExist: |
| 927 | logger.warning("Cannot find layer version (ls:%s), up_id:%s lv:%s" % (self, ldi['dependency'], lv)) |
| 928 | |
| 929 | for lv in dependlist: |
| 930 | LayerVersionDependency.objects.filter(layer_version = lv).delete() |
| 931 | for lvd in dependlist[lv]: |
| 932 | LayerVersionDependency.objects.get_or_create(layer_version = lv, depends_on = lvd) |
| 933 | if not connection.features.autocommits_when_autocommit_is_off: |
| 934 | transaction.set_autocommit(True) |
| 935 | |
| 936 | |
| 937 | # update machines |
| 938 | logger.debug("Fetching machine information") |
| 939 | machines_info = _get_json_response(apilinks['machines'] |
| 940 | + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self))) |
| 941 | ) |
| 942 | |
| 943 | if not connection.features.autocommits_when_autocommit_is_off: |
| 944 | transaction.set_autocommit(False) |
| 945 | for mi in machines_info: |
| 946 | mo, created = Machine.objects.get_or_create(layer_source = self, up_id = mi['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch'])) |
| 947 | mo.up_date = mi['updated'] |
| 948 | mo.name = mi['name'] |
| 949 | mo.description = mi['description'] |
| 950 | mo.save() |
| 951 | |
| 952 | if not connection.features.autocommits_when_autocommit_is_off: |
| 953 | transaction.set_autocommit(True) |
| 954 | |
| 955 | # update recipes; paginate by layer version / layer branch |
| 956 | logger.debug("Fetching target information") |
| 957 | recipes_info = _get_json_response(apilinks['recipes'] |
| 958 | + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self))) |
| 959 | ) |
| 960 | if not connection.features.autocommits_when_autocommit_is_off: |
| 961 | transaction.set_autocommit(False) |
| 962 | for ri in recipes_info: |
| 963 | try: |
| 964 | ro, created = Recipe.objects.get_or_create(layer_source = self, up_id = ri['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = ri['layerbranch'])) |
| 965 | ro.up_date = ri['updated'] |
| 966 | ro.name = ri['pn'] |
| 967 | ro.version = ri['pv'] |
| 968 | ro.summary = ri['summary'] |
| 969 | ro.description = ri['description'] |
| 970 | ro.section = ri['section'] |
| 971 | ro.license = ri['license'] |
| 972 | ro.homepage = ri['homepage'] |
| 973 | ro.bugtracker = ri['bugtracker'] |
| 974 | ro.file_path = ri['filepath'] + "/" + ri['filename'] |
| 975 | if 'inherits' in ri: |
| 976 | ro.is_image = 'image' in ri['inherits'].split() |
| 977 | ro.save() |
| 978 | except IntegrityError as e: |
| 979 | logger.debug("Failed saving recipe, ignoring: %s (%s:%s)" % (e, ro.layer_version, ri['filepath']+"/"+ri['filename'])) |
| 980 | if not connection.features.autocommits_when_autocommit_is_off: |
| 981 | transaction.set_autocommit(True) |
| 982 | |
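# Illustrative usage sketch (not part of the original file): refreshing Toaster's
# layer, machine and recipe information from a remote layer index. Note that
# update() only fetches branches whose names appear in the Release table, so at
# least one Release must be configured first.
#
#     for ls in LayerSource.objects.filter(sourcetype=LayerSource.TYPE_LAYERINDEX):
#         ls.update()
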
| 983 | class BitbakeVersion(models.Model): |
| 984 | |
| 985 | name = models.CharField(max_length=32, unique = True) |
| 986 | giturl = GitURLField() |
| 987 | branch = models.CharField(max_length=32) |
| 988 | dirpath = models.CharField(max_length=255) |
| 989 | |
| 990 | def __unicode__(self): |
| 991 | return "%s (Branch: %s)" % (self.name, self.branch) |
| 992 | |
| 993 | |
| 994 | class Release(models.Model): |
| 995 | """ A release is a project template, used to pre-populate Project settings with a configuration set """ |
| 996 | name = models.CharField(max_length=32, unique = True) |
| 997 | description = models.CharField(max_length=255) |
| 998 | bitbake_version = models.ForeignKey(BitbakeVersion) |
| 999 | branch_name = models.CharField(max_length=50, default = "") |
| 1000 | helptext = models.TextField(null=True) |
| 1001 | |
| 1002 | def __unicode__(self): |
| 1003 | return "%s (%s)" % (self.name, self.branch_name) |
| 1004 | |
| 1005 | class ReleaseLayerSourcePriority(models.Model): |
| 1006 | """ Each release selects layers from the set up layer sources, ordered by priority """ |
| 1007 | release = models.ForeignKey("Release") |
| 1008 | layer_source = models.ForeignKey("LayerSource") |
| 1009 | priority = models.IntegerField(default = 0) |
| 1010 | |
| 1011 | def __unicode__(self): |
| 1012 | return "%s-%s:%d" % (self.release.name, self.layer_source.name, self.priority) |
| 1013 | class Meta: |
| 1014 | unique_together = (('release', 'layer_source'),) |
| 1015 | |
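# Illustrative example (not part of the original file): giving a layer index
# source priority over a local source for one release; "rel", "index_ls" and
# "local_ls" are assumed to be existing Release and LayerSource objects.
#
#     ReleaseLayerSourcePriority.objects.create(release=rel, layer_source=index_ls, priority=10)
#     ReleaseLayerSourcePriority.objects.create(release=rel, layer_source=local_ls, priority=0)
#
# Project.compatible_layerversions() orders candidate layer versions by this
# priority, highest first.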
| 1016 | |
| 1017 | class ReleaseDefaultLayer(models.Model): |
| 1018 | release = models.ForeignKey(Release) |
| 1019 | layer_name = models.CharField(max_length=100, default="") |
| 1020 | |
| 1021 | |
| 1022 | # The Branch class is synced with layerindex.Branch; branches can only come from remote layer indexes
| 1023 | class Branch(models.Model): |
| 1024 |     layer_source = models.ForeignKey('LayerSource', null = True, default = None)
| 1025 | up_id = models.IntegerField(null = True, default = None) # id of branch in the source |
| 1026 | up_date = models.DateTimeField(null = True, default = None) |
| 1027 | |
| 1028 | name = models.CharField(max_length=50) |
| 1029 | short_description = models.CharField(max_length=50, blank=True) |
| 1030 | |
| 1031 | class Meta: |
| 1032 | verbose_name_plural = "Branches" |
| 1033 | unique_together = (('layer_source', 'name'),('layer_source', 'up_id')) |
| 1034 | |
| 1035 | def __unicode__(self): |
| 1036 | return self.name |
| 1037 | |
| 1038 | |
| 1039 | # Layer class synced with layerindex.LayerItem |
| 1040 | class Layer(models.Model): |
| 1041 |     layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this layer
| 1042 | up_id = models.IntegerField(null = True, default = None) # id of layer in the remote source |
| 1043 | up_date = models.DateTimeField(null = True, default = None) |
| 1044 | |
| 1045 | name = models.CharField(max_length=100) |
| 1046 | layer_index_url = models.URLField() |
| 1047 | vcs_url = GitURLField(default = None, null = True) |
| 1048 | vcs_web_url = models.URLField(null = True, default = None) |
| 1049 | vcs_web_tree_base_url = models.URLField(null = True, default = None) |
| 1050 | vcs_web_file_base_url = models.URLField(null = True, default = None) |
| 1051 | |
| 1052 | summary = models.TextField(help_text='One-line description of the layer', null = True, default = None) |
| 1053 | description = models.TextField(null = True, default = None) |
| 1054 | |
| 1055 | def __unicode__(self): |
| 1056 | return "%s / %s " % (self.name, self.layer_source) |
| 1057 | |
| 1058 | class Meta: |
| 1059 | unique_together = (("layer_source", "up_id"), ("layer_source", "name")) |
| 1060 | |
| 1061 | |
| 1062 | # LayerCommit class is synced with layerindex.LayerBranch |
| 1063 | class Layer_Version(models.Model): |
| 1064 | search_allowed_fields = ["layer__name", "layer__summary", "layer__description", "layer__vcs_url", "dirpath", "up_branch__name", "commit", "branch"] |
| 1065 | build = models.ForeignKey(Build, related_name='layer_version_build', default = None, null = True) |
| 1066 | layer = models.ForeignKey(Layer, related_name='layer_version_layer') |
| 1067 | |
| 1068 | layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this Layer Version |
| 1069 | up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source |
| 1070 | up_date = models.DateTimeField(null = True, default = None) |
| 1071 | up_branch = models.ForeignKey(Branch, null = True, default = None) |
| 1072 | |
| 1073 | branch = models.CharField(max_length=80) # LayerBranch.actual_branch |
| 1074 | commit = models.CharField(max_length=100) # LayerBranch.vcs_last_rev |
| 1075 | dirpath = models.CharField(max_length=255, null = True, default = None) # LayerBranch.vcs_subdir |
| 1076 | priority = models.IntegerField(default = 0) # if -1, this is a default layer |
| 1077 | |
| 1078 | local_path = models.FilePathField(max_length=1024, default = "/") # where this layer was checked-out |
| 1079 | |
| 1080 | project = models.ForeignKey('Project', null = True, default = None) # Set if this layer is project-specific; always set for imported layers, and project-set branches |
| 1081 | |
| 1082 | # code lifted, with adaptations, from the layerindex-web application https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/ |
| 1083 | def _handle_url_path(self, base_url, path): |
| 1084 | import re, posixpath |
| 1085 | if base_url: |
| 1086 | if self.dirpath: |
| 1087 | if path: |
| 1088 | extra_path = self.dirpath + '/' + path |
| 1089 | # Normalise out ../ in path for usage URL |
| 1090 | extra_path = posixpath.normpath(extra_path) |
| 1091 | # Minor workaround to handle case where subdirectory has been added between branches |
| 1092 | # (should probably support usage URL per branch to handle this... sigh...) |
| 1093 | if extra_path.startswith('../'): |
| 1094 | extra_path = extra_path[3:] |
| 1095 | else: |
| 1096 | extra_path = self.dirpath |
| 1097 | else: |
| 1098 | extra_path = path |
| 1099 | branchname = self.up_branch.name |
| 1100 | url = base_url.replace('%branch%', branchname) |
| 1101 | |
| 1102 | # If there's a % in the path (e.g. a wildcard bbappend) we need to encode it |
| 1103 | if extra_path: |
| 1104 | extra_path = extra_path.replace('%', '%25') |
| 1105 | |
| 1106 | if '%path%' in base_url: |
| 1107 | if extra_path: |
| 1108 | url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '\\1', url) |
| 1109 | else: |
| 1110 | url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '', url) |
| 1111 | return url.replace('%path%', extra_path) |
| 1112 | else: |
| 1113 | return url + extra_path |
| 1114 | return None |
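    # Example (illustrative, with an assumed base URL): if the layer's
    # vcs_web_file_base_url is "http://git.example.com/layer/tree/%path%?h=%branch%",
    # the layer version's up_branch is named "master" and its dirpath is
    # "meta-example", then
    #
    #     lv.get_vcs_file_link_url("recipes-foo/foo_1.0.bb")
    #     # -> "http://git.example.com/layer/tree/meta-example/recipes-foo/foo_1.0.bb?h=master"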
| 1115 | |
| 1116 | def get_vcs_link_url(self): |
| 1117 | if self.layer.vcs_web_url is None: |
| 1118 | return None |
| 1119 | return self.layer.vcs_web_url |
| 1120 | |
| 1121 | def get_vcs_file_link_url(self, file_path=""): |
| 1122 | if self.layer.vcs_web_file_base_url is None: |
| 1123 | return None |
| 1124 | return self._handle_url_path(self.layer.vcs_web_file_base_url, file_path) |
| 1125 | |
| 1126 | def get_vcs_dirpath_link_url(self): |
| 1127 | if self.layer.vcs_web_tree_base_url is None: |
| 1128 | return None |
| 1129 | return self._handle_url_path(self.layer.vcs_web_tree_base_url, '') |
| 1130 | |
| 1131 | def get_equivalents_wpriority(self, project): |
| 1132 | return project.compatible_layerversions(layer_name = self.layer.name) |
| 1133 | |
| 1134 | def get_vcs_reference(self): |
| 1135 | if self.commit is not None and len(self.commit) > 0: |
| 1136 | return self.commit |
| 1137 | if self.branch is not None and len(self.branch) > 0: |
| 1138 | return self.branch |
| 1139 | if self.up_branch is not None: |
| 1140 | return self.up_branch.name |
| 1141 | return ("Cannot determine the vcs_reference for layer version %s" % vars(self)) |
| 1142 | |
| 1143 | def get_detailspage_url(self, project_id): |
| 1144 | return reverse('layerdetails', args=(project_id, self.pk)) |
| 1145 | |
| 1146 | def __unicode__(self): |
| 1147 | return "%d %s (VCS %s, Project %s)" % (self.pk, str(self.layer), self.get_vcs_reference(), self.build.project if self.build is not None else "No project") |
| 1148 | |
| 1149 | class Meta: |
| 1150 | unique_together = ("layer_source", "up_id") |
| 1151 | |
| 1152 | class LayerVersionDependency(models.Model): |
| 1153 |     layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this layer
| 1154 | up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source |
| 1155 | |
| 1156 | layer_version = models.ForeignKey(Layer_Version, related_name="dependencies") |
| 1157 | depends_on = models.ForeignKey(Layer_Version, related_name="dependees") |
| 1158 | |
| 1159 | class Meta: |
| 1160 | unique_together = ("layer_source", "up_id") |
| 1161 | |
| 1162 | class ProjectLayer(models.Model): |
| 1163 | project = models.ForeignKey(Project) |
| 1164 | layercommit = models.ForeignKey(Layer_Version, null=True) |
| 1165 | optional = models.BooleanField(default = True) |
| 1166 | |
| 1167 | def __unicode__(self): |
| 1168 | return "%s, %s" % (self.project.name, self.layercommit) |
| 1169 | |
| 1170 | class Meta: |
| 1171 | unique_together = (("project", "layercommit"),) |
| 1172 | |
| 1173 | class ProjectVariable(models.Model): |
| 1174 | project = models.ForeignKey(Project) |
| 1175 | name = models.CharField(max_length=100) |
| 1176 | value = models.TextField(blank = True) |
| 1177 | |
| 1178 | class Variable(models.Model): |
| 1179 | search_allowed_fields = ['variable_name', 'variable_value', |
| 1180 | 'vhistory__file_name', "description"] |
| 1181 | build = models.ForeignKey(Build, related_name='variable_build') |
| 1182 | variable_name = models.CharField(max_length=100) |
| 1183 | variable_value = models.TextField(blank=True) |
| 1184 | changed = models.BooleanField(default=False) |
| 1185 | human_readable_name = models.CharField(max_length=200) |
| 1186 | description = models.TextField(blank=True) |
| 1187 | |
| 1188 | class VariableHistory(models.Model): |
| 1189 | variable = models.ForeignKey(Variable, related_name='vhistory') |
| 1190 | value = models.TextField(blank=True) |
| 1191 | file_name = models.FilePathField(max_length=255) |
| 1192 | line_number = models.IntegerField(null=True) |
| 1193 | operation = models.CharField(max_length=64) |
| 1194 | |
| 1195 | class HelpText(models.Model): |
| 1196 | VARIABLE = 0 |
| 1197 | HELPTEXT_AREA = ((VARIABLE, 'variable'), ) |
| 1198 | |
| 1199 | build = models.ForeignKey(Build, related_name='helptext_build') |
| 1200 | area = models.IntegerField(choices=HELPTEXT_AREA) |
| 1201 | key = models.CharField(max_length=100) |
| 1202 | text = models.TextField() |
| 1203 | |
| 1204 | class LogMessage(models.Model): |
| 1205 |     EXCEPTION = -1 # used to signal exceptions raised by Toaster itself
| 1206 | INFO = 0 |
| 1207 | WARNING = 1 |
| 1208 | ERROR = 2 |
| 1209 | |
| 1210 | LOG_LEVEL = ( (INFO, "info"), |
| 1211 | (WARNING, "warn"), |
| 1212 | (ERROR, "error"), |
| 1213 | (EXCEPTION, "toaster exception")) |
| 1214 | |
| 1215 | build = models.ForeignKey(Build) |
| 1216 | task = models.ForeignKey(Task, blank = True, null=True) |
| 1217 | level = models.IntegerField(choices=LOG_LEVEL, default=INFO) |
| 1218 | message=models.CharField(max_length=240) |
| 1219 | pathname = models.FilePathField(max_length=255, blank=True) |
| 1220 | lineno = models.IntegerField(null=True) |
| 1221 | |
| 1222 | def __str__(self): |
| 1223 | return "%s %s %s" % (self.get_level_display(), self.message, self.build) |
| 1224 | |
| 1225 | def invalidate_cache(**kwargs): |
| 1226 | from django.core.cache import cache |
| 1227 | try: |
| 1228 | cache.clear() |
| 1229 | except Exception as e: |
| 1230 | logger.warning("Problem with cache backend: Failed to clear cache: %s" % e) |
| 1231 | |
| 1232 | django.db.models.signals.post_save.connect(invalidate_cache) |
| 1233 | django.db.models.signals.post_delete.connect(invalidate_cache) |