# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import logging
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

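# Bump this whenever the format of the cached data changes; a mismatch
# makes load_cachefile() discard the old cache so that it gets rebuilt.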
__cache_version__ = "148"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
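
# e.g. getCacheFile("/build/cache", "bb_cache.dat", "abc123") returns
# "/build/cache/bb_cache.dat.abc123"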

# RecipeInfoCommon defines the common metadata-retrieval methods used by
# the caches. CoreRecipeInfo, as well as any extra RecipeInfo classes,
# must inherit from this class.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)
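
    # For example (illustrative): pkgvar('RDEPENDS', ['foo'], d) reads the
    # RDEPENDS_foo variable, and taskvar('BB_BASEHASH', ['do_compile'], d)
    # reads BB_BASEHASH_task-do_compile.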

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData fields populated by CoreRecipeInfo
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)
        # Create a collection of all targets, used by sanity-checking
        # tasks (e.g. upstream-version and license checks) and by task
        # and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs


class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # caches_array is kept so that we can decide later whether extra
        # cache files need to be dumped/loaded alongside the core cache.
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First, validate the cache using the version information
        # recorded in the core cache file.
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the combined size of all the cache files
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as f:
                    cachesize += os.fstat(f.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as f:
                    pickled = pickle.Unpickler(f)
                    # Entries were dumped as alternating key/value items;
                    # keep loading until the pickle stream is exhausted.
                    while True:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if key in self.depends_cache:
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on whole percentage boundaries
                        current_progress = f.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                    # Account for the bytes consumed from this file, so the
                    # next file's progress continues from the right offset
                    previous_progress += f.tell()

        # Note: the number of depends_cache entries corresponds to the
        # number of parsed files; a file with entries in several cache
        # files still counts as a single item.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)


    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn
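
    # For example (illustrative): virtualfn2realfn("virtual:native:/p/foo.bb")
    # returns ("/p/foo.bb", "native"), and realfn2virtual("/p/foo.bb", "native")
    # returns "virtual:native:/p/foo.bb".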

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
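
    # The returned structure looks like (illustrative):
    #   [("virtual:native:/p/foo.bb", [CoreRecipeInfo, ...]),
    #    ("/p/foo.bb", [CoreRecipeInfo, ...])]
    # i.e. one entry per variant, each holding one info object per cache class.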

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # each info_array is a list of [CoreRecipeInfo, XXXRecipeInfo, ...]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            # Fall through so that callers always receive the
            # (cached, infos) pair that loadData() expects.
            infos = self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # fl is a flattened string of "<filename>:True" /
                    # "<filename>:False" entries separated by spaces. A
                    # .split() would be simpler, but spaces or colons in
                    # filenames would break it, so scan for the markers.
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if b >= 0 and (a < 0 or b < a):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif a >= 0 and (b < 0 or a < b):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        # Only cache the recipe if it was skipped or its PV is fully
        # resolved (no SRCREVINACTION placeholder), and it is not marked
        # as uncacheable via __BB_DONT_CACHE.
        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        chdir_back = False

        from bb import parse

        # expand tmpdir to include this topdir
        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = config.createCopy()
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not bb_data.getVar('TOPDIR', False):
            chdir_back = True
            bb_data.setVar('TOPDIR', bbfile_loc)
        try:
            if appends:
                bb_data.setVar('__BBAPPEND', " ".join(appends))
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.
    """
    # The Cache constructor also needs the cooker's caches_array (the
    # list of RecipeInfo classes).
    return Cache(cooker.configuration.data, cooker.configuration.data_hash,
                 cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """
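    # Typical lifecycle (illustrative): each process loads the shared cache
    # with init_cache(d); workers record new entries in cachedata_extras and
    # call save_extras() before exiting; the parent then calls save_merge()
    # to fold the per-process files back into the main cache file.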

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data
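
    # The cache data is a list of dicts so that subclasses can carry several
    # tables; merge_data() below handles any list length. Subclasses supply
    # the file name and version, e.g. (illustrative, modelled on
    # bb.checksum.FileChecksumCache):
    #
    #   class FileChecksumCache(MultiProcessCache):
    #       cache_file_name = "local_file_checksum_cache.dat"
    #       CACHE_VERSION = 1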

    def save_extras(self, d):
        if not self.cachefile:
            return

        # Take the shared lock: save_merge() takes it exclusively, so no
        # merge can run while extras files are still being written.
        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Find a per-process extras filename that is not already in use,
        # starting from our own pid.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
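
    # e.g. merge_data([{"a": 1, "b": 2}], [{"b": 9}]) leaves dest as
    # [{"a": 1, "b": 9}]; entries already in dest are never overwritten.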

    def save_merge(self, d):
        if not self.cachefile:
            return

        # Take the lock exclusively; this cannot run while any
        # save_extras() writer still holds the shared lock.
        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        # Merge every per-process extras file back in, then delete it
        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)