# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import logging
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "149"
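# Note: load_cachefile() below compares this version (and bb.__version__)
# against the values recorded in an existing cache file and triggers a
# rebuild on any mismatch, so this string must be bumped whenever the
# cached data format changes.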

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
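# Cache files are therefore named "<filename>.<data_hash>"; for example, the
# core cache for a configuration hashed as "abc123" would be stored as
# "bb_cache.dat.abc123" (illustrative hash value).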

# RecipeInfoCommon defines the common metadata-retrieval methods used by the
# caches. CoreRecipeInfo, as well as any extra RecipeInfo classes, must
# inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
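        # rundeps/runrecs are nested mappings: rundeps[fn][package] holds the
        # runtime dependency list for one package of recipe fn (and runrecs
        # the recommendations); add_cacheData() below populates them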
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # Create a collection of all targets, for sanity-checking tasks
        # such as checking upstream versions, licenses, and the tools
        # used for task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc


class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
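        # depends_cache maps a (possibly virtual) recipe file name to the
        # list of RecipeInfo objects parsed from it; see add_info() and
        # load_cachefile() below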
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
            if cache_ok:
                self.load_cachefile()
            elif os.path.isfile(self.cachefile):
                logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First, check whether the cache as a whole is still valid, using
        # the version header of the core cache file
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the combined size of all the cache files, so progress
        # can be reported against the total
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if key in self.depends_cache:
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on even percentage boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                previous_progress += current_progress

        # Note: the number of entries in depends_cache corresponds to the
        # number of parsed files; the same file may contribute to several
        # caches but is still regarded as a single item here
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn
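    # Illustrative round trip between the two representations:
    #   realfn2virtual("/path/foo.bb", "native")
    #       -> "virtual:native:/path/foo.bb"
    #   virtualfn2realfn("virtual:native:/path/foo.bb")
    #       -> ("/path/foo.bb", "native")
    # and a plain filename maps to itself with an empty class keyword.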

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
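    # The returned value is a list of (virtualfn, info_array) tuples, one per
    # variant of the recipe, where each info_array holds one RecipeInfo
    # object per registered cache class (CoreRecipeInfo first).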

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            # parse() returns just the info list; return the same
            # (cached, infos) pair as the cached path so callers such as
            # loadData() can unpack the result either way
            return False, self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
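                # fl is a whitespace-separated list of "<path>:True" /
                # "<path>:False" entries recording whether each checksummed
                # file existed at parse time, e.g. (illustrative paths):
                #   "/srv/work/a.patch:True /srv/work/b.cfg:False"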
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

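        # Each registered RecipeInfo class is pickled to its own cache file
        # (named via cache_class.cachefile); the core file additionally
        # carries the version headers that load_cachefile() checks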
        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        chdir_back = False

        from bb import parse

        # expand tmpdir to include this topdir
        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = config.createCopy()
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not bb_data.getVar('TOPDIR', False):
            chdir_back = True
            bb_data.setVar('TOPDIR', bbfile_loc)
        try:
            if appends:
                bb_data.setVar('__BBAPPEND', " ".join(appends))
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # Cache() takes a caches_array as its third argument; this assumes the
    # cooker object provides one, as the cooker-side callers of Cache() do
    return Cache(cooker.configuration.data, cooker.configuration.data_hash,
                 cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()
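        # cachedata holds the state loaded from the main cache file, while
        # cachedata_extras collects only this process's additions;
        # save_extras() writes the extras out for a later save_merge()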

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
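    # Since merge_data() never overwrites an existing key in dest, the value
    # already present in the destination wins when several processes have
    # recorded entries for the same key.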

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
849