blob: b8054e028b72c0c80d1efeafa7df7bf762b9803d [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
import logging
import os
import pickle
import re
from collections import defaultdict
from collections.abc import Mapping

import bb.utils
from bb import PrefixLoggerAdapter
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026
27logger = logging.getLogger("BitBake.Cache")
28
Andrew Geisslerd25ed322020-06-27 00:28:28 -050029__cache_version__ = "153"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030
def getCacheFile(path, filename, mc, data_hash):
    """Return the full path of a cache file.

    The multiconfig name (when non-empty) and the configuration data
    hash are embedded in the file name as dot-separated suffixes.
    """
    parts = [filename]
    if mc:
        parts.append(mc)
    parts.append(data_hash)
    return os.path.join(path, ".".join(parts))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050036
37# RecipeInfoCommon defines common data retrieving methods
38# from meta data for caches. CoreRecipeInfo as well as other
39# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Shared metadata-extraction helpers for recipe info cache classes.

    CoreRecipeInfo, as well as any extra RecipeInfo classes, inherit
    from this to pull values out of a parsed recipe's datastore in
    various shapes (lists, ints, dependency lists, per-package and
    per-task dicts).
    """

    @classmethod
    def listvar(cls, var, metadata):
        # Split a whitespace-separated variable into a list of words.
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Read a variable as an integer; unset/empty counts as 0.
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Explode a dependency string into a list of dependency names.
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package dependency lists, e.g. RDEPENDS_<pkg>.
        return {pkg: cls.depvar("%s_%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task values, e.g. <var>_task-<task>.
        return {task: cls.getvar("%s_task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Collect the given flag for each variable in varlist; with
        # squash=True, entries whose flag value is falsy are dropped.
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for k, v in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        # Read a variable, normalising an unset value to ''.
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050076
77
class CoreRecipeInfo(RecipeInfoCommon):
    """Primary per-recipe information stored in the bitbake cache.

    Captures the values the cooker needs about a parsed recipe
    (provides, dependencies, tasks, stamps, hashes, ...) so the recipe
    need not be re-parsed while its inputs are unchanged.
    """
    # No per-instance __dict__; attributes are set in __init__ only.
    __slots__ = ()

    # On-disk pickle file name for this cache class.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        # Extract all cacheable values for *filename* from its parsed
        # *metadata* datastore.
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        # '' represents the base (non-virtual) variant.
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Recipe was skipped during parsing; record why and stop -
            # the remaining attributes are only set for usable recipes.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # __inherit_cache is deliberately read unexpanded.
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # Initialise the aggregate containers on *cachedata* that
        # add_cacheData() fills in, one entry per recipe file.
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        # fn -> package -> list of runtime depends/recommends.
        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        # Merge this recipe's information into *cachedata* under key *fn*.
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # PN itself always counts as a provide.
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # NOTE(review): rprovides aliases self.rprovides, so the +=
        # below extends the instance's list in place across packages -
        # looks unintended; confirm before relying on self.rprovides
        # afterwards.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500235
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:'):
        # Strip the "mc:<name>:" prefix, remembering the multiconfig name.
        parts = virtualfn.split(':')
        mc = parts[1]
        virtualfn = ":".join(parts[2:])

    cls = ""
    fn = virtualfn
    if virtualfn.startswith('virtual:'):
        # "virtual:<class...>:<path>" - the real path is everything
        # after the last colon.
        head, _, fn = virtualfn.rpartition(':')
        cls = head[len('virtual:'):]

    return (fn, cls, mc)
254
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    virtfn = realfn
    if cls:
        virtfn = "virtual:%s:%s" % (cls, virtfn)
    if mc:
        # The multiconfig prefix wraps the (possibly virtual) name.
        virtfn = "mc:%s:%s" % (mc, virtfn)
    return virtfn
264
def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if not variant:
        return realfn
    if variant.startswith("mc:"):
        # Variant itself carries a multiconfig prefix:
        # "mc:<name>[:<class...>]".
        fields = variant.split(":")
        mcname = fields[1]
        if fields[2]:
            return "mc:%s:virtual:%s:%s" % (mcname, ":".join(fields[2:]), realfn)
        return "mc:%s:%s" % (mcname, realfn)
    return "virtual:%s:%s" % (variant, realfn)
277
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe

    Parses *bbfile* (with any *appends* applied) into the datastore
    *bb_data* for multiconfig *mc*, returning the result of
    bb.parse.handle().
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    # Prime the mtime cache for the recipe's directory.
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        # Restore the working directory on any failure, then re-raise.
        if chdir_back:
            os.chdir(oldpath)
        raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500312
313
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600314
class NoCache(object):
    """Parse recipes on demand with no on-disk caching.

    Base class for Cache, which layers the persistent cache on top.
    """

    def __init__(self, databuilder):
        # databuilder supplies the base datastore (databuilder.data)
        # plus one datastore per multiconfig (databuilder.mcdata).
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        # Only the requested variant's datastore is returned.
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped

        With virtonly=True, *bbfile* is a virtual filename and only that
        variant is finalised. With *mc* set, only that multiconfig's
        datastore is parsed. Otherwise all multiconfigs are parsed and
        the extra results are keyed as "mc:<name>:<variant>".
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        if mc is not None:
            bb_data = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(bb_data, bbfile, appends, mc)

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                # '' is the default datastore, already parsed above.
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
360
361class Cache(NoCache):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500362 """
363 BitBake Cache implementation
364 """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Set up the cache for multiconfig *mc*.

        *data_hash* identifies the configuration; it becomes part of
        the cache file names so a config change invalidates the cache.
        """
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()          # filenames known valid in the cache
        self.checked = set()        # filenames already validated this run
        self.depends_cache = {}     # fn -> list of RecipeInfoCommon objects
        self.data_fn = None
        self.cacheclean = True      # nothing new parsed yet, no need to save
        self.data_hash = data_hash
        # Splits "path:True path:False ..." checksum lists; the
        # lookbehinds keep paths containing spaces intact.
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            # No CACHE directory configured - run without persistence.
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500391
    def getCacheFile(self, cachefile):
        # Delegate to the module-level helper using this cache's
        # directory, multiconfig name and configuration hash.
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
394
    def prepare_cache(self, progress):
        """Load the on-disk cache if it is complete and current.

        Returns the number of cached entries loaded (0 when caching is
        disabled or the cache had to be discarded). *progress* is a
        callable receiving a byte-count as loading proceeds.
        """
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug(1, "Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # Only load when every registered cache class has its file
        # present; a partial set means the cache is stale.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, its just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Symlink creation is best-effort (e.g. unsupported fs).
            pass

        return loaded
435
436 def cachesize(self):
437 if not self.has_cache:
438 return 0
439
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500440 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500441 for cache_class in self.caches_array:
442 cachefile = self.getCacheFile(cache_class.cachefile)
443 try:
444 with open(cachefile, "rb") as cachefile:
445 cachesize += os.fstat(cachefile.fileno()).st_size
446 except FileNotFoundError:
447 pass
448
449 return cachesize
450
    def load_cachefile(self, progress):
        """Unpickle all cache files into self.depends_cache.

        Each file starts with (cache version, bitbake version) records;
        a mismatch or unreadable header aborts with a return of 0 so
        the caller rebuilds. Returns the number of cached filenames
        loaded. *progress* is called with a running byte count.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug(1, 'Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                # (key, value) record pairs until EOF; the file object
                # is always truthy, so the loop exits via break.
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        # End of stream (or corrupt tail) - stop here.
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    # Values from the different cache files for the same
                    # key accumulate into one list per filename.
                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

            previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500500
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information

        The result is a list of (virtualfn, info_array) pairs covering
        the base recipe and every variant it provides.
        """
        self.logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate file dependencies across all variants; the
            # base recipe ('' variant, processed last) gets the union.
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
526
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600527 def load(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500528 """Obtain the recipe information for the specified filename,
529 using cached values if available, otherwise parsing.
530
531 Note that if it does parse to obtain the info, it will not
532 automatically add the information to the cache or to your
533 CacheData. Use the add or add_info method to do so after
534 running this, or use loadData instead."""
535 cached = self.cacheValid(filename, appends)
536 if cached:
537 infos = []
538 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
539 info_array = self.depends_cache[filename]
540 for variant in info_array[0].variants:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600541 virtualfn = variant2virtual(filename, variant)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500542 infos.append((virtualfn, self.depends_cache[virtualfn]))
543 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500544 return self.parse(filename, appends, configdata, self.caches_array)
545
546 return cached, infos
547
    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance.

        Returns (cached, skipped, virtuals): whether the cache was
        used, how many variants were skipped, and how many were added.
        """
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                # Freshly parsed (not cached) entries are also recorded
                # in the persistent cache via add_info's parsed flag.
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals
564
565 def cacheValid(self, fn, appends):
566 """
567 Is the cache valid for fn?
568 Fast version, no timestamps checked.
569 """
570 if fn not in self.checked:
571 self.cacheValidUpdate(fn, appends)
572
573 # Is cache enabled?
574 if not self.has_cache:
575 return False
576 if fn in self.clean:
577 return True
578 return False
579
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        On success, fn and all its variants are added to self.clean;
        any failed check removes fn from the cache and returns False.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug(2, "%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists (mtime 0 means it could not be stat'd)
        if mtime == 0:
            self.logger.debug(2, "%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug(2, "%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug(2, "%s's dependency %s was removed",
                                      fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug(2, "%s's dependency %s changed",
                                      fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            # Each entry is a "path:True path:False ..." list recording
            # whether each file existed at parse time; any flip
            # invalidates the cache.
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug(2, "%s's file checksum list file %s changed",
                                          fn, f)
                        self.remove(fn)
                        return False

        # The set of applied bbappends must match exactly (order matters).
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug(2, "appends for %s changed", fn)
            self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug(2, "%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug(2, "Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug(2, "Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
677
678 def remove(self, fn):
679 """
680 Remove a fn from the cache
681 Called from the parser in error cases
682 """
683 if fn in self.depends_cache:
Andrew Geissler5a43b432020-06-13 10:46:56 -0500684 self.logger.debug(1, "Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500685 del self.depends_cache[fn]
686 if fn in self.clean:
Andrew Geissler5a43b432020-06-13 10:46:56 -0500687 self.logger.debug(1, "Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500688 self.clean.remove(fn)
689
    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)

        Writes one pickle file per registered cache class, each
        containing only the info objects of that class. Consumes
        self.depends_cache (deleted afterwards).
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            # Nothing new was parsed this run - on-disk cache is current.
            self.logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug(2, "Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                # Version header checked by load_cachefile() on reload.
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        # Only this cache class's entries go in its file.
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache
719
    @staticmethod
    def mtime(cachefile):
        # Delegate to bb.parse's mtime cache; the "noerror" variant is
        # presumably tolerant of missing/unreadable files — see bb.parse
        # for the exact contract.
        return bb.parse.cached_mtime_noerror(cachefile)
723
724 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500725 if self.mc is not None:
726 (fn, cls, mc) = virtualfn2realfn(filename)
727 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500728 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500729 return
730
731 vfn = realfn2virtual(fn, cls, self.mc)
732 else:
733 vfn = filename
734
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500735 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500736 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500737
738 if watcher:
739 watcher(info_array[0].file_depends)
740
741 if not self.has_cache:
742 return
743
744 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
745 if parsed:
746 self.cacheclean = False
747 self.depends_cache[filename] = info_array
748
749 def add(self, file_name, data, cacheData, parsed=None):
750 """
751 Save data we need into the cache
752 """
753
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600754 realfn = virtualfn2realfn(file_name)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500755
756 info_array = []
757 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600758 info_array.append(cache_class(realfn, data))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500759 self.add_info(file_name, info_array, cacheData, parsed)
760
try:
    from collections.abc import Mapping
except ImportError:
    # collections.Mapping was removed in Python 3.10; the top-of-file
    # "from collections import ..., Mapping" only resolves on older
    # interpreters, so prefer the collections.abc location here.
    from collections import Mapping


class MulticonfigCache(Mapping):
    """
    Read-only mapping of multiconfig name -> Cache instance.

    Constructing it creates one Cache per entry in databuilder.mcdata and
    loads them all, firing CacheLoadStarted / CacheLoadProgress /
    CacheLoadCompleted events as the cache files are read.
    """
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            # Shared progress callback for all per-multiconfig caches:
            # accumulates the bytes read so far and emits a progress event
            # whenever the percentage advances.
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            if cachesize > 0:
                # Guard against ZeroDivisionError when there is nothing to load.
                current_percent = 100 * current_progress / cachesize
                if current_percent > previous_percent:
                    previous_percent = current_percent
                    bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                  databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k
816
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): this call looks stale — elsewhere in this file the Cache
    # constructor is invoked as Cache(databuilder, mc, data_hash, caches_array)
    # (see MulticonfigCache.__init__), so this two-argument call would not
    # match; confirm whether init() still has any callers.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
835
836
class CacheData(object):
    """
    Aggregated data structures compiled from the per-recipe cache entries.

    Each cache class registers its own tables via init_cacheData(); the
    recipe info objects then populate them through add_from_recipeinfo().
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for klass in caches_array:
            if not issubclass(klass, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % klass)
            klass.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Let each info object record itself into the aggregate tables."""
        for entry in info_array:
            entry.add_cacheData(self, fn)
861
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches.

    Each worker process accumulates new entries in ``cachedata_extras``
    and dumps them to a per-pid sidecar file (save_extras); a single
    process later folds every sidecar back into the main cache file
    (save_merge).  All file access is serialised with bb.utils lock files.
    """

    def __init__(self):
        # Path of the main cache file; stays None until init_cache() finds
        # a configured cache directory.
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """
        Locate the cache file under PERSISTENT_DIR (or CACHE) and load it.

        A missing, unreadable or version-mismatched cache is silently
        ignored and the empty structure from create_cachedata() is kept.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Narrowed from a bare "except:": any load failure still means
            # we start from an empty cache, but KeyboardInterrupt and
            # SystemExit now propagate instead of being swallowed.
            return
        finally:
            # Release the lock on both the success and the error path.
            bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        """Return the empty cache structure: a list of dictionaries."""
        data = [{}]
        return data

    def save_extras(self):
        """
        Dump this process's new entries (cachedata_extras) into a unique
        sidecar file named <cachefile>-<N>, guarded by per-slot lock files.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Probe for a free sidecar slot, starting at our pid so the search
        # is short in the common case.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Copy entries from *source* into *dest*, never overwriting keys
        already present in *dest* (first writer wins)."""
        for sdict, ddict in zip(source, dest):
            for h in sdict:
                if h not in ddict:
                    ddict[h] = sdict[h]

    def save_merge(self):
        """
        Fold every sidecar produced by save_extras() back into the main
        cache file, deleting each sidecar as it is consumed.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError, pickle.UnpicklingError):
                # Truncated or corrupt sidecars are discarded;
                # UnpicklingError added so one bad file cannot abort the
                # whole merge.
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400965
966
class SimpleCache(object):
    """
    BitBake single-file pickle cache.

    A lock-file-protected cache holding one pickled payload plus a version
    stamp; loads fall back to caller-supplied default data whenever the
    file is absent, unreadable, or versioned differently.
    """

    def __init__(self, version):
        # Path of the cache file; stays None until init_cache() finds a
        # configured cache directory.
        self.cachefile = None
        self.cachedata = None
        # Version stamp stored alongside the data; mismatches invalidate
        # the cache on load.
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """
        Load the cache from disk.

        Returns *defaultdata* when no cache directory is configured, the
        file cannot be read, or the stored version does not match;
        otherwise returns the unpickled payload.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Narrowed from a bare "except:": unreadable caches fall back
            # to defaultdata, but KeyboardInterrupt/SystemExit propagate.
            return defaultdata
        finally:
            # Release the lock on both the success and the error path.
            bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Pickle *data* (with the version stamp) to the cache file,
        serialised by the lock file.  No-op when init_cache() never
        resolved a cache path."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.cacheversion])
        finally:
            # Previously the lock leaked if the write raised.
            bb.utils.unlockfile(glf)