blob: be5ea6a8bdc50c85740bd1dc1439ea74d06ead47 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5a43b432020-06-13 10:46:56 -050022from collections import defaultdict, Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050023import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050024from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050025import re
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026
logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "152"  # bump whenever the pickled cache format changes; a mismatch forces a reparse (see load_cachefile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030
def getCacheFile(path, filename, mc, data_hash):
    """Return the on-disk path of a cache file.

    The name is the base filename, optionally tagged with the
    multiconfig name, and always suffixed with the configuration
    data hash so stale caches are never picked up.
    """
    suffix = ""
    if mc:
        suffix = ".%s" % mc
    return os.path.join(path, filename + suffix + "." + data_hash)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050036
# RecipeInfoCommon defines the common data-retrieving methods
# used to extract values from metadata for the caches. CoreRecipeInfo,
# as well as any other extra RecipeInfo class, needs to inherit from it.
class RecipeInfoCommon(object):
    """Shared helpers for pulling typed values out of recipe metadata.

    Cache info classes (CoreRecipeInfo and any extra RecipeInfo
    implementations) inherit these classmethods.
    """

    @classmethod
    def listvar(cls, var, metadata):
        """Return the variable's value split into a list of words."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the variable's value as an int, defaulting to 0 when unset."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return the variable parsed as an exploded dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Return a {package: dependency list} mapping for each package."""
        return {pkg: cls.depvar("%s_%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Return a {task: value} mapping for each task."""
        return {task: cls.getvar("%s_task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Return a {var: flag value} mapping; squash drops falsy values."""
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for (k, v) in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the (optionally expanded) variable value, '' when unset."""
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050076
77
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information stored in the bitbake parse cache.

    Captures what the cooker needs about one parsed recipe file
    (provides, dependencies, stamps, base task hashes, ...) so the
    recipe need not be re-parsed when nothing has changed.
    """
    __slots__ = ()

    # File that pickled CoreRecipeInfo entries are written to (see Cache.sync).
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Extract the cacheable data for filename from its parsed metadata."""
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            # A recipe with no PACKAGES still packages itself.
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Recipe was skipped during parsing; no further data is needed.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the (initially empty) CacheData attributes this class fills in."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into cachedata under key fn."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # PN is always the first provide, followed by any explicit PROVIDES.
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # BUGFIX: copy the list before += below. The previous code aliased
        # self.rprovides, so every call mutated the cached info object,
        # accumulating per-package RPROVIDES into the recipe-level list.
        rprovides = list(self.rprovides)
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500235
def virtualfn2realfn(virtualfn):
    """
    Split a virtual file name into its real filename, subclass keyword
    and multiconfig name, returned as a (fn, cls, mc) tuple.
    """
    mc = ""
    if virtualfn.startswith('mc:'):
        # Peel off the "mc:<name>:" prefix first.
        _, mc, virtualfn = virtualfn.split(':', 2)

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        parts = virtualfn.split(':')
        fn = parts[-1]
        cls = ":".join(parts[1:-1])

    return (fn, cls, mc)
254
def realfn2virtual(realfn, cls, mc):
    """
    Compose a virtual filename from a real filename, subclass keyword
    and multiconfig name (inverse of virtualfn2realfn).
    """
    virtfn = realfn
    if cls:
        virtfn = "virtual:" + cls + ":" + virtfn
    if mc:
        # The multiconfig prefix always goes outermost.
        virtfn = "mc:" + mc + ":" + virtfn
    return virtfn
264
def variant2virtual(realfn, variant):
    """
    Combine a real filename with a variant string (as stored in
    __VARIANTS) to produce the corresponding virtual filename.
    """
    if variant == "":
        # The empty variant is the recipe itself.
        return realfn
    if variant.startswith("mc:"):
        fields = variant.split(":")
        mcname = fields[1]
        subclass = ":".join(fields[2:])
        if fields[2]:
            return "mc:" + mcname + ":virtual:" + subclass + ":" + realfn
        return "mc:" + mcname + ":" + realfn
    return "virtual:" + variant + ":" + realfn
277
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe file.

    bb_data is the datastore to parse into, bbfile the recipe path,
    appends a list of .bbappend files to apply and mc the multiconfig
    name. Returns the datastore(s) produced by bb.parse.handle().
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    # Prime the mtime cache for the recipe's directory.
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        # Bare except is deliberate: restore the working directory on any
        # exception (including KeyboardInterrupt), then re-raise it.
        if chdir_back:
            os.chdir(oldpath)
        raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500312
313
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600314
class NoCache(object):
    """Recipe parsing frontend with no result caching.

    Parses recipe files on demand via the configuration databuilder;
    Cache (below) subclasses this to add persistent caching.
    """

    def __init__(self, databuilder):
        # databuilder supplies the base datastore and per-multiconfig
        # datastores (mcdata) used as parsing templates.
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        # load_bbfile returns a {variant: datastore} mapping; pick ours.
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped

        virtonly: bbfile is a virtual filename — parse only that variant.
        mc: parse only within the named multiconfig's datastore.
        Otherwise parse against the base data plus every multiconfig.
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        if mc is not None:
            bb_data = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(bb_data, bbfile, appends, mc)

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        # Also parse the recipe once per configured multiconfig, keying
        # the extra results with an "mc:<name>:" prefix.
        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
360
class Cache(NoCache):
    """
    BitBake Cache implementation
    """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Set up a cache for one multiconfig (mc) keyed by data_hash.

        caches_array lists the RecipeInfoCommon subclasses whose data
        is dumped to / loaded from separate cache files.
        """
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        # Prefix all log output with the multiconfig name so parallel
        # caches can be told apart.
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()       # filenames validated as clean this run
        self.checked = set()     # filenames already checked (clean or not)
        self.depends_cache = {}  # fn -> [CoreRecipeInfo, extra infos...]
        self.data_fn = None
        self.cacheclean = True   # becomes False once new data is parsed (see sync)
        self.data_hash = data_hash
        # Splits file-checksums flag values on whitespace following a
        # ":True"/":False" existence marker (see cacheValidUpdate).
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            # No CACHE directory configured: operate without persistence.
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500391
    def getCacheFile(self, cachefile):
        """Return the full path of cachefile for this cache's multiconfig and data hash."""
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
394
    def prepare_cache(self, progress):
        """Load any existing valid cache files, creating the cache
        directory if needed.

        progress is a callable receiving a byte offset as loading
        proceeds. Returns the number of cached entries loaded (0 when
        caching is disabled or the cache had to be discarded).
        """
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug(1, "Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # The cache is only usable when every registered cache class
        # already has its file on disk.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Best effort only (e.g. filesystems without symlink support).
            pass

        return loaded
435
436 def cachesize(self):
437 if not self.has_cache:
438 return 0
439
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500440 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500441 for cache_class in self.caches_array:
442 cachefile = self.getCacheFile(cache_class.cachefile)
443 try:
444 with open(cachefile, "rb") as cachefile:
445 cachesize += os.fstat(cachefile.fileno()).st_size
446 except FileNotFoundError:
447 pass
448
449 return cachesize
450
    def load_cachefile(self, progress):
        """Unpickle every cache file into self.depends_cache.

        Each file starts with the cache version and bitbake version; any
        mismatch or unpickling error discards the whole cache (return 0).
        progress is called with a cumulative byte offset per entry.
        Returns the number of distinct filenames loaded.
        """
        cachesize = self.cachesize()
        previous_progress = 0
        # NOTE(review): previous_percent is never used in this method.
        previous_percent = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug(1, 'Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file: a stream of
                # (key, RecipeInfoCommon value) pickle pairs until EOF.
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        # End of stream (or corrupt tail) — stop reading.
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # Report cumulative progress across all cache files.
                    # (NOTE(review): despite the old comment about percentage
                    # boundaries, progress is invoked for every entry.)
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500502
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information.

        Returns a list of (virtualfn, info_array) tuples, one per
        variant, where info_array holds one info object per registered
        cache class (CoreRecipeInfo first).
        """
        self.logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate file dependencies across all variants; the real
            # (empty-variant) datastore gets the combined list.
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
528
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600529 def load(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500530 """Obtain the recipe information for the specified filename,
531 using cached values if available, otherwise parsing.
532
533 Note that if it does parse to obtain the info, it will not
534 automatically add the information to the cache or to your
535 CacheData. Use the add or add_info method to do so after
536 running this, or use loadData instead."""
537 cached = self.cacheValid(filename, appends)
538 if cached:
539 infos = []
540 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
541 info_array = self.depends_cache[filename]
542 for variant in info_array[0].variants:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600543 virtualfn = variant2virtual(filename, variant)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500544 infos.append((virtualfn, self.depends_cache[virtualfn]))
545 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500546 return self.parse(filename, appends, configdata, self.caches_array)
547
548 return cached, infos
549
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600550 def loadData(self, fn, appends, cacheData):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500551 """Load the recipe info for the specified filename,
552 parsing and adding to the cache if necessary, and adding
553 the recipe information to the supplied CacheData instance."""
554 skipped, virtuals = 0, 0
555
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600556 cached, infos = self.load(fn, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500557 for virtualfn, info_array in infos:
558 if info_array[0].skipped:
Andrew Geissler5a43b432020-06-13 10:46:56 -0500559 self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500560 skipped += 1
561 else:
562 self.add_info(virtualfn, info_array, cacheData, not cached)
563 virtuals += 1
564
565 return cached, skipped, virtuals
566
567 def cacheValid(self, fn, appends):
568 """
569 Is the cache valid for fn?
570 Fast version, no timestamps checked.
571 """
572 if fn not in self.checked:
573 self.cacheValidUpdate(fn, appends)
574
575 # Is cache enabled?
576 if not self.has_cache:
577 return False
578 if fn in self.clean:
579 return True
580 return False
581
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        On success, fn and all its variants are added to self.clean;
        on any failure the stale entries are removed and False returned.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug(2, "%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug(2, "%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug(2, "%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug(2, "%s's dependency %s was removed",
                                      fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug(2, "%s's dependency %s changed",
                                      fn, f)
                    self.remove(fn)
                    return False

        # Check recorded file-checksums entries: each entry is
        # "<path>:<True|False>" recording whether the path existed.
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f or "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug(2, "%s's file checksum list file %s changed",
                                          fn, f)
                        self.remove(fn)
                        return False

        # The set of applied bbappends must match exactly (and in order).
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug(2, "appends for %s changed", fn)
            self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        # Every variant of the recipe must be cached with a full info
        # array, otherwise the whole recipe is treated as invalid.
        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug(2, "%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug(2, "Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug(2, "Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
679
    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            self.logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            self.logger.debug(1, "Marking %s as unclean", fn)
            # Drop the "known clean" status so fn is re-validated next time.
            self.clean.remove(fn)
691
    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)

        Writes one pickle file per registered cache class, each headed
        by the cache and bitbake versions checked in load_cachefile().
        Skipped entirely when nothing new was parsed (cacheclean).
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            self.logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug(2, "Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                # Each file only receives the info objects belonging to
                # its own cache class (matched by class name).
                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        # The cache is no longer usable after syncing.
        del self.depends_cache
721
722 @staticmethod
723 def mtime(cachefile):
724 return bb.parse.cached_mtime_noerror(cachefile)
725
726 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500727 if self.mc is not None:
728 (fn, cls, mc) = virtualfn2realfn(filename)
729 if mc:
730 self.logger.error("Unexpected multiconfig %s", virtualfn)
731 return
732
733 vfn = realfn2virtual(fn, cls, self.mc)
734 else:
735 vfn = filename
736
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500737 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500738 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500739
740 if watcher:
741 watcher(info_array[0].file_depends)
742
743 if not self.has_cache:
744 return
745
746 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
747 if parsed:
748 self.cacheclean = False
749 self.depends_cache[filename] = info_array
750
751 def add(self, file_name, data, cacheData, parsed=None):
752 """
753 Save data we need into the cache
754 """
755
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600756 realfn = virtualfn2realfn(file_name)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500757
758 info_array = []
759 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600760 info_array.append(cache_class(realfn, data))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500761 self.add_info(file_name, info_array, cacheData, parsed)
762
class MulticonfigCache(Mapping):
    """Read-only mapping of multiconfig name -> Cache instance.

    Constructing the object loads every per-multiconfig cache, firing
    CacheLoadStarted/CacheLoadProgress/CacheLoadCompleted events so the
    UI can show overall progress across all configurations.
    """
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            # Progress callback handed to each Cache; 'p' is that cache's
            # own progress, so add it onto the total from the caches
            # already prepared.
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                    databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k

    def keys(self):
        # Fix: the original returned self.__caches[key] where 'key' was
        # never defined (NameError); return the key view as intended.
        return self.__caches.keys()
821
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500822
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    NOTE(review): this constructs Cache with two arguments, while
    MulticonfigCache above calls Cache(databuilder, mc, data_hash,
    caches_array) with four -- this entry point looks stale; confirm it
    still has callers before relying on it.
    """
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
841
842
class CacheData(object):
    """Aggregated data structures compiled from the cached recipe info."""

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Give every registered info class a chance to set up its own
        # structures on this object; reject classes with the wrong base.
        for klass in self.caches_array:
            if not issubclass(klass, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % klass)
            klass.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        # Let each per-class info object contribute its fields for fn.
        for entry in info_array:
            entry.add_cacheData(self, fn)
867
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Each process accumulates its own additions in cachedata_extras and
    writes them to a per-pid side file (save_extras); one process later
    folds all side files back into the main cache (save_merge).
    """

    def __init__(self):
        self.cachefile = None
        # Data loaded from disk: a list of dicts (see create_cachedata).
        self.cachedata = self.create_cachedata()
        # Additions made by this process only, merged back later.
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """Locate the cache file from PERSISTENT_DIR/CACHE and load it.

        Silently starts empty when no cache dir is configured, the file
        is missing/corrupt, or the version does not match.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Best-effort load: a missing or corrupt cache just means we
            # start from scratch.  (Was a bare 'except:', which would
            # also have swallowed KeyboardInterrupt/SystemExit.)
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        """Return a fresh, empty cache payload (overridable in subclasses)."""
        data = [{}]
        return data

    def save_extras(self):
        """Write this process's cachedata_extras to a unique side file.

        The per-pid lock/file dance lets many processes dump their extras
        concurrently while a shared lock holds off save_merge().
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            # Find a free "<cachefile>-<i>" slot we can lock exclusively.
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Copy entries from source into dest without overwriting
        existing keys (first writer wins)."""
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """Fold every per-process side file into the main cache file.

        Side files that are unreadable or have a mismatched version are
        discarded; all side files are removed afterwards.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400971
972
class SimpleCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Single-payload variant: init_cache() returns the loaded data (or a
    default), and save() writes one object plus a version stamp.
    """

    def __init__(self, version):
        self.cachefile = None
        self.cachedata = None
        # Version stamp written with, and checked against, the file.
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """Load the cache file found via PERSISTENT_DIR/CACHE.

        Returns *defaultdata* when no cache dir is configured, or when
        the file is missing, unreadable, or of a different version.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Best-effort: a missing or corrupt cache falls back to
            # defaultdata.  (Was a bare 'except:', which would also have
            # swallowed KeyboardInterrupt/SystemExit.)
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Write *data* (plus the version stamp) under the lock file.

        No-op when init_cache() never established a cache file.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)