#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import logging
import pickle
from collections import defaultdict
from collections.abc import Mapping
import bb.utils
from bb import PrefixLoggerAdapter
import re

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "154"

def getCacheFile(path, filename, mc, data_hash):
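    """
    Build the cache file path: <path>/<filename>[.<mc>].<data_hash>.
    Illustrative example (hypothetical arguments):
        getCacheFile("/srv/cache", "bb_cache.dat", "mc1", "abc123")
        -> "/srv/cache/bb_cache.dat.mc1.abc123" (".mc1" is omitted when mc is empty)
    """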
    mcspec = ''
    if mc:
        mcspec = ".%s" % mc
    return os.path.join(path, filename + mcspec + "." + data_hash)

# RecipeInfoCommon defines the common metadata-retrieval methods used by the
# caches. CoreRecipeInfo, as well as any extra RecipeInfo classes, needs to
# inherit from this class.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename, metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # Create a collection of all targets for sanity-checking tasks, such
        # as checking upstream versions, licenses, and the tools needed for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
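
    Illustrative examples (hypothetical paths):
        virtualfn2realfn("virtual:native:/r/foo.bb")        -> ("/r/foo.bb", "native", "")
        virtualfn2realfn("mc:mc1:virtual:native:/r/foo.bb") -> ("/r/foo.bb", "native", "mc1")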
243 """
244 mc = ""
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600245 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600246 elems = virtualfn.split(':')
247 mc = elems[1]
248 virtualfn = ":".join(elems[2:])
249
250 fn = virtualfn
251 cls = ""
252 if virtualfn.startswith('virtual:'):
253 elems = virtualfn.split(':')
254 cls = ":".join(elems[1:-1])
255 fn = elems[-1]
256
257 return (fn, cls, mc)
258
259def realfn2virtual(realfn, cls, mc):
260 """
261 Convert a real filename + the associated subclass keyword to a virtual filename
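
    Illustrative example (hypothetical path): realfn2virtual("/r/foo.bb", "native", "mc1")
    -> "mc:mc1:virtual:native:/r/foo.bb" (each prefix is only added when set)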
262 """
263 if cls:
264 realfn = "virtual:" + cls + ":" + realfn
265 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -0400266 realfn = "mc:" + mc + ":" + realfn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600267 return realfn
268
269def variant2virtual(realfn, variant):
270 """
    Convert a real filename + a variant (multiconfig and/or class extension) into a virtual filename
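
    Illustrative examples (hypothetical path):
        variant2virtual("/r/foo.bb", "native")        -> "virtual:native:/r/foo.bb"
        variant2virtual("/r/foo.bb", "mc:mc1:")       -> "mc:mc1:/r/foo.bb"
        variant2virtual("/r/foo.bb", "mc:mc1:native") -> "mc:mc1:virtual:native:/r/foo.bb"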
272 """
273 if variant == "":
274 return realfn
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600275 if variant.startswith("mc:") and variant.count(':') >= 2:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600276 elems = variant.split(":")
277 if elems[2]:
Brad Bishop15ae2502019-06-18 21:44:24 -0400278 return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
279 return "mc:" + elems[1] + ":" + realfn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600280 return "virtual:" + variant + ":" + realfn
281
282def parse_recipe(bb_data, bbfile, appends, mc=''):
283 """
284 Parse a recipe
285 """
286
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600287 bb_data.setVar("__BBMULTICONFIG", mc)
288
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600289 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600290 bb.parse.cached_mtime_noerror(bbfile_loc)
291
Andrew Geissler595f6302022-01-24 19:11:47 +0000292 if appends:
293 bb_data.setVar('__BBAPPEND', " ".join(appends))
294 bb_data = bb.parse.handle(bbfile, bb_data)
295 return bb_data
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500296
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600297
298class NoCache(object):
299
300 def __init__(self, databuilder):
301 self.databuilder = databuilder
302 self.data = databuilder.data
303
304 def loadDataFull(self, virtualfn, appends):
305 """
306 Return a complete set of data for fn.
307 To do this, we need to parse the file.
308 """
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600309 logger.debug("Parsing %s (full)" % virtualfn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600310 (fn, virtual, mc) = virtualfn2realfn(virtualfn)
311 bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
312 return bb_data[virtual]
313
Andrew Geissler5a43b432020-06-13 10:46:56 -0500314 def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600315 """
316 Load and parse one .bb build file
        Return the parsed datastores as a dict keyed by variant ('' for the base recipe)
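
        Illustrative key shapes (the exact set depends on BBCLASSEXTEND and any
        configured multiconfigs): '', 'native', 'mc:mc1:', 'mc:mc1:native'.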
318 """
319
320 if virtonly:
321 (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
322 bb_data = self.databuilder.mcdata[mc].createCopy()
323 bb_data.setVar("__ONLYFINALISE", virtual or "default")
324 datastores = parse_recipe(bb_data, bbfile, appends, mc)
325 return datastores
326
Andrew Geissler5a43b432020-06-13 10:46:56 -0500327 if mc is not None:
328 bb_data = self.databuilder.mcdata[mc].createCopy()
329 return parse_recipe(bb_data, bbfile, appends, mc)
330
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600331 bb_data = self.data.createCopy()
332 datastores = parse_recipe(bb_data, bbfile, appends)
333
334 for mc in self.databuilder.mcdata:
335 if not mc:
336 continue
337 bb_data = self.databuilder.mcdata[mc].createCopy()
338 newstores = parse_recipe(bb_data, bbfile, appends, mc)
339 for ns in newstores:
Brad Bishop15ae2502019-06-18 21:44:24 -0400340 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600341
342 return datastores
343
344class Cache(NoCache):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500345 """
346 BitBake Cache implementation
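
    Rough usage sketch (the names used here are illustrative, not defined in
    this module):

        cache = Cache(databuilder, mc='', data_hash=parse_hash, caches_array=[CoreRecipeInfo])
        cache.prepare_cache(progress_cb)          # load any existing cache files
        cached, infos = cache.load(recipe_fn, appends)
        cache.sync()                              # write the cache back to disk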
347 """
Andrew Geissler5a43b432020-06-13 10:46:56 -0500348 def __init__(self, databuilder, mc, data_hash, caches_array):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600349 super().__init__(databuilder)
350 data = databuilder.data
351
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500352 # Pass caches_array information into Cache Constructor
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500353 # It will be used later for deciding whether we
354 # need extra cache file dump/load support
Andrew Geissler5a43b432020-06-13 10:46:56 -0500355 self.mc = mc
356 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500357 self.caches_array = caches_array
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500358 self.cachedir = data.getVar("CACHE")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500359 self.clean = set()
360 self.checked = set()
361 self.depends_cache = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500362 self.data_fn = None
363 self.cacheclean = True
364 self.data_hash = data_hash
Andrew Geissler82c905d2020-04-13 13:39:40 -0500365 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
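        # The 'file-checksums' task flag holds whitespace-separated "<path>:<True|False>"
        # entries; splitting only on whitespace that follows ":True" or ":False" keeps
        # paths containing spaces intact (illustrative entry: "/srv/files/some file.txt:True").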

        if self.cachedir in [None, '']:
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True

    def getCacheFile(self, cachefile):
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)

    def prepare_cache(self, progress):
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            pass

        return loaded

    def cachesize(self):
        if not self.has_cache:
            return 0

        cachesize = 0
        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            try:
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size
            except FileNotFoundError:
                pass

        return cachesize

    def load_cachefile(self, progress):
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

            previous_progress += current_progress

        return len(self.depends_cache)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            return cached, self.parse(filename, appends)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            self.logger.debug("Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            self.logger.debug("Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            self.logger.debug2("Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug2("Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if self.mc is not None:
            (fn, cls, mc) = virtualfn2realfn(filename)
            if mc:
                self.logger.error("Unexpected multiconfig %s", filename)
                return

            vfn = realfn2virtual(fn, cls, self.mc)
        else:
            vfn = filename

        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(vfn, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

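# MulticonfigCache aggregates one Cache per configured multiconfig and behaves as a
# read-only Mapping keyed by multiconfig name ('' for the default configuration).
# Illustrative use, assuming a prepared cooker data builder named "databuilder":
#   caches = MulticonfigCache(databuilder, data_hash, [CoreRecipeInfo])
#   cached, infos = caches[''].load(recipe_fn, appends)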
class MulticonfigCache(Mapping):
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                              databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: the loaded count corresponds to the number of parsed files; a
        # file may contribute to several caches but still counts as one item.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k

def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
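
    Usage sketch (illustrative subclass; real subclasses define their own
    cache_file_name and CACHE_VERSION):

        class MyCache(MultiProcessCache):
            cache_file_name = "my_cache.dat"
            CACHE_VERSION = 1

        c = MyCache()
        c.init_cache(d)     # load the shared cache file, if any
        # workers add entries to c.cachedata_extras and call c.save_extras();
        # the main process later calls c.save_merge() to fold them back in.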
850 """
851
852 def __init__(self):
853 self.cachefile = None
854 self.cachedata = self.create_cachedata()
855 self.cachedata_extras = self.create_cachedata()
856
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500857 def init_cache(self, d, cache_file_name=None):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500858 cachedir = (d.getVar("PERSISTENT_DIR") or
859 d.getVar("CACHE"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500860 if cachedir in [None, '']:
861 return
862 bb.utils.mkdirhier(cachedir)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500863 self.cachefile = os.path.join(cachedir,
864 cache_file_name or self.__class__.cache_file_name)
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600865 logger.debug("Using cache in '%s'", self.cachefile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500866
867 glf = bb.utils.lockfile(self.cachefile + ".lock")
868
869 try:
870 with open(self.cachefile, "rb") as f:
871 p = pickle.Unpickler(f)
872 data, version = p.load()
873 except:
874 bb.utils.unlockfile(glf)
875 return
876
877 bb.utils.unlockfile(glf)
878
879 if version != self.__class__.CACHE_VERSION:
880 return
881
882 self.cachedata = data
883
884 def create_cachedata(self):
885 data = [{}]
886 return data
887
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500888 def save_extras(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500889 if not self.cachefile:
890 return
891
892 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
893
894 i = os.getpid()
895 lf = None
896 while not lf:
897 lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
898 if not lf or os.path.exists(self.cachefile + "-" + str(i)):
899 if lf:
900 bb.utils.unlockfile(lf)
901 lf = None
902 i = i + 1
903 continue
904
905 with open(self.cachefile + "-" + str(i), "wb") as f:
906 p = pickle.Pickler(f, -1)
907 p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
908
909 bb.utils.unlockfile(lf)
910 bb.utils.unlockfile(glf)
911
912 def merge_data(self, source, dest):
913 for j in range(0,len(dest)):
914 for h in source[j]:
915 if h not in dest[j]:
916 dest[j][h] = source[j][h]
917
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500918 def save_merge(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500919 if not self.cachefile:
920 return
921
922 glf = bb.utils.lockfile(self.cachefile + ".lock")
923
924 data = self.cachedata
925
926 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
927 f = os.path.join(os.path.dirname(self.cachefile), f)
928 try:
929 with open(f, "rb") as fd:
930 p = pickle.Unpickler(fd)
931 extradata, version = p.load()
932 except (IOError, EOFError):
933 os.unlink(f)
934 continue
935
936 if version != self.__class__.CACHE_VERSION:
937 os.unlink(f)
938 continue
939
940 self.merge_data(extradata, data)
941 os.unlink(f)
942
943 with open(self.cachefile, "wb") as f:
944 p = pickle.Pickler(f, -1)
945 p.dump([data, self.__class__.CACHE_VERSION])
946
947 bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400948
949
950class SimpleCache(object):
951 """
    BitBake simple cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self, version):
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)