blob: 18d5574a31744b10d1cfacce2e2b9c72a996b9f2 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5199d832021-09-24 16:47:35 -050022from collections import defaultdict
23from collections.abc import Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050024import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050025from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050026import re
Andrew Geissler78b72792022-06-14 06:47:25 -050027import shutil
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028
29logger = logging.getLogger("BitBake.Cache")
30
Andrew Geissler517393d2023-01-13 08:55:19 -060031__cache_version__ = "155"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050032
def getCacheFile(path, filename, mc, data_hash):
    """Return the full on-disk path of a cache file.

    The name is qualified with the multiconfig name (when non-empty)
    and the base configuration hash.
    """
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, suffix, data_hash))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050038
39# RecipeInfoCommon defines common data retrieving methods
40# from meta data for caches. CoreRecipeInfo as well as other
41# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Shared helpers for extracting values from recipe metadata.

    CoreRecipeInfo, as well as any extra per-cache RecipeInfo classes,
    inherit from this class to pull variables out of a datastore in
    pre-digested forms (lists, ints, dependency dicts, flag maps).
    """

    @classmethod
    def listvar(cls, var, metadata):
        """Return the variable split on whitespace ([] when unset)."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the variable as an integer, defaulting to 0 when unset."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return the variable parsed as an exploded dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Map each package to the deps from its package-specific override."""
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Map each task to the value of its task-specific override."""
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Map each variable in varlist to its *flag* value.

        With squash=True, entries whose flag value is falsy are dropped.
        """
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for k, v in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the (optionally expanded) variable value, or ''."""
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050078
79
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe data stored in the cache for every parsed recipe.

    One instance is created per recipe file (and per variant).  It
    snapshots the variables that provider resolution and task scheduling
    need, so a valid cache entry lets BitBake avoid re-parsing the file.
    """
    __slots__ = ()

    # Base name of the on-disk cache file for this info class
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        # (path, mtime) pairs this parse depended on; used later by
        # Cache.cacheValidUpdate() to decide if this entry is still valid
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        # All class-extension variants of this recipe; '' is the base recipe
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        # Fall back to deriving PN from the filename when it is unset
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        # Skipped recipes only carry the minimal data above; bail out early
        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        # squash=True: only keep tasks that actually declare file-checksums
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # expand=False: the inherit cache is stored unexpanded
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the empty aggregate structures that add_cacheData()
        fills in; called once per CacheData-style instance."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's data into the aggregate *cachedata*,
        keyed by the (virtual) filename *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # PN itself always counts as a provide
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Runtime provides: recipe-wide RPROVIDES plus per-package ones.
        # NOTE(review): "+=" extends self.rprovides in place (same list
        # object), so calling add_cacheData twice would accumulate
        # duplicates — appears to rely on being called once per fn.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not bb.utils.to_boolean(self.not_world):
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        # Base task hashes are keyed by "filename:task"
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500240
Andrew Geissler517393d2023-01-13 08:55:19 -0600241
class SiggenRecipeInfo(RecipeInfoCommon):
    """Signature-generator data cached alongside the core recipe info.

    Kept in its own cache file because the data is large and only the
    signature generation code needs it.
    """
    __slots__ = ()

    classname = "SiggenRecipeInfo"
    cachefile = "bb_cache_" + classname +".dat"
    # we don't want to show this information in graph files so don't set cachefields
    #cachefields = []

    def __init__(self, filename, metadata):
        self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
        self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
        self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)

    @classmethod
    def init_cacheData(cls, cachedata):
        # Aggregate dicts on the CacheData object, keyed by (virtual) filename
        cachedata.siggen_taskdeps = {}
        cachedata.siggen_gendeps = {}
        cachedata.siggen_varvals = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.siggen_gendeps[fn] = self.siggen_gendeps
        cachedata.siggen_varvals[fn] = self.siggen_varvals
        cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps

    # The siggen variable data is large and impacts:
    # - bitbake's overall memory usage
    # - the amount of data sent over IPC between parsing processes and the server
    # - the size of the cache files on disk
    # - the size of "sigdata" hash information files on disk
    # The data consists of strings (some large) or frozenset lists of variables
    # As such, we a) deduplicate the data here and b) pass references to the object at second
    # access (e.g. over IPC or saving into pickle).

    # Process-wide deduplication/interning state used by the pickle hooks below
    store = {}
    save_map = {}
    save_count = 1
    restore_map = {}
    restore_count = {}

    @classmethod
    def reset(cls):
        # Needs to be called before starting new streamed data in a given process
        # (e.g. writing out the cache again)
        cls.save_map = {}
        cls.save_count = 1
        cls.restore_map = {}

    @classmethod
    def _save(cls, deps):
        """Encode a {dep: value} dict as (dep, value, id) tuples, sending
        each distinct value only once per stream (until reset())."""
        ret = []
        if not deps:
            return deps
        for dep in deps:
            fs = deps[dep]
            if fs is None:
                ret.append((dep, None, None))
            elif fs in cls.save_map:
                # Value already sent in this stream: transmit only its id
                ret.append((dep, None, cls.save_map[fs]))
            else:
                # First occurrence: send the value and remember its id
                cls.save_map[fs] = cls.save_count
                ret.append((dep, fs, cls.save_count))
                cls.save_count = cls.save_count + 1
        return ret

    @classmethod
    def _restore(cls, deps, pid):
        """Decode _save() output back into a {dep: value} dict, resolving
        id-only entries via the per-sender (*pid*) back-reference map."""
        ret = {}
        if not deps:
            return deps
        if pid not in cls.restore_map:
            cls.restore_map[pid] = {}
        map = cls.restore_map[pid]
        for dep, fs, mapnum in deps:
            if fs is None and mapnum is None:
                ret[dep] = None
            elif fs is None:
                # Back-reference to a value already seen from this sender
                ret[dep] = map[mapnum]
            else:
                # Intern the value in cls.store so equal objects share storage
                try:
                    fs = cls.store[fs]
                except KeyError:
                    cls.store[fs] = fs
                map[mapnum] = fs
                ret[dep] = fs
        return ret

    def __getstate__(self):
        # Pickle hook: deduplicate the three large dicts and tag the state
        # with our pid so receivers can keep per-sender reference maps
        ret = {}
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            ret[key] = self._save(self.__dict__[key])
        ret['pid'] = os.getpid()
        return ret

    def __setstate__(self, state):
        # Pickle hook: reverse __getstate__ using the sender's pid
        pid = state['pid']
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            setattr(self, key, self._restore(state[key], pid))
339
340
def virtualfn2realfn(virtualfn):
    """
    Split a virtual filename into (real filename, class extension, multiconfig).

    Accepts "mc:<mc>:virtual:<cls>:<fn>", "virtual:<cls>:<fn>",
    "mc:<mc>:<fn>" or a plain "<fn>"; absent parts come back as "".
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        (_, mc, virtualfn) = virtualfn.split(':', 2)

    fn, cls = virtualfn, ""
    if virtualfn.startswith('virtual:'):
        pieces = virtualfn.split(':')
        cls, fn = ":".join(pieces[1:-1]), pieces[-1]

    return (fn, cls, mc)
357
def realfn2virtual(realfn, cls, mc):
    """
    Inverse of virtualfn2realfn: rebuild the virtual filename from the
    real filename, class extension and multiconfig name ('' to omit).
    """
    virtualfn = realfn
    if cls:
        virtualfn = "virtual:%s:%s" % (cls, virtualfn)
    if mc:
        virtualfn = "mc:%s:%s" % (mc, virtualfn)
    return virtualfn
367
def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant to a virtual filename.

    The variant may itself carry a multiconfig prefix ("mc:<mc>:<cls>"
    or "mc:<mc>:"), which is re-ordered ahead of the virtual: section.
    """
    if variant == "":
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        fields = variant.split(":")
        mc, rest = fields[1], fields[2:]
        if rest[0]:
            return "mc:%s:virtual:%s:%s" % (mc, ":".join(rest), realfn)
        return "mc:%s:%s" % (mc, realfn)
    return "virtual:%s:%s" % (variant, realfn)
380
Andrew Geissler517393d2023-01-13 08:55:19 -0600381#
382# Cooker calls cacheValid on its recipe list, then either calls loadCached
383# from it's main thread or parse from separate processes to generate an up to
384# date cache
385#
386class Cache(object):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500387 """
388 BitBake Cache implementation
389 """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Set up the cache for one multiconfig.

        databuilder: object supplying the configuration datastore via .data
        mc:          multiconfig name ('' / falsy for the default config)
        data_hash:   base configuration hash, embedded in cache filenames
        caches_array: RecipeInfoCommon subclasses to cache data for
        """
        self.databuilder = databuilder
        self.data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = self.data.getVar("CACHE")
        self.clean = set()        # filenames validated as having usable cache entries
        self.checked = set()      # filenames already examined this session
        self.depends_cache = {}   # filename -> list of RecipeInfoCommon instances
        self.data_fn = None
        self.cacheclean = True    # nothing new parsed yet, so nothing to re-save
        self.data_hash = data_hash
        # Splits flattened "path:True path:False ..." checksum strings while
        # tolerating spaces and colons inside the paths themselves
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500411
    def getCacheFile(self, cachefile):
        # Resolve a base cache file name to this cache's full path,
        # qualified by multiconfig name and base configuration hash
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
414
    def prepare_cache(self, progress):
        """Load existing cache files if they are all present, else arrange
        for a rebuild.

        progress: callable forwarded to load_cachefile() to report bytes read.
        Returns the number of entries loaded (0 when rebuilding).
        """
        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # Only load if every configured cache class has its file on disk;
        # init_cacheData() is still called for each so the aggregate
        # structures exist either way
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Best effort only; the symlink is purely cosmetic
            pass

        return loaded
452
453 def cachesize(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500454 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500455 for cache_class in self.caches_array:
456 cachefile = self.getCacheFile(cache_class.cachefile)
457 try:
458 with open(cachefile, "rb") as cachefile:
459 cachesize += os.fstat(cachefile.fileno()).st_size
460 except FileNotFoundError:
461 pass
462
463 return cachesize
464
    def load_cachefile(self, progress):
        """Load every configured cache file into self.depends_cache.

        Each file holds a version header followed by alternating
        key/value pickles (see sync()).  Returns the number of distinct
        keys loaded, or 0 when any file is invalid or version-mismatched
        (signalling a full rebuild).

        progress: callable invoked with the running byte offset.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            # NB: 'cachefile' is rebound from the path string to the open
            # file object here; tell() below reads from the file handle
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file; the loop only ends via
                # break (EOF raises inside pickled.load())
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    # Entries for the same key from different cache files
                    # accumulate into one list per key
                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # progress is called for every entry; the caller is
                    # responsible for any event throttling
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500514
    def parse(self, filename, appends, layername):
        """Parse the specified filename, returning the recipe information.

        Returns a list of (virtualfn, info_array) pairs, one per variant,
        where info_array holds one instance of each class in
        self.caches_array.
        """
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        # (reverse sort puts the '' base variant at the end)
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate __depends across all variants and attach the
            # combined list to the base recipe's datastore
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
540
Andrew Geissler517393d2023-01-13 08:55:19 -0600541 def loadCached(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500542 """Obtain the recipe information for the specified filename,
Andrew Geissler517393d2023-01-13 08:55:19 -0600543 using cached values.
544 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500545
Andrew Geissler517393d2023-01-13 08:55:19 -0600546 infos = []
547 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
548 info_array = self.depends_cache[filename]
549 for variant in info_array[0].variants:
550 virtualfn = variant2virtual(filename, variant)
551 infos.append((virtualfn, self.depends_cache[virtualfn]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500552
Andrew Geissler517393d2023-01-13 08:55:19 -0600553 return infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500554
555 def cacheValid(self, fn, appends):
556 """
557 Is the cache valid for fn?
558 Fast version, no timestamps checked.
559 """
560 if fn not in self.checked:
561 self.cacheValidUpdate(fn, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500562 if fn in self.clean:
563 return True
564 return False
565
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        On success, fn and all its variants are added to self.clean and
        True is returned; any failure removes fn from the cache and
        returns False.
        """
        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists (mtime 0 means it could not be statted)
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        # file-checksums entries record whether each path existed at parse
        # time ("path:True"/"path:False"); a flip either way invalidates
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        # The applied bbappend list must match exactly (content and order)
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        # Every variant must be present with a full set of cache entries
        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
659
660 def remove(self, fn):
661 """
662 Remove a fn from the cache
663 Called from the parser in error cases
664 """
665 if fn in self.depends_cache:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600666 self.logger.debug("Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500667 del self.depends_cache[fn]
668 if fn in self.clean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600669 self.logger.debug("Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500670 self.clean.remove(fn)
671
    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        # Skip the write entirely if nothing changed since loading
        if self.cacheclean:
            self.logger.debug2("Cache is clean, not saving.")
            return

        # One file per info class: a version header, then alternating
        # key/value pickles for every entry belonging to that class
        # (matched by class name, mirroring load_cachefile())
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug2("Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        # The in-memory cache is released here, so this Cache object is
        # not usable for lookups after sync()
        del self.depends_cache
        SiggenRecipeInfo.reset()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500698
699 @staticmethod
700 def mtime(cachefile):
701 return bb.parse.cached_mtime_noerror(cachefile)
702
703 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500704 if self.mc is not None:
705 (fn, cls, mc) = virtualfn2realfn(filename)
706 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500707 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500708 return
709
710 vfn = realfn2virtual(fn, cls, self.mc)
711 else:
712 vfn = filename
713
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500714 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500715 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500716
717 if watcher:
718 watcher(info_array[0].file_depends)
719
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500720 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
721 if parsed:
722 self.cacheclean = False
723 self.depends_cache[filename] = info_array
724
Andrew Geissler5a43b432020-06-13 10:46:56 -0500725class MulticonfigCache(Mapping):
726 def __init__(self, databuilder, data_hash, caches_array):
727 def progress(p):
728 nonlocal current_progress
729 nonlocal previous_progress
730 nonlocal previous_percent
731 nonlocal cachesize
732
733 current_progress = previous_progress + p
734
735 if current_progress > cachesize:
736 # we might have calculated incorrect total size because a file
737 # might've been written out just after we checked its size
738 cachesize = current_progress
739 current_percent = 100 * current_progress / cachesize
740 if current_percent > previous_percent:
741 previous_percent = current_percent
742 bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
743 databuilder.data)
744
745
746 cachesize = 0
747 current_progress = 0
748 previous_progress = 0
749 previous_percent = 0
750 self.__caches = {}
751
752 for mc, mcdata in databuilder.mcdata.items():
753 self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
754
755 cachesize += self.__caches[mc].cachesize()
756
757 bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
758 loaded = 0
759
760 for c in self.__caches.values():
Andrew Geissler517393d2023-01-13 08:55:19 -0600761 SiggenRecipeInfo.reset()
Andrew Geissler5a43b432020-06-13 10:46:56 -0500762 loaded += c.prepare_cache(progress)
763 previous_progress = current_progress
764
765 # Note: depends cache number is corresponding to the parsing file numbers.
766 # The same file has several caches, still regarded as one item in the cache
767 bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
768
769 def __len__(self):
770 return len(self.__caches)
771
772 def __getitem__(self, key):
773 return self.__caches[key]
774
775 def __contains__(self, key):
776 return key in self.__caches
777
778 def __iter__(self):
779 for k in self.__caches:
780 yield k
781
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500782def init(cooker):
783 """
784 The Objective: Cache the minimum amount of data possible yet get to the
785 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
786
787 To do this, we intercept getVar calls and only cache the variables we see
788 being accessed. We rely on the cache getVar calls being made for all
789 variables bitbake might need to use to reach this stage. For each cached
790 file we need to track:
791
792 * Its mtime
793 * The mtimes of all its dependencies
794 * Whether it caused a parse.SkipRecipe exception
795
796 Files causing parsing errors are evicted from the cache.
797
798 """
799 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
800
801
class CacheData(object):
    """Aggregate data structures compiled from per-recipe cache info.

    Each extra cache class registers its own storage on this object via
    init_cacheData(); recipes then populate it through
    add_from_recipeinfo().
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for klass in self.caches_array:
            if not issubclass(klass, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % klass)
            klass.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Merge every info object parsed for recipe *fn* into these tables."""
        for entry in info_array:
            entry.add_cacheData(self, fn)
826
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Each writer process dumps its additions to a unique per-pid sidecar
    file (save_extras); one process later folds all sidecars back into
    the main cache file (save_merge).
    """

    def __init__(self):
        # cachefile stays None until init_cache() is given a directory;
        # all save/load methods are no-ops in that state.
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, cachedir, cache_file_name=None):
        """Load the persistent cache from *cachedir*, if one exists.

        Starts with an empty cache when the directory is unset, the file
        is missing/unreadable, or its version stamp doesn't match.
        """
        if not cachedir:
            return

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Missing or corrupt cache: treat as empty rather than fail.
            # (Was a bare 'except:'; Exception avoids swallowing
            # KeyboardInterrupt/SystemExit.)
            return
        finally:
            # Release the lock on every path, including unexpected errors.
            bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # One dict per category of data; subclasses may return more dicts.
        data = [{}]
        return data

    def save_extras(self):
        """Dump this process's new entries to a unique sidecar file.

        No-op when the cache is uninitialised or nothing was added.
        """
        if not self.cachefile:
            return

        have_data = any(self.cachedata_extras)
        if not have_data:
            return

        # Shared lock: many writers may emit sidecars concurrently, but
        # never while save_merge() holds the exclusive lock.
        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Find a free numbered slot, starting from our pid; a slot is
        # taken if its lock is held or its sidecar file already exists.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

        with open(self.cachefile + "-" + str(i), "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Fold *source* into *dest* in place; existing keys in dest win."""
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """Merge every sidecar file into the main cache file.

        Corrupt or version-mismatched sidecars are deleted and skipped.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        have_data = False

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError, pickle.UnpicklingError):
                # Truncated/corrupt sidecar: discard it.  (UnpicklingError
                # was previously uncaught and would abort the whole merge.)
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            have_data = True
            self.merge_data(extradata, data)
            os.unlink(f)

        # Only rewrite the main cache file if at least one sidecar merged.
        if have_data:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400937
938
class SimpleCache(object):
    """Single-blob pickled cache guarded by a lock file.

    Unlike MultiProcessCache there are no per-process sidecars: load the
    whole payload with init_cache(), overwrite it with save(), and
    duplicate the on-disk file with copyfile().
    """

    def __init__(self, version):
        # cachefile stays None until init_cache() finds a cache directory.
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """Load and return the cache contents.

        Returns *defaultdata* when no cache directory is configured, the
        file is missing/unreadable, or its version stamp doesn't match.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Missing or corrupt cache file: fall back to the default.
            # (Was a bare 'except:'; Exception keeps KeyboardInterrupt/
            # SystemExit propagating.)
            return defaultdata
        finally:
            # Release the lock on every path, including unexpected errors.
            bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Overwrite the cache file with pickled *data*, under the lock."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.cacheversion])
        finally:
            # Previously the lock leaked if the write raised.
            bb.utils.unlockfile(glf)

    def copyfile(self, target):
        """Copy the on-disk cache file to *target* while holding the lock."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            shutil.copy(self.cachefile, target)
        finally:
            # Previously the lock leaked if the copy raised.
            bb.utils.unlockfile(glf)