blob: 5ea41c5de0b4578ea87a4bbb561b581adb3845b4 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5199d832021-09-24 16:47:35 -050022from collections import defaultdict
23from collections.abc import Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050024import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050025from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050026import re
Andrew Geissler78b72792022-06-14 06:47:25 -050027import shutil
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028
29logger = logging.getLogger("BitBake.Cache")
30
Andrew Geissler517393d2023-01-13 08:55:19 -060031__cache_version__ = "155"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050032
def getCacheFile(path, filename, mc, data_hash):
    """Return the full path of a cache file for the given multiconfig.

    The multiconfig name (if any) and the configuration data hash are
    embedded in the file name.
    """
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, suffix, data_hash))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050038
class RecipeInfoCommon(object):
    """Common metadata-retrieval helpers for cache info classes.

    CoreRecipeInfo, as well as any extra RecipeInfo classes, inherit
    from this class.
    """

    @classmethod
    def listvar(cls, var, metadata):
        """Return the variable's value split on whitespace ([] when unset)."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the variable's value as an integer, defaulting to 0."""
        value = cls.getvar(var, metadata)
        return int(value) if value else 0

    @classmethod
    def depvar(cls, var, metadata):
        """Return the dependency names parsed from a DEPENDS-style value."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Map each package name to the exploded value of var:<pkg>."""
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Map each task name to the value of var:task-<task>."""
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Map each variable to its flag value; squash drops falsy entries."""
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for k, v in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the (optionally expanded) value of var, or '' when unset."""
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050078
79
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information cached between parses.

    One instance is created per (virtual) recipe file, pickled into
    bb_cache.dat keyed by filename, and merged into the cooker-wide
    aggregate via add_cacheData().
    """
    # Empty __slots__ here; instances still get a __dict__ through
    # RecipeInfoCommon (which declares no __slots__), so the attribute
    # assignments in __init__ work.
    __slots__ = ()

    # On-disk pickle file name used for this cache class.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Capture everything the cache needs from parsed recipe metadata."""
        # Files this parse depended on, as (path, mtime) pairs.
        self.file_depends = metadata.getVar('__depends', False)
        # Recipe file mtime at parse time; compared during cache validation.
        self.timestamp = bb.parse.cached_mtime(filename)
        # Class-extension variants; '' represents the base recipe itself.
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            # A recipe with no PACKAGES still nominally packages itself.
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Recipe was skipped during parsing; no further data is needed.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # Deliberately unexpanded; used for inherit tracking.
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the (empty) aggregate structures add_cacheData() fills in."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into the cooker-wide cachedata,
        keyed by the (virtual) filename fn."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # A recipe always provides its own PN, plus anything in PROVIDES.
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Runtime provides: recipe-wide RPROVIDES plus per-package overrides.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not bb.utils.to_boolean(self.not_world):
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500240
Andrew Geissler517393d2023-01-13 08:55:19 -0600241
class SiggenRecipeInfo(RecipeInfoCommon):
    """Signature-generator data cached alongside the core recipe info."""
    __slots__ = ()

    classname = "SiggenRecipeInfo"
    cachefile = "bb_cache_" + classname +".dat"
    # we don't want to show this information in graph files so don't set cachefields
    #cachefields = []

    def __init__(self, filename, metadata):
        # Raw (unexpanded) signature-generation data from the datastore.
        self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
        self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
        self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the (empty) aggregate structures add_cacheData() fills in."""
        cachedata.siggen_taskdeps = {}
        cachedata.siggen_gendeps = {}
        cachedata.siggen_varvals = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's siggen data into cachedata, keyed by fn."""
        cachedata.siggen_gendeps[fn] = self.siggen_gendeps
        cachedata.siggen_varvals[fn] = self.siggen_varvals
        cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps

    # The siggen variable data is large and impacts:
    #  - bitbake's overall memory usage
    #  - the amount of data sent over IPC between parsing processes and the server
    #  - the size of the cache files on disk
    #  - the size of "sigdata" hash information files on disk
    # The data consists of strings (some large) or frozenset lists of variables
    # As such, we a) deduplicate the data here and b) pass references to the object
    # at second access (e.g. over IPC or saving into pickle).

    # store: interning pool mapping each value to its canonical object.
    # save_map/save_count: value -> small integer id used while pickling.
    # restore_map: per-sender-pid mapping of integer id -> value while unpickling.
    # NOTE(review): restore_count appears unused (reset() does not touch it).
    store = {}
    save_map = {}
    save_count = 1
    restore_map = {}
    restore_count = {}

    @classmethod
    def reset(cls):
        # Needs to be called before starting new streamed data in a given process
        # (e.g. writing out the cache again)
        cls.save_map = {}
        cls.save_count = 1
        cls.restore_map = {}

    @classmethod
    def _save(cls, deps):
        """Encode a dep dict as (dep, value-or-None, id) tuples.

        The first occurrence of a value carries the value and a fresh id;
        subsequent occurrences carry only the id, deduplicating the stream.
        """
        ret = []
        if not deps:
            return deps
        for dep in deps:
            fs = deps[dep]
            if fs is None:
                ret.append((dep, None, None))
            elif fs in cls.save_map:
                # Already streamed; send just the reference id.
                ret.append((dep, None, cls.save_map[fs]))
            else:
                cls.save_map[fs] = cls.save_count
                ret.append((dep, fs, cls.save_count))
                cls.save_count = cls.save_count + 1
        return ret

    @classmethod
    def _restore(cls, deps, pid):
        """Decode the tuples produced by _save back into a dict.

        Ids are scoped per sending pid since each parser process numbers
        its own stream independently.
        """
        ret = {}
        if not deps:
            return deps
        if pid not in cls.restore_map:
            cls.restore_map[pid] = {}
        map = cls.restore_map[pid]
        for dep, fs, mapnum in deps:
            if fs is None and mapnum is None:
                ret[dep] = None
            elif fs is None:
                # Back-reference to a value seen earlier in this stream.
                ret[dep] = map[mapnum]
            else:
                # Intern the value so duplicates share one object.
                try:
                    fs = cls.store[fs]
                except KeyError:
                    cls.store[fs] = fs
                map[mapnum] = fs
                ret[dep] = fs
        return ret

    def __getstate__(self):
        # Pickle via the deduplicating encoding; tag with our pid so the
        # receiver can scope the reference ids correctly.
        ret = {}
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            ret[key] = self._save(self.__dict__[key])
        ret['pid'] = os.getpid()
        return ret

    def __setstate__(self, state):
        pid = state['pid']
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            setattr(self, key, self._restore(state[key], pid))
339
340
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        # Strip the "mc:<name>:" prefix, keeping the multiconfig name.
        _, mc, virtualfn = virtualfn.split(':', 2)

    fn, cls = virtualfn, ""
    if virtualfn.startswith('virtual:'):
        # "virtual:<cls...>:<fn>" - the class may itself contain colons.
        prefix, _, fn = virtualfn.rpartition(':')
        cls = prefix[len('virtual:'):]

    return (fn, cls, mc)
359
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    virtual = realfn
    if cls:
        virtual = "virtual:%s:%s" % (cls, virtual)
    if mc:
        virtual = "mc:%s:%s" % (mc, virtual)
    return virtual
369
def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        elems = variant.split(":")
        # elems[1] is the multiconfig name; the remainder (if any) is the class.
        if elems[2]:
            return "mc:%s:virtual:%s:%s" % (elems[1], ":".join(elems[2:]), realfn)
        return "mc:%s:%s" % (elems[1], realfn)
    return "virtual:%s:%s" % (variant, realfn)
382
#
# Cooker calls cacheValid on its recipe list, then either calls loadCached
# from its main thread or parse from separate processes to generate an up to
# date cache
#
388class Cache(object):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500389 """
390 BitBake Cache implementation
391 """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Set up the cache for one multiconfig.

        databuilder: the cooker's data builder providing the base datastore
        mc: multiconfig name ('' / falsy for the default configuration)
        data_hash: configuration hash, embedded in the cache file names
        caches_array: RecipeInfoCommon subclasses to dump/load
        """
        self.databuilder = databuilder
        self.data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = self.data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash
        # Splits a file-checksums flag value on whitespace following a
        # ":True"/":False" marker - filenames may contain spaces and colons.
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500413
    def getCacheFile(self, cachefile):
        """Return the full path for cachefile in this cache's dir/mc/hash."""
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
416
    def prepare_cache(self, progress):
        """Load existing cache files if all of them are present.

        Returns the number of entries loaded (0 when rebuilding).
        progress is a callable invoked with the bytes read so far.
        """
        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # Every cache class must have its file on disk; one missing file
        # invalidates the whole set.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Symlinks may be unsupported here; purely cosmetic, so ignore.
            pass

        return loaded
454
455 def cachesize(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500456 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500457 for cache_class in self.caches_array:
458 cachefile = self.getCacheFile(cache_class.cachefile)
459 try:
460 with open(cachefile, "rb") as cachefile:
461 cachesize += os.fstat(cachefile.fileno()).st_size
462 except FileNotFoundError:
463 pass
464
465 return cachesize
466
    def load_cachefile(self, progress):
        """Stream every cache class file into self.depends_cache.

        Returns the number of entries loaded, or 0 (forcing a rebuild)
        when a file is unreadable or its version does not match.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file: a stream of alternating
                # key/value pickles until EOF or a malformed entry.
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        # End of the pickle stream.
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

            previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500516
    def parse(self, filename, appends, layername):
        """Parse the specified filename, returning the recipe information

        Returns a list of (virtualfn, info_array) pairs, one per variant,
        where info_array holds one info object per configured cache class.
        """
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # __depends accumulates across all variants; the base recipe
            # (empty variant, processed last) receives the full set.
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
542
Andrew Geissler517393d2023-01-13 08:55:19 -0600543 def loadCached(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500544 """Obtain the recipe information for the specified filename,
Andrew Geissler517393d2023-01-13 08:55:19 -0600545 using cached values.
546 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500547
Andrew Geissler517393d2023-01-13 08:55:19 -0600548 infos = []
549 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
550 info_array = self.depends_cache[filename]
551 for variant in info_array[0].variants:
552 virtualfn = variant2virtual(filename, variant)
553 infos.append((virtualfn, self.depends_cache[virtualfn]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500554
Andrew Geissler517393d2023-01-13 08:55:19 -0600555 return infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500556
557 def cacheValid(self, fn, appends):
558 """
559 Is the cache valid for fn?
560 Fast version, no timestamps checked.
561 """
562 if fn not in self.checked:
563 self.cacheValidUpdate(fn, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500564 if fn in self.clean:
565 return True
566 return False
567
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        Updates self.checked/self.clean and drops stale entries from
        self.depends_cache as a side effect.
        """
        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        # file-checksums entries record "path:True/False" existence
        # expectations captured at parse time; any flip invalidates.
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        # A change in the set (or order) of applied bbappends invalidates.
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
661
662 def remove(self, fn):
663 """
664 Remove a fn from the cache
665 Called from the parser in error cases
666 """
667 if fn in self.depends_cache:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600668 self.logger.debug("Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500669 del self.depends_cache[fn]
670 if fn in self.clean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600671 self.logger.debug("Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500672 self.clean.remove(fn)
673
    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)

        Note: consumes self.depends_cache; the Cache object is no longer
        usable for lookups afterwards.
        """
        if self.cacheclean:
            self.logger.debug2("Cache is clean, not saving.")
            return

        # One pickle file per cache class; each file receives only the
        # info objects belonging to that class.
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug2("Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache
        # Clear SiggenRecipeInfo's stream-dedup state now pickling is done.
        SiggenRecipeInfo.reset()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500700
701 @staticmethod
702 def mtime(cachefile):
703 return bb.parse.cached_mtime_noerror(cachefile)
704
705 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500706 if self.mc is not None:
707 (fn, cls, mc) = virtualfn2realfn(filename)
708 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500709 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500710 return
711
712 vfn = realfn2virtual(fn, cls, self.mc)
713 else:
714 vfn = filename
715
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500716 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500717 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500718
719 if watcher:
720 watcher(info_array[0].file_depends)
721
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500722 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
723 if parsed:
724 self.cacheclean = False
725 self.depends_cache[filename] = info_array
726
class MulticonfigCache(Mapping):
    """
    Read-only mapping of multiconfig name -> Cache.

    Constructing it creates one Cache per multiconfig known to
    databuilder and loads them all, firing CacheLoadStarted /
    CacheLoadProgress / CacheLoadCompleted events so UIs can display
    progress across every cache combined.
    """
    def __init__(self, databuilder, data_hash, caches_array):
        # Progress callback shared by every cache.  p is cumulative
        # within the cache currently loading (previous_progress is
        # advanced to current_progress after each cache finishes below),
        # so total progress = completed caches + p.
        def progress(p):
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            # Only fire an event when the integer-ish percent advances,
            # to avoid flooding the event queue.
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                    databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        # One Cache per multiconfig; sum the on-disk sizes up front so
        # progress can be reported against a total.
        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            SiggenRecipeInfo.reset()
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k
783
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): this two-argument Cache(...) call does not match the
    # constructor used elsewhere in this file (MulticonfigCache calls
    # Cache(databuilder, mc, data_hash, caches_array)).  This entry point
    # looks stale -- confirm whether it still has any callers.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
802
803
class CacheData(object):
    """
    Aggregate view of everything extracted from the cached recipe data.

    Each extra cache class registers its own storage on this object via
    init_cacheData(); parsed recipes then populate it through
    add_from_recipeinfo().
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Filled in directly from cached recipe info
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Filled in elsewhere (cooker and friends)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Let every per-class info object record its data for fn."""
        for entry in info_array:
            entry.add_cacheData(self, fn)
828
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches.

    Each worker process records its additions in cachedata_extras and
    dumps them to a per-pid sidecar file (save_extras); the server later
    folds all sidecar files back into the main cache file (save_merge).
    Subclasses are expected to provide cache_file_name and CACHE_VERSION.
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, cachedir, cache_file_name=None):
        """
        Load the on-disk cache from cachedir, if present and matching
        CACHE_VERSION; otherwise keep the empty in-memory data.
        """
        if not cachedir:
            return

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # A missing, truncated or otherwise unreadable cache file is
            # expected here; start from scratch.  (Was a bare "except:",
            # which also swallowed SystemExit/KeyboardInterrupt.)
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        """Return the empty cache structure: a list of dicts."""
        data = [{}]
        return data

    def save_extras(self):
        """
        Write this process's new entries to a unique per-pid sidecar file.

        Takes the main lock shared (only save_merge takes it exclusively)
        plus a private numbered lock, bumping the number until we find a
        slot with neither a stale lock nor an existing sidecar file.
        """
        if not self.cachefile:
            return

        have_data = any(self.cachedata_extras)
        if not have_data:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Merge source into dest in place; existing dest entries win."""
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """
        Fold every per-pid sidecar file into the main cache file.

        Runs under the exclusive lock; unreadable or version-mismatched
        sidecars are simply deleted.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        have_data = False

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            have_data = True
            self.merge_data(extradata, data)
            os.unlink(f)

        if have_data:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400939
940
class SimpleCache(object):
    """
    BitBake single-file cache implementation

    Stores one pickled object plus a version stamp; used for caches that
    do not need multi-process merging (contrast MultiProcessCache).
    """

    def __init__(self, version):
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """
        Load the cache from PERSISTENT_DIR (or CACHE) and return the
        stored data, or defaultdata when the file is missing, unreadable
        or carries a different version.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # A missing or corrupt cache file is not an error; fall back
            # to defaultdata.  (Was a bare "except:", which also caught
            # SystemExit/KeyboardInterrupt.)
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Write data with the version stamp, holding the lock file."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)

    def copyfile(self, target):
        """Copy the cache file to target while holding the lock file."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        shutil.copy(self.cachefile, target)
        bb.utils.unlockfile(glf)
999 bb.utils.unlockfile(glf)