blob: 988c596c39590f31685704663fe86d7479bda9fb [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5199d832021-09-24 16:47:35 -050022from collections import defaultdict
23from collections.abc import Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050024import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050025from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050026import re
Andrew Geissler78b72792022-06-14 06:47:25 -050027import shutil
Patrick Williamsc124f4f2015-09-15 14:41:29 -050028
29logger = logging.getLogger("BitBake.Cache")
30
Andrew Geisslerd1e89492021-02-12 15:35:20 -060031__cache_version__ = "154"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050032
def getCacheFile(path, filename, mc, data_hash):
    """Build the on-disk cache file path for a given multiconfig and data hash.

    The multiconfig name, when non-empty, is embedded between the base file
    name and the configuration hash: <path>/<filename>[.<mc>].<data_hash>
    """
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, suffix, data_hash))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050038
# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Shared metadata-extraction helpers for recipe-info cache classes."""

    @classmethod
    def listvar(cls, var, metadata):
        # Whitespace-split list form of the variable's value.
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Integer form of the variable; unset/empty counts as 0.
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Dependency string exploded into a list of dependency names.
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package override values, e.g. VAR:<pkg>, as a dict keyed by package.
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task override values, e.g. VAR:task-<task>, as a dict keyed by task.
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Map each variable to its value for the given flag; with squash=True,
        # entries whose flag value is empty/None are dropped.
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for k, v in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        # Variable value as a string; unset maps to ''.
        return metadata.getVar(var, expand) or ''
79
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information held in the bitbake cache.

    One instance is built per (virtual) recipe file from its parsed metadata;
    add_cacheData() later folds the values into the aggregate CacheData
    containers initialised by init_cacheData().
    """
    __slots__ = ()

    # Name of the on-disk cache file this info class is persisted to.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        # Files this recipe's parse depended on ((path, mtime) pairs per
        # the consumers in Cache.cacheValidUpdate).
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        # Class-extension variants; '' represents the unextended recipe.
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        # Fall back to deriving PN from the file name when unset.
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes only carry the minimal fields above.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        # squash=True: only tasks that actually declare file-checksums are kept.
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # Deliberately unexpanded: the inherit cache is stored verbatim.
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class: initialise the aggregate
        # containers that add_cacheData() populates per recipe file.
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's info into *cachedata* under the key *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # PN itself is always the first provide; preserve order, no duplicates.
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # NOTE(review): '+=' extends self.rprovides in place, so repeated
        # add_cacheData() calls on the same instance would accumulate the
        # per-package entries — presumably this runs once per fn; verify.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    # Peel off an optional leading 'mc:<name>:' multiconfig prefix.
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        _, mc, virtualfn = virtualfn.split(':', 2)

    # Peel off an optional 'virtual:<cls>:' prefix (cls may contain colons).
    fn, cls = virtualfn, ""
    if virtualfn.startswith('virtual:'):
        pieces = virtualfn.split(':')
        cls = ":".join(pieces[1:-1])
        fn = pieces[-1]

    return (fn, cls, mc)
259
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    virtualfn = realfn
    if cls:
        virtualfn = "virtual:%s:%s" % (cls, virtualfn)
    if mc:
        virtualfn = "mc:%s:%s" % (mc, virtualfn)
    return virtualfn
269
def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if variant == "":
        return realfn
    # 'mc:<name>:<rest>' variants re-nest as mc:<name>:[virtual:<rest>:]<realfn>.
    if variant.startswith("mc:") and variant.count(':') >= 2:
        pieces = variant.split(":")
        if pieces[2]:
            return "mc:" + pieces[1] + ":virtual:" + ":".join(pieces[2:]) + ":" + realfn
        return "mc:" + pieces[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn
282
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """
    # Record which multiconfig this datastore belongs to.
    bb_data.setVar("__BBMULTICONFIG", mc)

    # Touch the recipe's directory mtime entry (result intentionally unused;
    # presumably primes bitbake's mtime cache — see bb.parse).
    recipe_dir = os.path.abspath(os.path.dirname(bbfile))
    bb.parse.cached_mtime_noerror(recipe_dir)

    if appends:
        bb_data.setVar('__BBAPPEND', " ".join(appends))

    return bb.parse.handle(bbfile, bb_data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500297
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600298
class NoCache(object):
    """Cache-less parsing front-end: recipes are always parsed from disk.

    Also the base class of Cache, which adds persistent storage on top.
    """

    def __init__(self, databuilder):
        # databuilder supplies the base datastore and the per-multiconfig
        # datastores (databuilder.mcdata) used as parse templates.
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug("Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        # virtonly=True parses just the requested variant's datastore.
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        if virtonly:
            # Finalise only the single requested variant of one multiconfig.
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        if mc is not None:
            # Parse against one specific multiconfig's datastore only.
            bb_data = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(bb_data, bbfile, appends, mc)

        # Default: parse against the base datastore, then once more per
        # non-default multiconfig, namespacing those results as 'mc:<mc>:<v>'.
        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
344
class Cache(NoCache):
    """
    BitBake Cache implementation
    """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Create the cache for one multiconfig.

        databuilder: source of the datastores used for parsing (see NoCache).
        mc: multiconfig name; '' selects the default configuration.
        data_hash: base-configuration hash, embedded in cache file names.
        caches_array: RecipeInfoCommon subclasses to dump/load.
        """
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()       # filenames verified cache-valid this run
        self.checked = set()     # filenames already examined this run
        self.depends_cache = {}  # fn -> [RecipeInfoCommon, ...]
        self.data_fn = None
        self.cacheclean = True   # nothing new parsed yet -> no need to re-save
        self.data_hash = data_hash
        # Splits 'path:True path2:False ...' checksum lists on the whitespace
        # that follows a ':True'/':False' marker (paths may contain spaces).
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            # No CACHE directory configured: operate without a persistent cache.
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500375
Andrew Geissler5a43b432020-06-13 10:46:56 -0500376 def getCacheFile(self, cachefile):
377 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
378
    def prepare_cache(self, progress):
        """Locate/load the on-disk cache files, returning the number of
        cached entries loaded (0 when caching is disabled or stale).

        progress: callable invoked with a byte offset while loading.
        """
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # All registered cache files must exist for the cache to be usable;
        # init_cacheData() is still run for each class either way.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Best-effort: the symlink is purely cosmetic.
            pass

        return loaded
419
420 def cachesize(self):
421 if not self.has_cache:
422 return 0
423
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500424 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500425 for cache_class in self.caches_array:
426 cachefile = self.getCacheFile(cache_class.cachefile)
427 try:
428 with open(cachefile, "rb") as cachefile:
429 cachesize += os.fstat(cachefile.fileno()).st_size
430 except FileNotFoundError:
431 pass
432
433 return cachesize
434
    def load_cachefile(self, progress):
        """Read every registered cache file into self.depends_cache.

        Returns the number of distinct cached filenames loaded; returns 0
        (signalling a rebuild) on any version mismatch or unreadable header.
        progress: callable invoked with a byte offset as entries are read.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            # NOTE: 'cachefile' is rebound from the path string to the open
            # file object here; pickle data is trusted (we wrote it ourselves).
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file: alternating key/value
                # records until EOF (the pickle.load raising ends the loop).
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    # Infos from successive cache classes accumulate per key.
                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500484
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        # (reverse sort puts the '' variant, i.e. the base recipe, last).
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate parse-time file dependencies across all variants.
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            # Build one info object per registered cache class.
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
510
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600511 def load(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500512 """Obtain the recipe information for the specified filename,
513 using cached values if available, otherwise parsing.
514
515 Note that if it does parse to obtain the info, it will not
516 automatically add the information to the cache or to your
517 CacheData. Use the add or add_info method to do so after
518 running this, or use loadData instead."""
519 cached = self.cacheValid(filename, appends)
520 if cached:
521 infos = []
522 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
523 info_array = self.depends_cache[filename]
524 for variant in info_array[0].variants:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600525 virtualfn = variant2virtual(filename, variant)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500526 infos.append((virtualfn, self.depends_cache[virtualfn]))
527 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500528 return self.parse(filename, appends, configdata, self.caches_array)
529
530 return cached, infos
531
    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance.

        Returns (cached, skipped, virtuals): whether the cache was used,
        how many variants were skipped, and how many were added.
        """
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                # Freshly parsed (not cached) entries are marked for saving.
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals
548
549 def cacheValid(self, fn, appends):
550 """
551 Is the cache valid for fn?
552 Fast version, no timestamps checked.
553 """
554 if fn not in self.checked:
555 self.cacheValidUpdate(fn, appends)
556
557 # Is cache enabled?
558 if not self.has_cache:
559 return False
560 if fn in self.clean:
561 return True
562 return False
563
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        Side effects: records fn in self.checked, and adds/removes fn and
        its variants in self.clean / self.depends_cache accordingly.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        # Validate the recorded 'path:True/False' existence snapshots.
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    # rsplit keeps colons inside the path intact.
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        # Any change to the bbappend list (content or order) invalidates.
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        # Every variant must be cached by every registered cache class.
        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
661
662 def remove(self, fn):
663 """
664 Remove a fn from the cache
665 Called from the parser in error cases
666 """
667 if fn in self.depends_cache:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600668 self.logger.debug("Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500669 del self.depends_cache[fn]
670 if fn in self.clean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600671 self.logger.debug("Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500672 self.clean.remove(fn)
673
674 def sync(self):
675 """
676 Save the cache
677 Called from the parser when complete (or exiting)
678 """
679
680 if not self.has_cache:
681 return
682
683 if self.cacheclean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600684 self.logger.debug2("Cache is clean, not saving.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500685 return
686
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500687 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600688 cache_class_name = cache_class.__name__
Andrew Geissler5a43b432020-06-13 10:46:56 -0500689 cachefile = self.getCacheFile(cache_class.cachefile)
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600690 self.logger.debug2("Writing %s", cachefile)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600691 with open(cachefile, "wb") as f:
692 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
693 p.dump(__cache_version__)
694 p.dump(bb.__version__)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500695
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600696 for key, info_array in self.depends_cache.items():
697 for info in info_array:
698 if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
699 p.dump(key)
700 p.dump(info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500701
702 del self.depends_cache
703
704 @staticmethod
705 def mtime(cachefile):
706 return bb.parse.cached_mtime_noerror(cachefile)
707
708 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500709 if self.mc is not None:
710 (fn, cls, mc) = virtualfn2realfn(filename)
711 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500712 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500713 return
714
715 vfn = realfn2virtual(fn, cls, self.mc)
716 else:
717 vfn = filename
718
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500719 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500720 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500721
722 if watcher:
723 watcher(info_array[0].file_depends)
724
725 if not self.has_cache:
726 return
727
728 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
729 if parsed:
730 self.cacheclean = False
731 self.depends_cache[filename] = info_array
732
733 def add(self, file_name, data, cacheData, parsed=None):
734 """
735 Save data we need into the cache
736 """
737
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600738 realfn = virtualfn2realfn(file_name)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500739
740 info_array = []
741 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600742 info_array.append(cache_class(realfn, data))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500743 self.add_info(file_name, info_array, cacheData, parsed)
744
class MulticonfigCache(Mapping):
    """
    Read-only mapping of multiconfig name -> Cache.

    Constructing it loads every per-multiconfig cache, firing a single
    aggregated CacheLoadStarted/Progress/Completed event stream.
    """

    def __init__(self, databuilder, data_hash, caches_array):
        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0

        def progress(p):
            nonlocal current_progress, previous_progress, previous_percent, cachesize
            current_progress = previous_progress + p
            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                              databuilder.data)

        self.__caches = {}
        for mc in databuilder.mcdata:
            cache = Cache(databuilder, mc, data_hash, caches_array)
            self.__caches[mc] = cache
            cachesize += cache.cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)

        loaded = 0
        for cache in self.__caches.values():
            loaded += cache.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        return iter(self.__caches)
800
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): this passes (data, data_hash) to Cache(), but Cache as
    # constructed by MulticonfigCache above takes (databuilder, mc, data_hash,
    # caches_array) — this legacy entry point looks stale; confirm it is
    # actually unused before relying on it.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
819
820
class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every extra cache class hang its own tables off this object.
        for extra_cls in caches_array:
            if not issubclass(extra_cls, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % extra_cls)
            extra_cls.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        # Give each per-class info object a chance to populate its tables.
        for entry in info_array:
            entry.add_cacheData(self, fn)
845
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches. Each process records its
    additions in a separate "extras" structure; save_extras() writes them to
    a per-process sidecar file and save_merge() later folds every sidecar
    back into the main cache file under the global lock.
    """

    def __init__(self):
        # Path of the shared cache file; stays None until init_cache()
        self.cachefile = None
        # Data loaded from (and saved to) the shared cache file
        self.cachedata = self.create_cachedata()
        # Entries added by this process only, merged back by save_merge()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """
        Locate the cache file under PERSISTENT_DIR (or CACHE) and load it.

        Silently starts with empty data when no cache directory is
        configured, the file is missing/unreadable/corrupt, or the on-disk
        version does not match CACHE_VERSION.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            try:
                with open(self.cachefile, "rb") as f:
                    data, version = pickle.Unpickler(f).load()
            except Exception:
                # Narrowed from a bare "except:" so KeyboardInterrupt and
                # SystemExit propagate; the lock is still released below.
                return
        finally:
            bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # One dict per logical table; subclasses may extend with more tables
        data = [{}]
        return data

    def save_extras(self):
        """
        Write this process's extra entries to a per-process sidecar file
        (cachefile-<n>), holding the shared lock so a concurrent
        save_merge() cannot run at the same time.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Find a free sidecar slot, starting at our pid to reduce collisions
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Merge source into dest table-by-table; existing keys in dest win."""
        for j, table in enumerate(dest):
            for h in source[j]:
                if h not in table:
                    table[h] = source[j][h]

    def save_merge(self):
        """
        Fold every per-process sidecar file back into the main cache file,
        then delete the sidecars. Runs under the exclusive lock; sidecars
        that are unreadable or version-mismatched are discarded.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400949
950
class SimpleCache(object):
    """
    BitBake versioned single-file cache.

    Stores one pickled payload together with a version stamp. Loading
    returns a caller-supplied default when the cache is unavailable,
    unreadable, or version-mismatched.
    """

    def __init__(self, version):
        # Path of the cache file; None until init_cache() configures it
        self.cachefile = None
        self.cachedata = None
        # Version stamp written alongside the data and checked on load
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """
        Locate the cache under PERSISTENT_DIR (or CACHE) and load it.

        Returns the loaded data, or *defaultdata* when no cache directory
        is configured, the file cannot be read, or the version stamp does
        not match self.cacheversion.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        try:
            try:
                with open(self.cachefile, "rb") as f:
                    data, version = pickle.Unpickler(f).load()
            except Exception:
                # Narrowed from a bare "except:" so KeyboardInterrupt and
                # SystemExit propagate; the lock is still released below.
                return defaultdata
        finally:
            bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Write (data, version) to the cache file while holding the lock."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)

    def copyfile(self, target):
        """Copy the cache file to *target* while holding the lock."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        shutil.copy(self.cachefile, target)
        bb.utils.unlockfile(glf)