blob: 4e08c100ab2665715e6765ad0635475c3b5fbb53 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5199d832021-09-24 16:47:35 -050022from collections import defaultdict
23from collections.abc import Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050024import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050025from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050026import re
Patrick Williamsc124f4f2015-09-15 14:41:29 -050027
28logger = logging.getLogger("BitBake.Cache")
29
Andrew Geisslerd1e89492021-02-12 15:35:20 -060030__cache_version__ = "154"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050031
def getCacheFile(path, filename, mc, data_hash):
    """Return the full path of a cache file inside *path*.

    The name is assembled as <filename>[.<mc>].<data_hash>; the ".<mc>"
    infix is only present for a non-default multiconfig.
    """
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, suffix, data_hash))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050037
38# RecipeInfoCommon defines common data retrieving methods
39# from meta data for caches. CoreRecipeInfo as well as other
40# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Common metadata-extraction helpers for recipe info caches.

    CoreRecipeInfo and any extra-cache RecipeInfo classes inherit from
    this to pull values out of a parsed datastore in various shapes
    (word lists, ints, dependency lists, per-package/per-task dicts).
    """

    @classmethod
    def listvar(cls, var, metadata):
        # Whitespace-split the variable's value into a list of words.
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Unset or empty values count as 0.
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Explode a DEPENDS-style string into a list of dependency names.
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package dependency lists, read from VAR:<pkg> overrides.
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task values, read from VAR:task-<task> overrides.
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Map each variable to its flag value; with squash=True drop
        # entries whose value is falsy.
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {k: v for k, v in flags.items() if v}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        # Unset variables are normalised to the empty string.
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050077
78
class CoreRecipeInfo(RecipeInfoCommon):
    """Recipe information the BitBake core needs for every recipe.

    Instances are built from a parsed datastore and are what gets
    pickled into bb_cache.dat; add_cacheData() flattens them into the
    aggregated CacheData structures used by the cooker/runqueue.
    """
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes only carry the minimal fields above plus
            # the skip reason; none of the task/dependency data below
            # is populated for them.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the (empty) aggregate structures on *cachedata* that
        add_cacheData() later fills in, one entry per recipe file."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into the aggregated
        *cachedata*, keyed by the (virtual) filename *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Copy the list: the "+=" below would otherwise extend
        # self.rprovides in place through the alias and corrupt the
        # cached info on repeated calls.
        rprovides = list(self.rprovides)
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500239
def virtualfn2realfn(virtualfn):
    """
    Split a virtual filename into (real filename, class extension, multiconfig).
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        # Peel off the "mc:<name>:" prefix first.
        _, mc, virtualfn = virtualfn.split(':', 2)

    cls = ""
    fn = virtualfn
    if virtualfn.startswith('virtual:'):
        # Everything between "virtual:" and the last colon is the class.
        cls, _, fn = virtualfn[len('virtual:'):].rpartition(':')

    return (fn, cls, mc)
258
def realfn2virtual(realfn, cls, mc):
    """
    Build a virtual filename from a real filename plus an optional class
    extension and an optional multiconfig name (inverse of virtualfn2realfn).
    """
    virtualfn = realfn
    if cls:
        virtualfn = "virtual:%s:%s" % (cls, virtualfn)
    if mc:
        # The multiconfig prefix always goes on the outside.
        virtualfn = "mc:%s:%s" % (mc, virtualfn)
    return virtualfn
268
def variant2virtual(realfn, variant):
    """
    Build a virtual filename from a real filename and a variant keyword
    (which may itself carry an "mc:<name>:" prefix).
    """
    if not variant:
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        fields = variant.split(":")
        mcname = fields[1]
        if fields[2]:
            # Multiconfig variant that also carries a class extension.
            return "mc:%s:virtual:%s:%s" % (mcname, ":".join(fields[2:]), realfn)
        return "mc:%s:%s" % (mcname, realfn)
    return "virtual:%s:%s" % (variant, realfn)
281
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse one recipe file.

    bb_data: datastore copy to parse into (will be finalised/replaced by
             bb.parse.handle and returned).
    bbfile:  path of the .bb file to parse.
    appends: list of .bbappend paths to apply, recorded in __BBAPPEND.
    mc:      multiconfig name this parse belongs to ('' for the default).

    Returns the datastore(s) produced by bb.parse.handle. Any exception
    from parsing is re-raised after the working directory is restored.
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    # Prime the mtime cache for the recipe's directory.
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        # Bare except is deliberate: restore the cwd on *any* failure,
        # then re-raise unchanged.
        if chdir_back:
            os.chdir(oldpath)
        raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500316
317
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600318
class NoCache(object):
    """Recipe parsing front-end with no on-disk caching.

    Wraps a databuilder and parses recipe files on demand; Cache
    subclasses this to add persistent bb_cache.dat support.
    """

    def __init__(self, databuilder):
        # databuilder provides the base datastore plus one datastore
        # per multiconfig in databuilder.mcdata.
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug("Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        # virtonly=True finalises only the requested variant.
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file.

        Returns a dict of datastores keyed by variant name. With
        virtonly=True only the single variant encoded in *bbfile* (a
        virtual filename) is finalised; with mc set, only that
        multiconfig is parsed; otherwise the default configuration and
        every non-default multiconfig are all parsed.
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        if mc is not None:
            bb_data = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(bb_data, bbfile, appends, mc)

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        # Also parse the recipe under every non-default multiconfig,
        # exposing those results under "mc:<mc>:<variant>" keys.
        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
364
365class Cache(NoCache):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500366 """
367 BitBake Cache implementation
368 """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Set up the cache for one multiconfig.

        mc:           multiconfig name ('' for the default config).
        data_hash:    hash of the base configuration; cache filenames
                      embed it so a config change invalidates the cache.
        caches_array: list of RecipeInfoCommon subclasses to load/store
                      (CoreRecipeInfo plus any extra caches).
        """
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()        # filenames whose cache entries are known valid
        self.checked = set()      # filenames already put through cacheValidUpdate()
        self.depends_cache = {}   # fn -> list of RecipeInfoCommon instances
        self.data_fn = None
        self.cacheclean = True    # nothing new parsed yet, so no need to re-save
        self.data_hash = data_hash
        # Splits "file:True/False file:True/False ..." checksum lists.
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            # No CACHE directory configured: run without persistence.
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500395
    def getCacheFile(self, cachefile):
        # Full, hash-qualified path for this multiconfig's cache file.
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
398
    def prepare_cache(self, progress):
        """Load existing cache files if they are all present.

        progress: callable invoked with the byte offset read so far.

        Returns the number of cache entries loaded (0 if caching is
        disabled or the cache had to be discarded/rebuilt). Also
        (re)creates a debugging symlink to the active cache file.
        """
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # Only load if *every* registered cache class has its file;
        # a partial set would leave the caches inconsistent.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience.
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Best effort only (e.g. filesystems without symlink support).
            pass

        return loaded
439
440 def cachesize(self):
441 if not self.has_cache:
442 return 0
443
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500444 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500445 for cache_class in self.caches_array:
446 cachefile = self.getCacheFile(cache_class.cachefile)
447 try:
448 with open(cachefile, "rb") as cachefile:
449 cachesize += os.fstat(cachefile.fileno()).st_size
450 except FileNotFoundError:
451 pass
452
453 return cachesize
454
    def load_cachefile(self, progress):
        """Read every registered cache file into self.depends_cache.

        Each file starts with two pickled header values (cache format
        version, bitbake version); any mismatch or unpicklable header
        aborts with 0 so the caller rebuilds. The remainder is a stream
        of pickled (key, RecipeInfoCommon) pairs, appended per key so
        extra caches stack behind CoreRecipeInfo.

        progress: callable fed the cumulative byte offset read so far.
        Returns the number of distinct keys loaded.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        # End of stream (or truncated file): stop reading.
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                # Carry this file's byte count forward so offsets keep
                # growing across multiple cache files.
                previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500504
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information.

        Returns a list of (virtualfn, info_array) tuples, one per
        variant produced by the parse; info_array holds one instance of
        each class in self.caches_array (CoreRecipeInfo first).
        """
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate parse-time file dependencies across variants.
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                # The base recipe records the full variant list.
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
530
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600531 def load(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500532 """Obtain the recipe information for the specified filename,
533 using cached values if available, otherwise parsing.
534
535 Note that if it does parse to obtain the info, it will not
536 automatically add the information to the cache or to your
537 CacheData. Use the add or add_info method to do so after
538 running this, or use loadData instead."""
539 cached = self.cacheValid(filename, appends)
540 if cached:
541 infos = []
542 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
543 info_array = self.depends_cache[filename]
544 for variant in info_array[0].variants:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600545 virtualfn = variant2virtual(filename, variant)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500546 infos.append((virtualfn, self.depends_cache[virtualfn]))
547 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500548 return self.parse(filename, appends, configdata, self.caches_array)
549
550 return cached, infos
551
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600552 def loadData(self, fn, appends, cacheData):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500553 """Load the recipe info for the specified filename,
554 parsing and adding to the cache if necessary, and adding
555 the recipe information to the supplied CacheData instance."""
556 skipped, virtuals = 0, 0
557
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600558 cached, infos = self.load(fn, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500559 for virtualfn, info_array in infos:
560 if info_array[0].skipped:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600561 self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500562 skipped += 1
563 else:
564 self.add_info(virtualfn, info_array, cacheData, not cached)
565 virtuals += 1
566
567 return cached, skipped, virtuals
568
569 def cacheValid(self, fn, appends):
570 """
571 Is the cache valid for fn?
572 Fast version, no timestamps checked.
573 """
574 if fn not in self.checked:
575 self.cacheValidUpdate(fn, appends)
576
577 # Is cache enabled?
578 if not self.has_cache:
579 return False
580 if fn in self.clean:
581 return True
582 return False
583
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.

        Records fn in self.checked, and on success adds fn and all of
        its variants to self.clean; on any failure the stale entry is
        dropped via self.remove(). Returns True/False accordingly.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists (cached_mtime_noerror yields 0 then)
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        # Re-check the recorded "file exists" assertions of the
        # file-checksums flags ("path:True path:False ..." lists).
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        # The applied bbappend set must match exactly (order included).
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
681
682 def remove(self, fn):
683 """
684 Remove a fn from the cache
685 Called from the parser in error cases
686 """
687 if fn in self.depends_cache:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600688 self.logger.debug("Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500689 del self.depends_cache[fn]
690 if fn in self.clean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600691 self.logger.debug("Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500692 self.clean.remove(fn)
693
694 def sync(self):
695 """
696 Save the cache
697 Called from the parser when complete (or exiting)
698 """
699
700 if not self.has_cache:
701 return
702
703 if self.cacheclean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600704 self.logger.debug2("Cache is clean, not saving.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500705 return
706
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500707 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600708 cache_class_name = cache_class.__name__
Andrew Geissler5a43b432020-06-13 10:46:56 -0500709 cachefile = self.getCacheFile(cache_class.cachefile)
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600710 self.logger.debug2("Writing %s", cachefile)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600711 with open(cachefile, "wb") as f:
712 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
713 p.dump(__cache_version__)
714 p.dump(bb.__version__)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500715
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600716 for key, info_array in self.depends_cache.items():
717 for info in info_array:
718 if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
719 p.dump(key)
720 p.dump(info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500721
722 del self.depends_cache
723
    @staticmethod
    def mtime(cachefile):
        # Thin wrapper around bb.parse's mtime cache; presumably returns a
        # harmless value rather than raising on a missing file, per the
        # "_noerror" helper name — confirm against bb.parse.
        return bb.parse.cached_mtime_noerror(cachefile)
727
728 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500729 if self.mc is not None:
730 (fn, cls, mc) = virtualfn2realfn(filename)
731 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500732 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500733 return
734
735 vfn = realfn2virtual(fn, cls, self.mc)
736 else:
737 vfn = filename
738
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500739 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500740 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500741
742 if watcher:
743 watcher(info_array[0].file_depends)
744
745 if not self.has_cache:
746 return
747
748 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
749 if parsed:
750 self.cacheclean = False
751 self.depends_cache[filename] = info_array
752
753 def add(self, file_name, data, cacheData, parsed=None):
754 """
755 Save data we need into the cache
756 """
757
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600758 realfn = virtualfn2realfn(file_name)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500759
760 info_array = []
761 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600762 info_array.append(cache_class(realfn, data))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500763 self.add_info(file_name, info_array, cacheData, parsed)
764
class MulticonfigCache(Mapping):
    """
    Read-only mapping from multiconfig name to its Cache object.

    Construction loads every per-multiconfig cache, firing
    CacheLoadStarted / CacheLoadProgress / CacheLoadCompleted events so
    UIs can show a single combined progress bar across all caches.
    """
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            # Fold one cache's progress into the overall totals; the
            # totals live in the enclosing scope so the same callback
            # serves every cache in turn.
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            if not cachesize:
                # Fix: with no cache data at all there is nothing to
                # report, and the percentage below would divide by zero.
                return
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                    databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        # One Cache per multiconfig; total the sizes up front so progress
        # events can report a meaningful fraction.
        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k
820
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): elsewhere in this file Cache is constructed as
    # Cache(databuilder, mc, data_hash, caches_array); this two-argument
    # call looks stale — confirm init() still has a live caller before
    # relying on it.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
839
840
class CacheData(object):
    """
    Aggregated view of the per-recipe cached data.

    Each registered cache class contributes its own attributes via
    init_cacheData() and later fills them through add_cacheData().
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        # Let every info object install its data onto this instance.
        for info in info_array:
            info.add_cacheData(self, fn)
865
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Each writer process dumps its additions to a per-pid side file
    (save_extras); a single process later folds all side files back into
    the main cache file (save_merge).  File access is serialised with
    bb.utils lock files.
    """

    def __init__(self):
        # cachefile stays None until init_cache() finds a cache directory;
        # the save methods are no-ops in that state.
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """Locate and load the on-disk cache, if any.

        Keeps the empty default data when no cache directory is
        configured, the file is unreadable/corrupt, or the version stamp
        does not match CACHE_VERSION.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Best effort: a missing or corrupt cache just means we start
            # fresh.  (Fix: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit.)
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # A list of dict "sections"; subclasses may override to add more.
        data = [{}]
        return data

    def save_extras(self):
        """Write this process's additions to a unique side file.

        Holds the shared lock plus a per-pid exclusive lock so concurrent
        writers never collide on the same side file name.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Probe for a free "<cachefile>-<n>" slot, starting at our pid.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

        with open(self.cachefile + "-" + str(i), "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Merge *source* into *dest* section-by-section; keys already
        present in *dest* win."""
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """Fold all side files back into the main cache file.

        Side files that cannot be read or carry a mismatched version are
        deleted and skipped.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            # Fix: also tolerate a corrupt/truncated pickle, which raises
            # UnpicklingError rather than IOError/EOFError.
            except (IOError, EOFError, pickle.UnpicklingError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400969
970
class SimpleCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Unlike MultiProcessCache this stores one pickled payload per file,
    with the version stamp supplied at construction time.
    """

    def __init__(self, version):
        # cachefile stays None until init_cache() finds a cache directory;
        # save() is a no-op in that state.
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """Load and return the cached data, or *defaultdata*.

        *defaultdata* is returned when no cache directory is configured,
        the file cannot be read or unpickled, or its version stamp does
        not match self.cacheversion.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Best effort: fall back to defaultdata on any read/unpickle
            # failure.  (Fix: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit.)
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Rewrite the cache file with *data*, serialised under the lock
        file.  No-op when init_cache() never found a cache directory."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)