blob: 73bc6e96656b87e3286eb2115b3da233c2acf93f [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Andrew Geissler5a43b432020-06-13 10:46:56 -050022from collections import defaultdict, Mapping
Patrick Williamsc124f4f2015-09-15 14:41:29 -050023import bb.utils
Andrew Geissler5a43b432020-06-13 10:46:56 -050024from bb import PrefixLoggerAdapter
Andrew Geissler82c905d2020-04-13 13:39:40 -050025import re
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026
logger = logging.getLogger("BitBake.Cache")

# Version of the pickled cache format.  Bump this whenever the layout of
# the cached recipe data changes so stale on-disk caches are discarded.
__cache_version__ = "154"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050030
def getCacheFile(path, filename, mc, data_hash):
    """Return the on-disk cache file path for a multiconfig/data-hash pair.

    The base filename is qualified with ".<mc>" (when mc is non-empty)
    and the configuration data hash.
    """
    mcspec = ".%s" % mc if mc else ''
    return os.path.join(path, "%s%s.%s" % (filename, mcspec, data_hash))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050036
# RecipeInfoCommon defines common data-retrieving methods
# from metadata for caches. CoreRecipeInfo as well as other
# extra RecipeInfo classes need to inherit from this class.
class RecipeInfoCommon(object):
    """Common helpers for extracting values from a parsed recipe datastore.

    CoreRecipeInfo, as well as any extra RecipeInfo cache classes,
    inherit from this class.
    """

    @classmethod
    def listvar(cls, var, metadata):
        # Whitespace-split the variable's value into a list.
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Unset or empty values are treated as 0.
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Explode a dependency string (which may carry version constraints).
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package dependency values, e.g. RDEPENDS:<pkg>.
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task values, e.g. <var>:task-<task>.
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Map each variable to the given flag's value; with squash=True,
        # entries whose flag is unset/empty are dropped.
        out_dict = {var: metadata.getVarFlag(var, flag)
                    for var in varlist}
        if squash:
            return {k: v for k, v in out_dict.items() if v}
        return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        # Unset variables are normalised to the empty string.
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050076
77
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information stored in the main bitbake cache.

    One instance is created per parsed recipe (variant); instances are
    pickled into bb_cache.dat and merged into a CacheData object via
    add_cacheData().
    """
    __slots__ = ()

    # On-disk filename (before mc/hash qualification) for this cache class.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Extract all cached fields from the parsed datastore *metadata*."""
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        # Fall back to deriving PN from the filename when unset.
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename, metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes only record the reason; no task data is needed.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # Unexpanded on purpose: the inherit cache is a raw list.
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Initialise on *cachedata* every attribute this class populates."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into *cachedata* under key *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Bug fix: copy the list first.  The previous
        # "rprovides = self.rprovides" aliased the instance attribute, so
        # the += below grew self.rprovides as a side effect of this call.
        rprovides = list(self.rprovides)
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500238
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        # Strip the leading "mc:<name>:" prefix.
        _, mc, virtualfn = virtualfn.split(':', 2)

    fn, cls = virtualfn, ""
    if virtualfn.startswith('virtual:'):
        parts = virtualfn.split(':')
        fn = parts[-1]
        cls = ":".join(parts[1:-1])

    return (fn, cls, mc)
257
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    virtfn = realfn
    if cls:
        virtfn = "virtual:%s:%s" % (cls, virtfn)
    if mc:
        # The multiconfig prefix goes outermost.
        virtfn = "mc:%s:%s" % (mc, virtfn)
    return virtfn
267
def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if not variant:
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        parts = variant.split(":")
        if parts[2]:
            # Both a multiconfig and a virtual class are present.
            return "mc:%s:virtual:%s:%s" % (parts[1], ":".join(parts[2:]), realfn)
        return "mc:%s:%s" % (parts[1], realfn)
    return "virtual:%s:%s" % (variant, realfn)
280
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)

    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        return bb.parse.handle(bbfile, bb_data)
    finally:
        # Restore the working directory whether parsing succeeded or raised.
        if chdir_back:
            os.chdir(oldpath)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500315
316
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600317
class NoCache(object):
    """Parse recipes on demand without any on-disk caching."""

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug("Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        datastores = self.load_bbfile(virtualfn, appends, virtonly=True)
        return datastores[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        if virtonly:
            # Finalise just the single requested virtual variant.
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            datastore = self.databuilder.mcdata[mc].createCopy()
            datastore.setVar("__ONLYFINALISE", virtual or "default")
            return parse_recipe(datastore, bbfile, appends, mc)

        if mc is not None:
            # Parse for one specific multiconfig only.
            datastore = self.databuilder.mcdata[mc].createCopy()
            return parse_recipe(datastore, bbfile, appends, mc)

        # Parse against the default configuration...
        datastores = parse_recipe(self.data.createCopy(), bbfile, appends)

        # ...and additionally against every named multiconfig.
        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            mcstore = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(mcstore, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
363
364class Cache(NoCache):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500365 """
366 BitBake Cache implementation
367 """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """Per-multiconfig recipe cache (mc is '' for the default config)."""
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        # Prefix all log messages with the multiconfig name.
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()        # filenames validated as up to date this session
        self.checked = set()      # filenames already checked this session
        self.depends_cache = {}   # fn -> [CoreRecipeInfo, <extra info objects>...]
        self.data_fn = None
        self.cacheclean = True    # True while nothing new has been parsed
        self.data_hash = data_hash
        # Splits a file-checksums value on the whitespace following each
        # ":True"/":False" marker (filenames themselves may contain
        # spaces or colons).
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            # Caching is optional; without CACHE set we parse everything.
            self.has_cache = False
            self.logger.info("Not using a cache. "
                             "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500394
    def getCacheFile(self, cachefile):
        # Qualify the base cache filename with this cache's multiconfig
        # name and configuration data hash (module-level getCacheFile()).
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
397
    def prepare_cache(self, progress):
        """Load any existing, complete set of cache files.

        Returns the number of cached entries loaded (0 when caching is
        disabled or the cache is missing/out of date).  *progress* is a
        callable fed cumulative byte offsets while loading.
        """
        if not self.has_cache:
            return 0

        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            # Only trust the cache if every registered cache class has a
            # file on disk; initialise the CacheData fields either way.
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Symlinks may be unsupported on this filesystem; non-fatal.
            pass

        return loaded
438
439 def cachesize(self):
440 if not self.has_cache:
441 return 0
442
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500443 cachesize = 0
Andrew Geissler5a43b432020-06-13 10:46:56 -0500444 for cache_class in self.caches_array:
445 cachefile = self.getCacheFile(cache_class.cachefile)
446 try:
447 with open(cachefile, "rb") as cachefile:
448 cachesize += os.fstat(cachefile.fileno()).st_size
449 except FileNotFoundError:
450 pass
451
452 return cachesize
453
    def load_cachefile(self, progress):
        """Read every cache class's pickle file into self.depends_cache.

        Returns the number of cached filenames loaded, or 0 (cache treated
        as invalid) on a version mismatch or unpickling error.  *progress*
        is a callable fed cumulative byte offsets while loading.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    # Entries are stored as alternating key/value pickles;
                    # an unpickling failure means end of file was reached.
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    # Extra cache classes append to the same key's list.
                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                # NOTE(review): this adds the final cumulative offset rather
                # than this file's size, so offsets reported for subsequent
                # files overshoot — looks like a long-standing quirk; confirm
                # before changing.
                previous_progress += current_progress

        return len(self.depends_cache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500503
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                # The "" (real) variant sorts last; give it the accumulated
                # file dependencies of all variants.
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            # Build one info object per registered cache class.
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
529
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600530 def load(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500531 """Obtain the recipe information for the specified filename,
532 using cached values if available, otherwise parsing.
533
534 Note that if it does parse to obtain the info, it will not
535 automatically add the information to the cache or to your
536 CacheData. Use the add or add_info method to do so after
537 running this, or use loadData instead."""
538 cached = self.cacheValid(filename, appends)
539 if cached:
540 infos = []
541 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
542 info_array = self.depends_cache[filename]
543 for variant in info_array[0].variants:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600544 virtualfn = variant2virtual(filename, variant)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500545 infos.append((virtualfn, self.depends_cache[virtualfn]))
546 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500547 return self.parse(filename, appends, configdata, self.caches_array)
548
549 return cached, infos
550
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600551 def loadData(self, fn, appends, cacheData):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500552 """Load the recipe info for the specified filename,
553 parsing and adding to the cache if necessary, and adding
554 the recipe information to the supplied CacheData instance."""
555 skipped, virtuals = 0, 0
556
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600557 cached, infos = self.load(fn, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500558 for virtualfn, info_array in infos:
559 if info_array[0].skipped:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600560 self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500561 skipped += 1
562 else:
563 self.add_info(virtualfn, info_array, cacheData, not cached)
564 virtuals += 1
565
566 return cached, skipped, virtuals
567
568 def cacheValid(self, fn, appends):
569 """
570 Is the cache valid for fn?
571 Fast version, no timestamps checked.
572 """
573 if fn not in self.checked:
574 self.cacheValidUpdate(fn, appends)
575
576 # Is cache enabled?
577 if not self.has_cache:
578 return False
579 if fn in self.clean:
580 return True
581 return False
582
    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        # Record that the thorough check ran so cacheValid() can skip it.
        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                       fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                       fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    # Each entry is "<path>:<True|False>" where the boolean
                    # records whether the path existed at parse time.
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                           fn, f)
                        self.remove(fn)
                        return False

        # A changed set of bbappends invalidates the cached parse.
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True
680
681 def remove(self, fn):
682 """
683 Remove a fn from the cache
684 Called from the parser in error cases
685 """
686 if fn in self.depends_cache:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600687 self.logger.debug("Removing %s from cache", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500688 del self.depends_cache[fn]
689 if fn in self.clean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600690 self.logger.debug("Marking %s as unclean", fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500691 self.clean.remove(fn)
692
693 def sync(self):
694 """
695 Save the cache
696 Called from the parser when complete (or exiting)
697 """
698
699 if not self.has_cache:
700 return
701
702 if self.cacheclean:
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600703 self.logger.debug2("Cache is clean, not saving.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500704 return
705
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500706 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600707 cache_class_name = cache_class.__name__
Andrew Geissler5a43b432020-06-13 10:46:56 -0500708 cachefile = self.getCacheFile(cache_class.cachefile)
Andrew Geisslerd1e89492021-02-12 15:35:20 -0600709 self.logger.debug2("Writing %s", cachefile)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600710 with open(cachefile, "wb") as f:
711 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
712 p.dump(__cache_version__)
713 p.dump(bb.__version__)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500714
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600715 for key, info_array in self.depends_cache.items():
716 for info in info_array:
717 if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
718 p.dump(key)
719 p.dump(info)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500720
721 del self.depends_cache
722
    @staticmethod
    def mtime(cachefile):
        """Return the mtime of cachefile via bb.parse's cached stat layer."""
        # The "noerror" variant presumably returns a fallback value instead
        # of raising for missing/unreadable files -- see bb.parse.
        return bb.parse.cached_mtime_noerror(cachefile)
726
727 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500728 if self.mc is not None:
729 (fn, cls, mc) = virtualfn2realfn(filename)
730 if mc:
Andrew Geisslerd25ed322020-06-27 00:28:28 -0500731 self.logger.error("Unexpected multiconfig %s", filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -0500732 return
733
734 vfn = realfn2virtual(fn, cls, self.mc)
735 else:
736 vfn = filename
737
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500738 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500739 cacheData.add_from_recipeinfo(vfn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500740
741 if watcher:
742 watcher(info_array[0].file_depends)
743
744 if not self.has_cache:
745 return
746
747 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
748 if parsed:
749 self.cacheclean = False
750 self.depends_cache[filename] = info_array
751
752 def add(self, file_name, data, cacheData, parsed=None):
753 """
754 Save data we need into the cache
755 """
756
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600757 realfn = virtualfn2realfn(file_name)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500758
759 info_array = []
760 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600761 info_array.append(cache_class(realfn, data))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500762 self.add_info(file_name, info_array, cacheData, parsed)
763
Andrew Geissler5a43b432020-06-13 10:46:56 -0500764class MulticonfigCache(Mapping):
765 def __init__(self, databuilder, data_hash, caches_array):
766 def progress(p):
767 nonlocal current_progress
768 nonlocal previous_progress
769 nonlocal previous_percent
770 nonlocal cachesize
771
772 current_progress = previous_progress + p
773
774 if current_progress > cachesize:
775 # we might have calculated incorrect total size because a file
776 # might've been written out just after we checked its size
777 cachesize = current_progress
778 current_percent = 100 * current_progress / cachesize
779 if current_percent > previous_percent:
780 previous_percent = current_percent
781 bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
782 databuilder.data)
783
784
785 cachesize = 0
786 current_progress = 0
787 previous_progress = 0
788 previous_percent = 0
789 self.__caches = {}
790
791 for mc, mcdata in databuilder.mcdata.items():
792 self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
793
794 cachesize += self.__caches[mc].cachesize()
795
796 bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
797 loaded = 0
798
799 for c in self.__caches.values():
800 loaded += c.prepare_cache(progress)
801 previous_progress = current_progress
802
803 # Note: depends cache number is corresponding to the parsing file numbers.
804 # The same file has several caches, still regarded as one item in the cache
805 bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
806
807 def __len__(self):
808 return len(self.__caches)
809
810 def __getitem__(self, key):
811 return self.__caches[key]
812
813 def __contains__(self, key):
814 return key in self.__caches
815
816 def __iter__(self):
817 for k in self.__caches:
818 yield k
819
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500820def init(cooker):
821 """
822 The Objective: Cache the minimum amount of data possible yet get to the
823 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
824
825 To do this, we intercept getVar calls and only cache the variables we see
826 being accessed. We rely on the cache getVar calls being made for all
827 variables bitbake might need to use to reach this stage. For each cached
828 file we need to track:
829
830 * Its mtime
831 * The mtimes of all its dependencies
832 * Whether it caused a parse.SkipRecipe exception
833
834 Files causing parsing errors are evicted from the cache.
835
836 """
837 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
838
839
class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every registered cache class attach its own containers to us.
        for klass in self.caches_array:
            if not issubclass(klass, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % klass)
            klass.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        # Each per-class info object contributes its slice of the data.
        for entry in info_array:
            entry.add_cacheData(self, fn)
864
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    The main process loads/saves the cache via init_cache()/save_merge().
    Worker processes accumulate additions in cachedata_extras and write them
    to per-pid side files with save_extras(); save_merge() later folds those
    side files back into the main cache file.
    """

    def __init__(self):
        self.cachefile = None
        # Authoritative data as loaded from disk.
        self.cachedata = self.create_cachedata()
        # Additions made by this process, persisted by save_extras().
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """
        Load the cache from disk if a cache location is configured.

        Silently starts with an empty cache when the file is missing,
        unreadable, corrupt, or carries a mismatched version.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # A missing or corrupt cache is not fatal: start afresh.
            # (Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt.)
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # A list of dicts; subclasses may use more than one dict.
        data = [{}]
        return data

    def save_extras(self):
        """
        Write this process's additions to a unique side file
        (cachefile-<n>), probing lock/file names starting at our pid so
        concurrent writers never clobber each other.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        # First writer wins: entries already present in dest are preserved.
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """
        Merge every side file written by save_extras() into the main cache
        file, deleting each side file as it is consumed.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError, pickle.UnpicklingError):
                # Unreadable, truncated or corrupt side file: discard it
                # (UnpicklingError previously escaped and aborted the merge).
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400968
969
class SimpleCache(object):
    """
    BitBake single-process pickle cache.

    Load with init_cache(), which returns the cached data (or the supplied
    default), and persist with save(). Unlike MultiProcessCache there are
    no per-process side files; the version check uses the value passed to
    the constructor rather than a class attribute.
    """

    def __init__(self, version):
        self.cachefile = None
        self.cachedata = None
        # On-disk caches with a different version are ignored.
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """
        Load and return the cached data, or defaultdata when no cache dir
        is configured, the file is unreadable/corrupt, or the version does
        not match.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Missing or corrupt cache: fall back to the default.
            # (Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt.)
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Write data to the cache file while holding its lock file."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)