blob: d1be83617b20947a28f4412c6c22c57b88b9e921 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006 Richard Purdie
7# Copyright (C) 2012 Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2003, 2004 Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005 Holger Hans Peter Freyther
14# Copyright (C) 2005 ROAD GmbH
15#
Brad Bishopc342db32019-05-15 21:57:59 -040016# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050017#
Patrick Williamsc124f4f2015-09-15 14:41:29 -050018
Patrick Williamsc124f4f2015-09-15 14:41:29 -050019import os
20import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060021import pickle
Patrick Williamsc124f4f2015-09-15 14:41:29 -050022from collections import defaultdict
23import bb.utils
Andrew Geissler82c905d2020-04-13 13:39:40 -050024import re
Patrick Williamsc124f4f2015-09-15 14:41:29 -050025
26logger = logging.getLogger("BitBake.Cache")
27
Brad Bishop1a4b7ee2018-12-16 17:11:34 -080028__cache_version__ = "152"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050029
def getCacheFile(path, filename, data_hash):
    """Return the cache file path for *filename*: <path>/<filename>.<data_hash>."""
    return os.path.join(path, "{0}.{1}".format(filename, data_hash))
32
# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Common metadata-extraction helpers shared by recipe info cache classes."""

    @classmethod
    def listvar(cls, var, metadata):
        """Return the whitespace-split value of *var* (empty list when unset)."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the value of *var* as an integer, 0 when unset or empty."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return the value of *var* exploded into a dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Map each package to the exploded dependency value of <var>_<pkg>."""
        return {pkg: cls.depvar("%s_%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Map each task to the value of <var>_task-<task>."""
        return {task: cls.getvar("%s_task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Map each variable in *varlist* to its *flag* value.

        With squash=True, entries whose flag value is falsy are dropped.
        """
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if not squash:
            return flags
        return {var: value for var, value in flags.items() if value}

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the datastore value of *var*, or '' when unset."""
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050072
73
class CoreRecipeInfo(RecipeInfoCommon):
    # Per-recipe information captured by the core parser and persisted in
    # bb_cache.dat.  Built from a parsed datastore, later merged into a
    # CacheData aggregate via add_cacheData().
    # NOTE: instances still get a __dict__ (RecipeInfoCommon declares no
    # __slots__), so the empty __slots__ here does not block attribute
    # assignment below.
    __slots__ = ()

    # On-disk pickle file name for this cache class (see getCacheFile()).
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Capture all cacheable recipe data for *filename* from *metadata*."""
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        # '' represents the base (non-virtual) variant of the recipe.
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        # Fall back to deriving PN from the file name when the datastore
        # does not set it.
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes carry only the minimal fields captured above.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        # squash=True: keep only tasks that actually declare checksums.
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        # expand=False: store the inherit cache exactly as recorded.
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        # Create the (initially empty) aggregate containers that
        # add_cacheData() fills in, one entry per recipe file.
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into the *cachedata* aggregate,
        keyed by the (possibly virtual) filename *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # A recipe always provides its own PN, plus anything in PROVIDES
        # (deduplicated, PN first).
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Runtime provides: recipe-wide RPROVIDES plus per-package ones.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500231
def virtualfn2realfn(virtualfn):
    """Split a virtual filename into (real filename, class keyword, multiconfig).

    Accepts names of the form "mc:<mc>:<rest>" and/or "virtual:<cls>:<fn>";
    either prefix may be absent, in which case the corresponding component
    is returned as "".
    """
    mc = ""
    if virtualfn.startswith('mc:'):
        # Peel off the multiconfig prefix first.
        _, mc, virtualfn = virtualfn.split(':', 2)

    cls = ""
    fn = virtualfn
    if virtualfn.startswith('virtual:'):
        # The class keyword may itself contain colons; the real filename
        # is everything after the final colon.
        parts = virtualfn.split(':')
        cls = ":".join(parts[1:-1])
        fn = parts[-1]

    return (fn, cls, mc)
250
def realfn2virtual(realfn, cls, mc):
    """Compose a virtual filename from a real filename, a class keyword and a
    multiconfig name.  Empty *cls*/*mc* contribute no prefix; the "mc:"
    prefix always ends up outermost."""
    virtfn = realfn
    if cls:
        virtfn = "virtual:%s:%s" % (cls, virtfn)
    if mc:
        virtfn = "mc:%s:%s" % (mc, virtfn)
    return virtfn
260
def variant2virtual(realfn, variant):
    """Build the virtual filename for *realfn* under *variant*.

    The variant keyword is either "", a plain class keyword, or an
    "mc:<mc>:<cls>" combination (with <cls> possibly empty).
    """
    if variant == "":
        return realfn
    if not variant.startswith("mc:"):
        return "virtual:%s:%s" % (variant, realfn)
    # Multiconfig variant: "mc:<mc>:" or "mc:<mc>:<cls...>".
    elems = variant.split(":")
    if elems[2]:
        return "mc:%s:virtual:%s:%s" % (elems[1], ":".join(elems[2:]), realfn)
    return "mc:%s:%s" % (elems[1], realfn)
273
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe

    Parses *bbfile* (with any bbappend files in *appends*) into the
    datastore copy *bb_data*, tagged with multiconfig *mc*.  Returns
    whatever bb.parse.handle() returns for the file.  The working
    directory is restored if it had to be changed.
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    # Prime the mtime cache for the recipe's directory.
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        # Restore the working directory on any failure, then re-raise
        # (bare except is deliberate so even KeyboardInterrupt restores cwd).
        if chdir_back:
            os.chdir(oldpath)
        raise
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500308
309
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600310
class NoCache(object):
    """Parse recipes on demand with no on-disk caching of the results."""

    def __init__(self, databuilder):
        # databuilder supplies the base datastore (.data) and the
        # per-multiconfig datastores (.mcdata).
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        # Parse only the requested variant and return its datastore from
        # the mapping load_bbfile() produces.
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        if virtonly:
            # bbfile is a virtual filename: parse just that one
            # multiconfig/class combination.
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        # Parse against the default configuration first...
        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        # ...then once per named multiconfig, keying each extra datastore
        # as "mc:<mc>:<variant>".
        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
352
class Cache(NoCache):
    """
    BitBake Cache implementation

    Persists parsed recipe information (CoreRecipeInfo plus any extra
    cache classes supplied in caches_array) to pickle files under the
    CACHE directory, keyed by the configuration data hash.
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()          # filenames known to be up to date
        self.checked = set()        # filenames already validated this run
        self.depends_cache = {}     # fn -> [CoreRecipeInfo, extra infos...]
        self.data_fn = None
        self.cacheclean = True      # False once something must be re-saved
        self.data_hash = data_hash
        # Splits "file:True file2:False" checksum lists; filenames may
        # contain spaces/colons, hence the lookbehind on ":True"/":False".
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # The cache is only usable when every registered cache class has
        # an existing file for the current data hash.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")
        else:
            logger.debug(1, "Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, its just for debugging convenience
        symlink = os.path.join(self.cachedir, "bb_cache.dat")
        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            pass

    def load_cachefile(self):
        """Load every cache file into depends_cache, firing progress events."""
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            logger.debug(1, 'Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file: alternating key/value
                # pickles until EOF (the load exception breaks the loop).
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated incorrect total size because a file
                        # might've been written out just after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            # Bug fix: parse() takes (filename, appends); the previous call
            # passed an undefined 'configdata' plus an extra argument and
            # raised NameError on every cache miss.  Fall through so the
            # documented (cached, infos) tuple is returned on both paths.
            infos = self.parse(filename, appends)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                # Only freshly-parsed info (not cached) marks the cache dirty.
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f or "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        # One output file per cache class; each entry of depends_cache is
        # written to the file matching its info class.
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        """Return the cached mtime of *cachefile* (0 on error)."""
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        """Record *info_array* for *filename* in cacheData and (when cacheable)
        in the in-memory depends_cache for later sync()."""
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)
729
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500730
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): Cache.__init__ takes (databuilder, data_hash, caches_array)
    # but this legacy entry point passes only two arguments, so calling it
    # would raise TypeError — confirm whether init() is still used anywhere
    # before relying on it.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
749
750
class CacheData(object):
    """Aggregate data structures compiled from the per-recipe cache entries."""

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every registered recipe-info class attach its own storage.
        for info_class in self.caches_array:
            if not issubclass(info_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % info_class)
            info_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Merge every recipe info object for *fn* into the aggregate stores."""
        for recipe_info in info_array:
            recipe_info.add_cacheData(self, fn)
775
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Data is persisted with pickle.  Each worker process accumulates its
    additions in self.cachedata_extras and dumps them to a per-process
    "<cachefile>-<n>" file (save_extras); the main process later folds all
    of those files back into the master cache file (save_merge).
    """

    def __init__(self):
        # cachefile stays None until init_cache() finds a cache directory;
        # all save methods are no-ops in that state.
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """
        Locate the cache file under PERSISTENT_DIR (or CACHE) from the
        datastore *d* and load any existing, version-compatible data into
        self.cachedata.  Silently keeps the empty default on any failure.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # A missing, truncated or otherwise unreadable cache file just
            # means we start from scratch.  (Was a bare "except:", which
            # also swallowed KeyboardInterrupt/SystemExit.)
            return
        finally:
            # Release the lock on every path; previously the unlock call
            # was duplicated across the success and failure branches.
            bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # Subclasses may override this to provide additional dictionaries.
        data = [{}]
        return data

    def save_extras(self):
        """
        Dump this process's extra entries to a unique "<cachefile>-<n>"
        file, starting the probe at our pid and taking a per-suffix lock
        so concurrent writers never pick the same file.
        """
        if not self.cachefile:
            return

        # Shared lock: many writers may dump extras concurrently; only
        # save_merge() needs the lock exclusively.
        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

        with open(self.cachefile + "-" + str(i), "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Fold *source* into *dest*; existing entries in dest win."""
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """
        Merge every "<cachefile>-<n>" extras file into the master cache,
        deleting the per-process files as they are consumed.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError, pickle.UnpicklingError):
                # Corrupt or truncated extras file - discard it.  The
                # UnpicklingError case previously escaped and aborted the
                # whole merge.
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Brad Bishop08902b02019-08-20 09:16:51 -0400879
880
class SimpleCache(object):
    """
    BitBake simple cache implementation

    A single-writer, version-checked pickle store: init_cache() returns the
    previously saved data (or *defaultdata* when missing/stale) and save()
    replaces the whole file.  (The original docstring was copy-pasted from
    MultiProcessCache and misdescribed this class.)
    """

    def __init__(self, version):
        # cachefile stays None until init_cache() finds a cache directory;
        # save() is a no-op in that state.
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """
        Locate the cache file under PERSISTENT_DIR (or CACHE) from the
        datastore *d* and return its contents, or *defaultdata* when the
        file is absent, unreadable or has a different version.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Missing or unreadable cache - fall back to the default.
            # (Was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit.)
            return defaultdata
        finally:
            # Release the lock on every path; previously the unlock call
            # was duplicated across the success and failure branches.
            bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Atomically-enough persist *data* under the cache lock."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.cacheversion])
        finally:
            # Previously the lock leaked if open() or dump() raised.
            bb.utils.unlockfile(glf)