#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "152"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
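
# Illustrative example (hypothetical values): getCacheFile("/build/cache",
# "bb_cache.dat", "abc123") returns "/build/cache/bb_cache.dat.abc123",
# so each configuration data hash gets its own cache file.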

# RecipeInfoCommon defines the common metadata-retrieval methods used by the
# caches. CoreRecipeInfo, as well as any extra RecipeInfo classes, must
# inherit from it.
36class RecipeInfoCommon(object):
37
38 @classmethod
39 def listvar(cls, var, metadata):
40 return cls.getvar(var, metadata).split()
41
42 @classmethod
43 def intvar(cls, var, metadata):
44 return int(cls.getvar(var, metadata) or 0)
45
46 @classmethod
47 def depvar(cls, var, metadata):
48 return bb.utils.explode_deps(cls.getvar(var, metadata))
49
50 @classmethod
51 def pkgvar(cls, var, packages, metadata):
52 return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
53 for pkg in packages)
54
55 @classmethod
56 def taskvar(cls, var, tasks, metadata):
57 return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
58 for task in tasks)
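
    # Illustrative examples (hypothetical names): pkgvar('RDEPENDS', ['foo'], d)
    # reads the variable RDEPENDS_foo, and taskvar('BB_BASEHASH', ['do_compile'], d)
    # reads BB_BASEHASH_task-do_compile, mirroring how the metadata stores
    # per-package and per-task values.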

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050072
73
74class CoreRecipeInfo(RecipeInfoCommon):
75 __slots__ = ()
76
Brad Bishopd7bf8c12018-02-25 22:55:05 -050077 cachefile = "bb_cache.dat"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050078
Brad Bishopd7bf8c12018-02-25 22:55:05 -050079 def __init__(self, filename, metadata):
Patrick Williamsc124f4f2015-09-15 14:41:29 -050080 self.file_depends = metadata.getVar('__depends', False)
81 self.timestamp = bb.parse.cached_mtime(filename)
82 self.variants = self.listvar('__VARIANTS', metadata) + ['']
83 self.appends = self.listvar('__BBAPPEND', metadata)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -050084 self.nocache = self.getvar('BB_DONT_CACHE', metadata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -050085
86 self.skipreason = self.getvar('__SKIPPED', metadata)
87 if self.skipreason:
Brad Bishop19323692019-04-05 15:28:33 -040088 self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
Patrick Williamsc124f4f2015-09-15 14:41:29 -050089 self.skipped = True
90 self.provides = self.depvar('PROVIDES', metadata)
91 self.rprovides = self.depvar('RPROVIDES', metadata)
92 return
93
94 self.tasks = metadata.getVar('__BBTASKS', False)
95
96 self.pn = self.getvar('PN', metadata)
97 self.packages = self.listvar('PACKAGES', metadata)
Brad Bishopd7bf8c12018-02-25 22:55:05 -050098 if not self.packages:
Patrick Williamsc124f4f2015-09-15 14:41:29 -050099 self.packages.append(self.pn)
100
101 self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
102 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
103
104 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
105
106 self.skipped = False
107 self.pe = self.getvar('PE', metadata)
108 self.pv = self.getvar('PV', metadata)
109 self.pr = self.getvar('PR', metadata)
110 self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
111 self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
112 self.stamp = self.getvar('STAMP', metadata)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500113 self.stampclean = self.getvar('STAMPCLEAN', metadata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500114 self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
115 self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
116 self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
117 self.depends = self.depvar('DEPENDS', metadata)
118 self.provides = self.depvar('PROVIDES', metadata)
119 self.rdepends = self.depvar('RDEPENDS', metadata)
120 self.rprovides = self.depvar('RPROVIDES', metadata)
121 self.rrecommends = self.depvar('RRECOMMENDS', metadata)
122 self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
123 self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
124 self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500125 self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500126 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
127 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
128 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500129 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # The CacheData fields populated by CoreRecipeInfo
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)
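
# Illustrative mappings (hypothetical paths):
#   virtualfn2realfn("/r/foo.bb")                        -> ("/r/foo.bb", "", "")
#   virtualfn2realfn("virtual:native:/r/foo.bb")         -> ("/r/foo.bb", "native", "")
#   virtualfn2realfn("mc:mymc:virtual:native:/r/foo.bb") -> ("/r/foo.bb", "native", "mymc")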

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "mc:" + mc + ":" + realfn
    return realfn
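
# realfn2virtual() is the inverse of virtualfn2realfn(); for example
# (hypothetical path), realfn2virtual("/r/foo.bb", "native", "mymc")
# returns "mc:mymc:virtual:native:/r/foo.bb".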

def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant name (as stored in __VARIANTS) to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("mc:"):
        elems = variant.split(":")
        if elems[2]:
            return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "mc:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn
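
# Illustrative mappings (hypothetical paths):
#   variant2virtual("/r/foo.bb", "")               -> "/r/foo.bb"
#   variant2virtual("/r/foo.bb", "native")         -> "virtual:native:/r/foo.bb"
#   variant2virtual("/r/foo.bb", "mc:mymc:native") -> "mc:mymc:virtual:native:/r/foo.bb"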

def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first checks if there is a TOPDIR and only calls
    # getcwd() if there is not.
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise


class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly=False):
        """
        Load and parse one .bb build file, returning the datastores for the
        recipe and each of its virtual variants
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
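
    # The returned mapping is keyed by variant, e.g. (illustrative):
    #   {"": <base datastore>, "native": <datastore>, "mc:mymc:": <datastore>}
    # where multiconfig entries are prefixed with "mc:<mcname>:".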

class Cache(NoCache):
    """
    BitBake Cache implementation
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

        # The caches_array is passed into the constructor and used later
        # to decide whether extra cache file dump/load support is needed
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")
        else:
            logger.debug(1, "Cache file %s not found, building..." % self.cachefile)

    def load_cachefile(self):
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            logger.debug(1, 'Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

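                # The stream layout, as written by sync(): a cache version
                # string, a bitbake version string, then alternating key /
                # RecipeInfoCommon pickles until EOF.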
                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated incorrect total size because a file
                        # might've been written out just after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress // cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress

        # Note: the number of depends_cache entries corresponds to the number
        # of parsed files; a file that appears in several caches still counts
        # as a single item
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos
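
    # Illustrative shape of the result (hypothetical path):
    #   [("virtual:native:/r/foo.bb", [CoreRecipeInfo, ...]),
    #    ("/r/foo.bb", [CoreRecipeInfo, ...])]
    # with one (virtualfn, info_array) pair per variant, the bare recipe last.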

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData.  Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            infos = self.parse(filename, appends)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

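        # Each file-checksums flag value is a whitespace-separated list of
        # "<path>:True|False" entries recording whether the path existed at
        # parse time, e.g. (illustrative): "/r/a.inc:True /r/b.patch:False".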
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # Note: this assumes the cooker exposes databuilder, data_hash and
    # caches_array attributes matching Cache.__init__'s signature.
    return Cache(cooker.databuilder, cooker.data_hash, cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()
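        # cachedata holds the merged state loaded from disk; cachedata_extras
        # collects entries added by this process, written out by save_extras()
        # and folded back in by save_merge().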

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
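        """Write this process's cachedata_extras out to a side file.

        A sketch of the protocol as implemented below: the ".lock" file is
        taken shared so several processes can save concurrently, while
        save_merge() takes it exclusively. Each writer then claims a unique
        "<cachefile>-<n>" file (n starting at its PID) via a per-file lock
        before pickling.
        """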
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

        with open(self.cachefile + "-" + str(i), "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
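
    # Illustrative merge: with dest == [{"a": 1}] and source == [{"a": 2, "b": 3}],
    # merge_data() leaves dest as [{"a": 1, "b": 3}]: entries already present
    # in dest win, so the first result recorded for a key is kept.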

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)