#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "152"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)

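# A quick illustration of the cache file naming scheme (the values here
# are hypothetical):
#   getCacheFile("/build/cache", "bb_cache.dat", "abc123")
#   -> "/build/cache/bb_cache.dat.abc123"
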
# RecipeInfoCommon defines the common methods used to retrieve data
# from recipe metadata for the caches. CoreRecipeInfo, as well as any
# other extra RecipeInfo classes, needs to inherit from this class.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)
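
    # For illustration (the package and task names here are hypothetical):
    #   pkgvar("RDEPENDS", ["foo"], d) looks up "RDEPENDS_foo"
    #   taskvar("BB_BASEHASH", ["do_compile"], d) looks up "BB_BASEHASH_task-do_compile"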

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename, metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends = self.depvar('DEPENDS', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)
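
# For illustration, with a hypothetical recipe path:
#   virtualfn2realfn("mc:mymc:virtual:native:/r/foo.bb") -> ("/r/foo.bb", "native", "mymc")
#   virtualfn2realfn("virtual:native:/r/foo.bb") -> ("/r/foo.bb", "native", "")
#   virtualfn2realfn("/r/foo.bb") -> ("/r/foo.bb", "", "")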

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "mc:" + mc + ":" + realfn
    return realfn
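
# And the inverse mapping (same hypothetical path):
#   realfn2virtual("/r/foo.bb", "native", "mymc") -> "mc:mymc:virtual:native:/r/foo.bb"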

def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant (as stored in __VARIANTS) to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("mc:"):
        elems = variant.split(":")
        if elems[2]:
            return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "mc:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn
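
# Examples of the variant forms (recipe path hypothetical):
#   variant2virtual("/r/foo.bb", "") -> "/r/foo.bb"
#   variant2virtual("/r/foo.bb", "native") -> "virtual:native:/r/foo.bb"
#   variant2virtual("/r/foo.bb", "mc:mymc:native") -> "mc:mymc:virtual:native:/r/foo.bb"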

def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first checks if there is a TOPDIR and, if not,
    # calls getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise



class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly=False):
        """
        Load and parse one .bb build file.
        Return a dict of the parsed datastores, keyed by variant.
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores

353class Cache(NoCache):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500354 """
355 BitBake Cache implementation
356 """
357
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600358 def __init__(self, databuilder, data_hash, caches_array):
359 super().__init__(databuilder)
360 data = databuilder.data
361
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500362 # Pass caches_array information into Cache Constructor
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500363 # It will be used later for deciding whether we
364 # need extra cache file dump/load support
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500365 self.caches_array = caches_array
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500366 self.cachedir = data.getVar("CACHE")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500367 self.clean = set()
368 self.checked = set()
369 self.depends_cache = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500370 self.data_fn = None
371 self.cacheclean = True
372 self.data_hash = data_hash
373
374 if self.cachedir in [None, '']:
375 self.has_cache = False
376 logger.info("Not using a cache. "
377 "Set CACHE = <directory> to enable.")
378 return
379
380 self.has_cache = True
381 self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
382
Brad Bishop316dfdd2018-06-25 12:45:53 -0400383 logger.debug(1, "Cache dir: %s", self.cachedir)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500384 bb.utils.mkdirhier(self.cachedir)
385
386 cache_ok = True
387 if self.caches_array:
388 for cache_class in self.caches_array:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600389 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
390 cache_ok = cache_ok and os.path.exists(cachefile)
391 cache_class.init_cacheData(self)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500392 if cache_ok:
393 self.load_cachefile()
394 elif os.path.isfile(self.cachefile):
395 logger.info("Out of date cache found, rebuilding...")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400396 else:
397 logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500398
Brad Bishop96ff1982019-08-19 13:50:42 -0400399 # We don't use the symlink, its just for debugging convinience
400 symlink = os.path.join(self.cachedir, "bb_cache.dat")
401 if os.path.exists(symlink):
402 bb.utils.remove(symlink)
403 try:
404 os.symlink(os.path.basename(self.cachefile), symlink)
405 except OSError:
406 pass
407
    def load_cachefile(self):
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct total size of all the cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            logger.debug(1, 'Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated an incorrect total size,
                        # because a file might have been written out just
                        # after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

            previous_progress += current_progress

        # Note: the depends cache count corresponds to the number of parsed
        # files. A file may have several caches but is still regarded as a
        # single item in the cache.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            infos = self.parse(filename, appends)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
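                # Each fl is a space-separated list of "<path>:True" /
                # "<path>:False" entries recording whether the path existed
                # at parse time, e.g. "/p/a.inc:True /p/b.inc:False"
                # (the paths here are illustrative)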
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

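        # Cache the recipe if it was skipped, or if its PV doesn't still
        # contain the SRCREVINACTION placeholder (i.e. SRCREV was resolved
        # at parse time), and it didn't set BB_DONT_CACHE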
        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.databuilder, cooker.data_hash, cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
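        # Entries already present in dest are kept; only new keys are
        # copied over from the side files.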

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)