# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Patrick Williamsc124f4f2015-09-15 14:41:29 -050031import os
Patrick Williamsc0f7c042017-02-23 20:41:17 -060032import sys
Patrick Williamsc124f4f2015-09-15 14:41:29 -050033import logging
Patrick Williamsc0f7c042017-02-23 20:41:17 -060034import pickle
Patrick Williamsc124f4f2015-09-15 14:41:29 -050035from collections import defaultdict
36import bb.utils
37
38logger = logging.getLogger("BitBake.Cache")
39
Brad Bishop6e60e8b2018-02-01 10:27:11 -050040__cache_version__ = "151"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050041
def getCacheFile(path, filename, data_hash):
    """Return the cache file path: *filename* inside *path*, suffixed with the data hash."""
    return os.path.join(path, "%s.%s" % (filename, data_hash))
44
45# RecipeInfoCommon defines common data retrieving methods
46# from meta data for caches. CoreRecipeInfo as well as other
47# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Shared metadata-extraction helpers for recipe info caches.

    CoreRecipeInfo and any extra cache info classes are expected to
    subclass this and use these classmethods to pull values out of a
    parsed recipe datastore.
    """

    @classmethod
    def listvar(cls, var, metadata):
        """Return the variable's value split on whitespace (a list)."""
        value = cls.getvar(var, metadata)
        return value.split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the variable's value as an int (0 when unset or empty)."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return the variable's value parsed as a dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Map each package to the dependency list held in <var>_<pkg>."""
        return {pkg: cls.depvar("%s_%s" % (var, pkg), metadata)
                for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Map each task to the value of <var>_task-<task>."""
        return {task: cls.getvar("%s_task-%s" % (var, task), metadata)
                for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Map each variable in varlist to its value for *flag*.

        With squash=True, entries whose flag value is falsy are dropped.
        """
        out_dict = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if not squash:
            return out_dict
        return {k: v for (k, v) in out_dict.items() if v}

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the variable's (optionally expanded) value, or '' when unset."""
        return metadata.getVar(var, expand) or ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -050084
85
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information pickled into the bb_cache.dat cache file.

    One instance is created per parsed recipe variant.  init_cacheData()
    prepares the per-file dictionaries on a CacheData-style store and
    add_cacheData() folds one instance into that store.
    """
    __slots__ = ()

    # On-disk cache file this info class is saved to (used by Cache).
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Capture the cacheable state of one parsed recipe.

        filename -- path of the .bb file the metadata came from
        metadata -- the parsed datastore for that recipe (variant)
        """
        self.file_depends = metadata.getVar('__depends', False)
        # mtime of the recipe itself, used later for cache validation.
        self.timestamp = bb.parse.cached_mtime(filename)
        # '' represents the base (non-virtual) variant.
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes keep only enough data to report what they
            # would have provided; everything else is left unset.
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        # PN itself always counts as one of the recipe's packages.
        if not self.pn in self.packages:
            self.packages.append(self.pn)

        # Per-task base hashes plus the filename used for hash reporting.
        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        # Build-time and runtime dependency/provider information, both
        # recipe-wide and per-package.
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Attach (empty) core data structures to *cachedata*."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into *cachedata* under key *fn*."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # PN is implicitly provided, then any explicit PROVIDES (deduplicated).
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Recipe-wide RPROVIDES plus the per-package ones.
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500246
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600247def virtualfn2realfn(virtualfn):
248 """
249 Convert a virtual file name to a real one + the associated subclass keyword
250 """
251 mc = ""
252 if virtualfn.startswith('multiconfig:'):
253 elems = virtualfn.split(':')
254 mc = elems[1]
255 virtualfn = ":".join(elems[2:])
256
257 fn = virtualfn
258 cls = ""
259 if virtualfn.startswith('virtual:'):
260 elems = virtualfn.split(':')
261 cls = ":".join(elems[1:-1])
262 fn = elems[-1]
263
264 return (fn, cls, mc)
265
def realfn2virtual(realfn, cls, mc):
    """Rebuild the virtual filename for (real filename, class keyword, multiconfig).

    Inverse of virtualfn2realfn(); empty cls/mc components are omitted.
    """
    virtualfn = realfn
    if cls:
        virtualfn = "virtual:%s:%s" % (cls, virtualfn)
    if mc:
        virtualfn = "multiconfig:%s:%s" % (mc, virtualfn)
    return virtualfn
275
def variant2virtual(realfn, variant):
    """Build the virtual filename for *realfn* under *variant*.

    *variant* is "" for the base recipe, a plain class keyword, or a
    "multiconfig:<mc>:[<cls>]" combination.
    """
    if variant == "":
        return realfn
    if not variant.startswith("multiconfig:"):
        return "virtual:" + variant + ":" + realfn
    fields = variant.split(":")
    # fields[1] is the multiconfig name; fields[2:] is an optional class.
    if fields[2]:
        return "multiconfig:" + fields[1] + ":virtual:" + ":".join(fields[2:]) + ":" + realfn
    return "multiconfig:" + fields[1] + ":" + realfn
288
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """Parse one recipe file into *bb_data* and return the parse result.

    appends -- list of .bbappend files to fold in
    mc      -- multiconfig name this parse belongs to ('' for the default)
    """
    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")

    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    chdir_back = not bb_data.getVar('TOPDIR', False)
    if chdir_back:
        bb_data.setVar('TOPDIR', bbfile_loc)

    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        return bb.parse.handle(bbfile, bb_data)
    finally:
        # Restore the working directory whether parsing succeeded or not.
        if chdir_back:
            os.chdir(oldpath)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500323
324
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600325
class NoCache(object):
    """Parse recipes on demand without any on-disk caching."""

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        return self.load_bbfile(virtualfn, appends, virtonly=True)[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        if virtonly:
            # Finalise only the requested variant, in its multiconfig's datastore.
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            datastore = self.databuilder.mcdata[mc].createCopy()
            datastore.setVar("__ONLYFINALISE", virtual or "default")
            return parse_recipe(datastore, bbfile, appends, mc)

        datastores = parse_recipe(self.data.createCopy(), bbfile, appends)

        # Parse the recipe once more for every configured multiconfig and
        # merge those stores in under "multiconfig:<mc>:..." keys.
        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            mcstore = self.databuilder.mcdata[mc].createCopy()
            for name, store in parse_recipe(mcstore, bbfile, appends, mc).items():
                datastores["multiconfig:%s:%s" % (mc, name)] = store

        return datastores
367
class Cache(NoCache):
    """
    BitBake Cache implementation

    Persists per-recipe RecipeInfoCommon instances (one cache file per
    registered info class) keyed by recipe filename, and validates them
    against file mtimes, appends and checksum lists before reuse.
    """

    def __init__(self, databuilder, data_hash, caches_array):
        """Set up the cache.

        databuilder  -- builder object providing the base datastore (.data)
        data_hash    -- hash of the base configuration; embedded in cache
                        filenames so caches from other configs are ignored
        caches_array -- RecipeInfoCommon subclasses to dump/load
        """
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            # Caching is optional; without CACHE set everything reparses.
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # Only load when every registered cache file is present; a partial
        # set would leave depends_cache entries with missing info classes.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        """Load every registered cache file into self.depends_cache,
        firing progress events as data is read."""
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file: alternating key/value
                # pickles until EOF (the inner break on exception).
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated incorrect total size because a file
                        # might've been written out just after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            # Fix: this previously called
            # self.parse(filename, appends, configdata, self.caches_array)
            # -- a stale signature that raised NameError ('configdata' is
            # undefined) and returned the bare infos list instead of the
            # (cached, infos) pair this method documents and loadData()
            # unpacks.
            infos = self.parse(filename, appends)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                # parsed=not cached: freshly parsed info dirties the cache.
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                    fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                    fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                # Walk "path:True path:False ..." entries one at a time.
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    # Glob entries can't be checked for existence directly.
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                        fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        # One file per info class: each gets the version headers followed by
        # the key/info pairs belonging to that class.
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        """Return the (cached) mtime of *cachefile*; 0 when it doesn't exist."""
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        """Record *info_array* for *filename*, updating *cacheData* and
        (when enabled) the on-disk depends cache."""
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                # Freshly parsed data means the on-disk cache is now stale.
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)
739
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500740
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): this looks stale -- Cache.__init__ in this file takes
    # (databuilder, data_hash, caches_array), so this two-argument call would
    # raise TypeError if ever executed, and it passes a datastore where a
    # databuilder is expected.  Confirm whether init() still has any callers;
    # if so, it needs updating to the current Cache constructor signature.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
759
760
class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every registered info class attach its own (empty) data
        # structures; complain about classes outside the expected hierarchy.
        for info_class in self.caches_array:
            if not issubclass(info_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % info_class)
            info_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Fold every RecipeInfo in *info_array* for file *fn* into this store."""
        for info in info_array:
            info.add_cacheData(self, fn)
785
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Subclasses must provide the class attributes `cache_file_name` and
    `CACHE_VERSION` (both referenced below via self.__class__).  Writers
    accumulate into cachedata_extras and save_extras() dumps them to a
    per-pid side file; save_merge() later folds all side files back into
    the main cache file under an exclusive lock.
    """

    def __init__(self):
        # cachefile stays None (making the save methods no-ops) until
        # init_cache() finds a usable cache directory.
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        """Locate the cache file from the datastore *d* and load it if valid."""
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        # Take the main lock while reading; any failure to read/unpickle
        # simply leaves the empty default cachedata in place.
        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        # Silently ignore caches written by a different format version.
        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        # Default layout: a list with one dict; merge_data() merges
        # index-by-index, so subclasses may use longer lists.
        data = [{}]
        return data

    def save_extras(self):
        """Dump this process's extra entries to a private side file.

        Holds the main lock shared (so save_merge() can't run concurrently)
        and probes for a free "<cachefile>-<i>" slot, starting at our pid.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                # Slot busy (lock held or side file present): try the next one.
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Merge *source* into *dest* index-by-index; existing keys win."""
        for j in range(0,len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        """Fold every "<cachefile>-<i>" side file into the main cache file.

        Takes the main lock exclusively; unreadable or version-mismatched
        side files are deleted without being merged.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
889