# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "150"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
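
# For illustration only: with path "/build/cache" (hypothetical), filename
# "bb_cache.dat" and data_hash "abc123", this returns
# "/build/cache/bb_cache.dat.abc123" -- one cache file per configuration hash.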

# RecipeInfoCommon defines common methods for retrieving data from recipe
# metadata for the caches. CoreRecipeInfo, as well as any extra RecipeInfo
# class, must inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''

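# A minimal sketch of an extra cache class, for illustration only (the class
# name and the HOMEPAGE field are hypothetical, not part of BitBake). Extra
# caches subclass RecipeInfoCommon, pick a distinct cachefile, and provide
# the same three hooks that CoreRecipeInfo implements below:
#
#   class HomepageRecipeInfo(RecipeInfoCommon):
#       __slots__ = ()
#       cachefile = "bb_cache_homepage.dat"
#
#       def __init__(self, filename, metadata):
#           self.homepage = self.getvar('HOMEPAGE', metadata)
#
#       @classmethod
#       def init_cacheData(cls, cachedata):
#           cachedata.homepage = {}
#
#       def add_cacheData(self, cachedata, fn):
#           cachedata.homepage[fn] = self.homepage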

class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # Create a collection of all targets, for use by sanity-checking
        # tasks such as those checking upstream versions, licenses, and the
        # tools needed for task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('multiconfig:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)

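# Worked examples of the mapping above (recipe path hypothetical):
#   virtualfn2realfn("/r/foo.bb")                -> ("/r/foo.bb", "", "")
#   virtualfn2realfn("virtual:native:/r/foo.bb") -> ("/r/foo.bb", "native", "")
#   virtualfn2realfn("multiconfig:mc1:virtual:native:/r/foo.bb")
#                                                -> ("/r/foo.bb", "native", "mc1")
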
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "multiconfig:" + mc + ":" + realfn
    return realfn

def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated variant name to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("multiconfig:"):
        elems = variant.split(":")
        if elems[2]:
            return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "multiconfig:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn

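# Round-trip examples (recipe path hypothetical): realfn2virtual() inverts
# virtualfn2realfn(), while variant2virtual() builds the same names from the
# variant strings recorded in __VARIANTS:
#   realfn2virtual("/r/foo.bb", "native", "mc1")
#       -> "multiconfig:mc1:virtual:native:/r/foo.bb"
#   variant2virtual("/r/foo.bb", "native")           -> "virtual:native:/r/foo.bb"
#   variant2virtual("/r/foo.bb", "multiconfig:mc1:") -> "multiconfig:mc1:/r/foo.bb"
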
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first checks if there is a TOPDIR and, if not,
    # calls getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise

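# Sketch of a direct call, for illustration only (the path and datastore `d`
# are hypothetical; real callers go through NoCache/Cache below):
#   datastores = parse_recipe(d.createCopy(), "/r/foo.bb", appends=[])
#   datastores[""]        # the base recipe's datastore
#   datastores["native"]  # a virtual:native variant, if one was generated
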

class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly=False):
        """
        Load and parse one .bb build file.
        Return a dict of datastores, keyed by variant (and multiconfig) name.
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores

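    # For illustration: with multiconfigs "" and "mc1" (hypothetical),
    # load_bbfile() would return datastores keyed like
    # "", "native", "multiconfig:mc1:" and "multiconfig:mc1:native".
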
class Cache(NoCache):
    """
    BitBake Cache implementation
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire progress events when the percentage advances
                    current_progress = cachefile.tell() + previous_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress

        # Note: the number of entries in depends_cache corresponds to the
        # number of parsed files; a file may have several caches but is
        # still counted as one item in the cache.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            infos = self.parse(filename, appends)

        return cached, infos

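    # Typical call pattern, for illustration (this mirrors loadData() below):
    #   cached, infos = cache.load(fn, appends)
    #   for virtualfn, info_array in infos:
    #       cache.add_info(virtualfn, info_array, cachedata, parsed=not cached)
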
    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
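                    # For illustration (hypothetical paths), fl looks like:
                    #   "/srv/a.patch:True /srv/missing dir/b:False"
                    # i.e. space-separated "<path>:<existed>" entries.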
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

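    # On-disk layout written by sync() above and read back by
    # load_cachefile(): one pickle stream per cache class, containing
    # __cache_version__, bb.__version__, then repeated (key, info) pairs.
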
    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.databuilder, cooker.data_hash, cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
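
# Typical multi-process flow, for illustration only (the subclass name is
# hypothetical; real subclasses such as the codeparser cache define
# cache_file_name and CACHE_VERSION):
#   cache = MyCache()                 # subclass of MultiProcessCache
#   cache.init_cache(d)               # parent process loads any existing data
#   cache.cachedata_extras[0][k] = v  # workers record new entries
#   cache.save_extras()               # each worker dumps its extras file
#   cache.save_merge()                # parent merges extras back into the cache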