#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
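    # e.g. state.get_name(state.running) == "running"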


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)
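
    # Illustrative use by a front-end (the calling code below is an assumption,
    # not part of this module):
    #
    #   features = CookerFeatures()
    #   features.setFeature(CookerFeatures.SEND_SANITYEVENTS)
    #   assert CookerFeatures.SEND_SANITYEVENTS in features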


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

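# The event log written by EventWriter is line-oriented JSON: the first line holds
# all variables/flags ("allvariables") and each following line holds one event as
# its class name plus a base64-encoded pickle. A reader would reverse that encoding;
# a minimal sketch (not part of this module, reader-side names are assumptions):
#
#   import codecs, json, pickle
#   with open(eventfile) as f:
#       header = json.loads(f.readline())    # {"allvariables": {...}}
#       for line in f:
#           entry = json.loads(line)         # {"class": ..., "vars": ...}
#           event = pickle.loads(codecs.decode(entry["vars"].encode("utf-8"), "base64"))
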
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

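    # The two inotify callbacks below invalidate the base configuration and/or
    # recipe parse caches when a watched file changes; an IN_Q_OVERFLOW means
    # events may have been lost, so everything is conservatively invalidated.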
    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
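        # 'deps' is a list of tuples whose first element is a file path (as stored
        # in __depends/__base_depends); only that path is used here.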
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

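        # BB_HASHSERVE = "auto" (typically set in a conf file such as local.conf)
        # asks the cooker to run a local hash equivalence server on a unix domain
        # socket under TOPDIR and to point every multiconfig datastore at it.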
        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
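        # Each runlist entry is [mc, target, taskname, providing filename]; taskdata is a dict keyed by multiconfig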
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
            depend_tree["pn"][pn]["filename"] = taskfn
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            depend_tree["pn"][pn]["version"] = version
            rdepends = self.recipecaches[mc].rundeps[taskfn]
            rrecs = self.recipecaches[mc].runrecs[taskfn]
            depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

            # for all extra attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
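        # Each entry appended below is (collection name, BBFILE_PATTERN string, compiled regex, priority)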
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001109 if collections:
1110 collection_priorities = {}
1111 collection_depends = {}
1112 collection_list = collections.split()
1113 min_prio = 0
1114 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001115 bb.debug(1,'Processing %s in collection list' % (c))
1116
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001117 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001118 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001119 if priority:
1120 try:
1121 prio = int(priority)
1122 except ValueError:
1123 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1124 errors = True
1125 if min_prio == 0 or prio < min_prio:
1126 min_prio = prio
1127 collection_priorities[c] = prio
1128 else:
1129 collection_priorities[c] = None
1130
1131 # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

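            # In effect a layer with an explicit BBFILE_PRIORITY keeps it, while any other
            # layer ends up with max(min_prio, highest priority among its dependencies) + 1,
            # so a layer always sorts above the layers it depends on.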
            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                    errors = False
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
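        # All of the values below come from a single gmtime() snapshot so BUILDSTART, DATE
        # and TIME stay consistent with each other, e.g. DATE=20150915 and TIME=144129 for
        # the same instant (example values only).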
        t = time.gmtime()
        for mc in self.databuilder.mcdata:
            ds = self.databuilder.mcdata[mc]
            if not ds.getVar("BUILDNAME", False):
                ds.setVar("BUILDNAME", "${DATE}${TIME}")
            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
            ds.setVar("DATE", time.strftime('%Y%m%d', t))
            ds.setVar("TIME", time.strftime('%H%M%S', t))

    def reset_mtime_caches(self):
        """
        Reset mtime caches - this is particularly important when memory resident, as
        something which is cached may well have changed since the last invocation
        (e.g. a file associated with a recipe might have been modified by the user).
        """
        build.reset_cache()
        bb.fetch._checksum_cache.mtime_cache.clear()
        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
        if siggen_cache:
            bb.parse.siggen.checksum_cache.mtime_cache.clear()

    def matchFiles(self, bf):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)

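        # If 'bf' names an existing file it is returned directly below; otherwise it is
        # treated as a regular expression and searched against the collected recipe list.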
        self.collection = CookerCollectFiles(self.bbfile_config_priorities)
        filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)

    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile
        """

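        # Rough flow of the code below: parse the configuration, resolve the recipe file,
        # parse just that recipe (plus its bbappends), strip its external dependencies out
        # of the recipe cache, then hand a single-entry runlist to the runqueue.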
        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)

        infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        if quietlog:
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, abort):

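            # Registered below as a server idle callback: it is invoked repeatedly from the
            # server loop; returning False tells the server this build has finished (or
            # failed), while any other return value keeps the callback scheduled.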
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False

            if not retval:
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                self.command.finishAsyncCommand(msg)
                # We trashed self.recipecaches above
                self.parsecache_valid = False
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

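        # Make sure every target handed to BuildInit carries a task component: anything
        # without a ':' gets the default task appended, e.g. 'busybox' becomes
        # 'busybox:do_build' (example recipe name only).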
        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets always look like <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)


    def getAllKeysWithFlags(self, flaglist):
        dump = {}
        for k in self.data.keys():
            try:
                expand = True
                flags = self.data.getVarFlags(k)
                if flags and "func" in flags and "python" in flags:
                    expand = False
                v = self.data.getVar(k, expand)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v' : str(v),
                        'history' : self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        if flags and d in flags:
                            dump[k][d] = flags[d]
                        else:
                            dump[k][d] = None
            except Exception as e:
                print(e)
        return dump


    def updateCacheSync(self):
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.handlePRServ()

    # This is called for all async commands when self.state != running
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.bbfile_config_priorities)
            (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True

    def checkPackages(self, pkgs_to_build, task=None):

        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
        for pkg in pkgs_to_build:
            if pkg in ignore:
                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
            if pkg.startswith("multiconfig:"):
                pkgs_to_build.remove(pkg)
                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))

        if 'world' in pkgs_to_build:
            pkgs_to_build.remove('world')
            for mc in self.multiconfigs:
                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                for t in self.recipecaches[mc].world_target:
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for mc in self.multiconfigs:
                for t in self.recipecaches[mc].universe_target:
                    if task:
                        foundtask = False
                        for provider_fn in self.recipecaches[mc].providers[t]:
                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                                foundtask = True
                                break
                        if not foundtask:
                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                            continue
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        return pkgs_to_build

    def pre_serve(self):
        # We are now in our own process so we can call this here.
        # PRServ exits if its parent process exits
        self.handlePRServ()
        return

    def post_serve(self):
        prserv.serv.auto_shutdown()
        if self.hashserv:
            self.hashserv.process.terminate()
            self.hashserv.process.join()
        bb.event.fire(CookerExit(), self.data)

    def shutdown(self, force = False):
        if force:
            self.state = state.forceshutdown
        else:
            self.state = state.shutdown

        if self.parser:
            self.parser.shutdown(clean=not force, force=force)

    def finishcommand(self):
        self.state = state.initial

    def reset(self):
        self.initConfigurationData()
        self.handlePRServ()

    def clientComplete(self):
        """Called when the client is done using the server"""
        self.finishcommand()
        self.extraconfigdata = {}
        self.command.reset()
        self.databuilder.reset()
        self.data = self.databuilder.data
        self.parsecache_valid = False
        self.baseconfig_valid = False


class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)


class CookerCollectFiles(object):
    def __init__(self, priorities):
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest. This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority( self, filename, matched = None ):
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if not regex in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() needs a tuple of suffixes, not a list
            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES") or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this and it is horrid: we intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
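        # A '%' in a bbappend name acts as a wildcard for the remainder of the version
        # field, so e.g. busybox_%.bbappend applies to busybox_1.31.1.bb (example names only).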
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):

        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        def find_bbappend_match(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    # If the bbappend is already matched by a regex in the matched set, return False
                    for matched_regex in matched:
                        if matched_regex.match(append):
                            return False
                    return True
            return False

        for unmatch in unmatched.copy():
            if find_bbappend_match(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities

class ParsingFailure(Exception):
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        Exception.__init__(self, realexception, recipe)

class Parser(multiprocessing.Process):
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        if self.init:
            self.init()

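        # Worker loop: pop jobs from the shared list until it is exhausted or a quit
        # request arrives, parse each recipe and push the result back to the cooker.
        # Results that cannot be queued immediately are kept in 'pending' and retried.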
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.pop()
                except IndexError:
                    break
                result = self.parse(*job)
                # Clear the siggen cache after parsing to control memory usage, it's huge
                bb.parse.siggen.postparsing_clean_cache()
            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, filename, appends):
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, self.bb_cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
        # and, for example, a worker thread doesn't just exit on its own in response
        # to a SystemExit event.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
        finally:
            bb.event.LogHandler.filter = origfilter

class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        self.progress_chunk = int(max(self.toparse / 100, 1))

        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                 multiprocessing.cpu_count()), len(self.willparse))

        self.start()
        self.haveshutdown = False

    def start(self):
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                Parser.bb_cache = self.bb_cache
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()

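            # chunkify() below splits the pending recipes round-robin into one slice per
            # worker, e.g. chunkify([a, b, c, d, e], 2) -> [[a, c, e], [b, d]] (illustrative).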
            def chunkify(lst,n):
                return [lst[i::n] for i in range(n)]
            self.jobs = chunkify(self.willparse, self.num_processes)

            for i in range(0, self.num_processes):
                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            for process in self.processes:
                self.parser_quit.put(None)
        else:
            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

        # Clean up the queue before calling process.join(), otherwise there might be
        # deadlocks.
        while True:
            try:
                self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()

        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def load_cached(self):
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends)
            yield not cached, infos

    def parse_generator(self):
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

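    # parse_next() is driven repeatedly from the cooker's updateCache() method; it returns
    # True while there are more results to consume and False once parsing has finished or failed.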
    def parse_next(self):
        result = []
        parsed = None
        try:
            parsed, result = next(self.results)
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                   parsed=parsed, watcher = self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
        infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
        for vfn, info_array in infos:
            (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
            self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)