#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

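        # Idle callback: poll both inotify notifiers so configuration and
        # recipe file changes are noticed while the server is otherwise idle.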
        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

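    # Add inotify watches for the entries in 'deps' (watching each file's
    # parent directory unless 'dirs' is set), walking up to the nearest
    # existing parent directory when a path does not exist yet.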
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

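        # Resolve each expanded target to a providing recipe and the task to
        # run, building runlist entries of [mc, target, taskname, filename].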
        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

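        # Walk every task in the runqueue, recording recipe metadata, task
        # dependencies and package-level runtime dependencies for each one.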
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600739 for tid in rq.rqdata.runtaskentries:
740 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
741 pn = self.recipecaches[mc].pkg_fn[taskfn]
742 pn = self.add_mc_prefix(mc, pn)
743 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500744 if pn not in depend_tree["pn"]:
745 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600746 depend_tree["pn"][pn]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500747 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600748 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500749
750 # if we have extra caches, list all attributes they bring in
751 extra_info = []
752 for cache_class in self.caches_array:
753 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
754 cachefields = getattr(cache_class, 'cachefields', [])
755 extra_info = extra_info + cachefields
756
757 # for all attributes stored, add them to the dependency tree
758 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600759 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500760
761
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500762 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
763 if not dotname in depend_tree["tdepends"]:
764 depend_tree["tdepends"][dotname] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600765 for dep in rq.rqdata.runtaskentries[tid].depends:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800766 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
767 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600768 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
769 if taskfn not in seen_fns:
770 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500771 packages = []
772
773 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600774 for dep in taskdata[mc].depids[taskfn]:
775 depend_tree["depends"][pn].append(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500776
777 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600778 for rdep in taskdata[mc].rdepids[taskfn]:
779 depend_tree["rdepends-pn"][pn].append(rdep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500780
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600781 rdepends = self.recipecaches[mc].rundeps[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500782 for package in rdepends:
783 depend_tree["rdepends-pkg"][package] = []
784 for rdepend in rdepends[package]:
785 depend_tree["rdepends-pkg"][package].append(rdepend)
786 packages.append(package)
787
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600788 rrecs = self.recipecaches[mc].runrecs[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500789 for package in rrecs:
790 depend_tree["rrecs-pkg"][package] = []
791 for rdepend in rrecs[package]:
792 depend_tree["rrecs-pkg"][package].append(rdepend)
793 if not package in packages:
794 packages.append(package)
795
796 for package in packages:
797 if package not in depend_tree["packages"]:
798 depend_tree["packages"][package] = {}
799 depend_tree["packages"][package]["pn"] = pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600800 depend_tree["packages"][package]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500801 depend_tree["packages"][package]["version"] = version
802
803 return depend_tree
804
805 ######## WARNING : this function requires cache_extra to be enabled ########
806 def generatePkgDepTreeData(self, pkgs_to_build, task):
807 """
808 Create a dependency tree of pkgs_to_build, returning the data.
809 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500810 if not task.startswith("do_"):
811 task = "do_%s" % task
812
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500813 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500814
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600815 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500816 depend_tree = {}
817 depend_tree["depends"] = {}
818 depend_tree["pn"] = {}
819 depend_tree["rdepends-pn"] = {}
820 depend_tree["rdepends-pkg"] = {}
821 depend_tree["rrecs-pkg"] = {}
822
823 # if we have extra caches, list all attributes they bring in
824 extra_info = []
825 for cache_class in self.caches_array:
826 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
827 cachefields = getattr(cache_class, 'cachefields', [])
828 extra_info = extra_info + cachefields
829
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600830 tids = []
831 for mc in taskdata:
832 for tid in taskdata[mc].taskentries:
833 tids.append(tid)
834
835 for tid in tids:
836 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
837
838 pn = self.recipecaches[mc].pkg_fn[taskfn]
839 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500840
841 if pn not in depend_tree["pn"]:
842 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600843 depend_tree["pn"][pn]["filename"] = taskfn
844 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500845 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600846 rdepends = self.recipecaches[mc].rundeps[taskfn]
847 rrecs = self.recipecaches[mc].runrecs[taskfn]
848 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500849
850 # for all extra attributes stored, add them to the dependency tree
851 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600852 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500853
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600854 if taskfn not in seen_fns:
855 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500856
857 depend_tree["depends"][pn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500858 for dep in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500859 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600860 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
861 fn_provider = taskdata[mc].build_targets[dep][0]
862 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500863 else:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500864 pn_provider = dep
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600865 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500866 depend_tree["depends"][pn].append(pn_provider)
867
868 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600869 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500870 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600871 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
872 fn_rprovider = taskdata[mc].run_targets[rdep][0]
873 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500874 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600875 pn_rprovider = rdep
876 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500877 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
878
879 depend_tree["rdepends-pkg"].update(rdepends)
880 depend_tree["rrecs-pkg"].update(rrecs)
881
882 return depend_tree
883
884 def generateDepTreeEvent(self, pkgs_to_build, task):
885 """
886 Create a task dependency graph of pkgs_to_build.
887 Generate an event with the result
888 """
889 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
890 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
891
892 def generateDotGraphFiles(self, pkgs_to_build, task):
893 """
894 Create a task dependency graph of pkgs_to_build.
895 Save the result to a set of .dot files.
896 """
897
898 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
899
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500900 with open('pn-buildlist', 'w') as f:
901 for pn in depgraph["pn"]:
902 f.write(pn + "\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500903 logger.info("PN build list saved to 'pn-buildlist'")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500904
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500905 # Remove old format output files to ensure no confusion with stale data
906 try:
907 os.unlink('pn-depends.dot')
908 except FileNotFoundError:
909 pass
910 try:
911 os.unlink('package-depends.dot')
912 except FileNotFoundError:
913 pass
Brad Bishop79641f22019-09-10 07:20:22 -0400914 try:
915 os.unlink('recipe-depends.dot')
916 except FileNotFoundError:
917 pass
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500918
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500919 with open('task-depends.dot', 'w') as f:
920 f.write("digraph depends {\n")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400921 for task in sorted(depgraph["tdepends"]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500922 (pn, taskname) = task.rsplit(".", 1)
923 fn = depgraph["pn"][pn]["filename"]
924 version = depgraph["pn"][pn]["version"]
925 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400926 for dep in sorted(depgraph["tdepends"][task]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500927 f.write('"%s" -> "%s"\n' % (task, dep))
928 f.write("}\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500929 logger.info("Task dependencies saved to 'task-depends.dot'")
930
931 def show_appends_with_no_recipes(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500932 appends_without_recipes = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500933 # Determine which bbappends haven't been applied
Andrew Geissler5a43b432020-06-13 10:46:56 -0500934 for mc in self.multiconfigs:
935 # First get list of recipes, including skipped
936 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
937 recipefns.extend(self.skiplist.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500938
Andrew Geissler5a43b432020-06-13 10:46:56 -0500939 # Work out list of bbappends that have been applied
940 applied_appends = []
941 for fn in recipefns:
942 applied_appends.extend(self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500943
Andrew Geissler5a43b432020-06-13 10:46:56 -0500944 appends_without_recipes[mc] = []
945 for _, appendfn in self.collections[mc].bbappends:
946 if not appendfn in applied_appends:
947 appends_without_recipes[mc].append(appendfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500948
Andrew Geissler5a43b432020-06-13 10:46:56 -0500949 msgs = []
950 for mc in sorted(appends_without_recipes.keys()):
951 if appends_without_recipes[mc]:
952 msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default',
953 '\n '.join(appends_without_recipes[mc])))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500954
Andrew Geissler5a43b432020-06-13 10:46:56 -0500955 if msgs:
956 msg = "\n".join(msgs)
957 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
958 False) or "no"
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500959 if warn_only.lower() in ("1", "yes", "true"):
960 bb.warn(msg)
961 else:
962 bb.fatal(msg)
963
964 def handlePrefProviders(self):
965
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600966 for mc in self.multiconfigs:
967 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600968 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500969
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600970 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500971 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600972 try:
973 (providee, provider) = p.split(':')
974 except:
975 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
976 continue
977 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
978 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
979 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500980
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500981 def findConfigFilePath(self, configfile):
982 """
983 Find the location on disk of configfile and if it exists and was parsed by BitBake
984 emit the ConfigFilePathFound event with the path to the file.
985 """
986 path = bb.cookerdata.findConfigFile(configfile, self.data)
987 if not path:
988 return
989
990 # Generate a list of parsed configuration files by searching the files
991 # listed in the __depends and __base_depends variables with a .conf suffix.
992 conffiles = []
993 dep_files = self.data.getVar('__base_depends', False) or []
994 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
995
996 for f in dep_files:
997 if f[0].endswith(".conf"):
998 conffiles.append(f[0])
999
1000 _, conf, conffile = path.rpartition("conf/")
1001 match = os.path.join(conf, conffile)
1002 # Try and find matches for conf/conffilename.conf as we don't always
1003 # have the full path to the file.
1004 for cfg in conffiles:
1005 if cfg.endswith(match):
1006 bb.event.fire(bb.event.ConfigFilePathFound(path),
1007 self.data)
1008 break
1009
1010 def findFilesMatchingInDir(self, filepattern, directory):
1011 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001012 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001013 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1014 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1015 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001016 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001017 """
1018
1019 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001020 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001021 for path in bbpaths:
1022 dirpath = os.path.join(path, directory)
1023 if os.path.exists(dirpath):
1024 for root, dirs, files in os.walk(dirpath):
1025 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001026 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001027 matches.append(f)
1028
1029 if matches:
1030 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1031
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001032 def findProviders(self, mc=''):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001033 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001034
1035 def findBestProvider(self, pn, mc=''):
1036 if pn in self.recipecaches[mc].providers:
1037 filenames = self.recipecaches[mc].providers[pn]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001038 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001039 filename = eligible[0]
1040 return None, None, None, filename
1041 elif pn in self.recipecaches[mc].pkg_pn:
Andrew Geissler82c905d2020-04-13 13:39:40 -05001042 return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001043 else:
1044 return None, None, None, None
1045
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001046 def findConfigFiles(self, varname):
1047 """
1048 Find config files which are appropriate values for varname.
1049 i.e. MACHINE, DISTRO
1050 """
1051 possible = []
1052 var = varname.lower()
1053
1054 data = self.data
1055 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001056 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001057 for path in bbpaths:
1058 confpath = os.path.join(path, "conf", var)
1059 if os.path.exists(confpath):
1060 for root, dirs, files in os.walk(confpath):
1061 # get all child files, these are appropriate values
1062 for f in files:
1063 val, sep, end = f.rpartition('.')
1064 if end == 'conf':
1065 possible.append(val)
1066
1067 if possible:
1068 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1069
1070 def findInheritsClass(self, klass):
1071 """
1072 Find all recipes which inherit the specified class
1073 """
1074 pkg_list = []
1075
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001076 for pfn in self.recipecaches[''].pkg_fn:
1077 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001078 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001079 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001080
1081 return pkg_list
1082
1083 def generateTargetsTree(self, klass=None, pkgs=None):
1084 """
1085 Generate a dependency tree of buildable targets
1086 Generate an event with the result
1087 """
1088 # if the caller hasn't specified a pkgs list default to universe
1089 if not pkgs:
1090 pkgs = ['universe']
1091 # if inherited_class passed ensure all recipes which inherit the
1092 # specified class are included in pkgs
1093 if klass:
1094 extra_pkgs = self.findInheritsClass(klass)
1095 pkgs = pkgs + extra_pkgs
1096
1097 # generate a dependency tree for all our packages
1098 tree = self.generatePkgDepTreeData(pkgs, 'build')
1099 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1100
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001101 def interactiveMode( self ):
1102 """Drop off into a shell"""
1103 try:
1104 from bb import shell
1105 except ImportError:
1106 parselog.exception("Interactive mode not available")
1107 sys.exit(1)
1108 else:
1109 shell.start( self )
1110
1111
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001112 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001113 """Handle collections"""
1114 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001115 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001116 if collections:
1117 collection_priorities = {}
1118 collection_depends = {}
1119 collection_list = collections.split()
1120 min_prio = 0
1121 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001122 bb.debug(1,'Processing %s in collection list' % (c))
1123
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001124 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001125 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001126 if priority:
1127 try:
1128 prio = int(priority)
1129 except ValueError:
1130 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1131 errors = True
1132 if min_prio == 0 or prio < min_prio:
1133 min_prio = prio
1134 collection_priorities[c] = prio
1135 else:
1136 collection_priorities[c] = None
1137
1138 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001139 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001140 if deps:
1141 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001142 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001143 except bb.utils.VersionStringException as vse:
1144 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001145 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001146 if dep in collection_list:
1147 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001148 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001149 (op, depver) = opstr.split()
1150 if layerver:
1151 try:
1152 res = bb.utils.vercmp_string_op(layerver, depver, op)
1153 except bb.utils.VersionStringException as vse:
1154 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1155 if not res:
1156 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1157 errors = True
1158 else:
1159 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1160 errors = True
1161 else:
1162 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1163 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001164 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001165 else:
1166 collection_depends[c] = []
1167
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001168 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001169 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001170 if recs:
1171 try:
1172 recDict = bb.utils.explode_dep_versions2(recs)
1173 except bb.utils.VersionStringException as vse:
1174 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1175 for rec, oplist in list(recDict.items()):
1176 if rec in collection_list:
1177 if oplist:
1178 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001179 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001180 if layerver:
1181 (op, recver) = opstr.split()
1182 try:
1183 res = bb.utils.vercmp_string_op(layerver, recver, op)
1184 except bb.utils.VersionStringException as vse:
1185 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1186 if not res:
1187 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1188 continue
1189 else:
1190 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1191 continue
1192 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1193 collection_depends[c].append(rec)
1194 else:
1195 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1196
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001197 # Recursively work out collection priorities based on dependencies
1198 def calc_layer_priority(collection):
1199 if not collection_priorities[collection]:
1200 max_depprio = min_prio
1201 for dep in collection_depends[collection]:
1202 calc_layer_priority(dep)
1203 depprio = collection_priorities[dep]
1204 if depprio > max_depprio:
1205 max_depprio = depprio
1206 max_depprio += 1
1207 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1208 collection_priorities[collection] = max_depprio
1209
1210 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1211 for c in collection_list:
1212 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001213 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001214 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1216 errors = True
1217 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001218 elif regex == "":
1219 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001220 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001221 errors = False
1222 else:
1223 try:
1224 cre = re.compile(regex)
1225 except re.error:
1226 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1227 errors = True
1228 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001229 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001230 if errors:
1231 # We've already printed the actual error(s)
1232 raise CollectionError("Errors during parsing layer configuration")
1233
1234 def buildSetVars(self):
1235 """
1236 Setup any variables needed before starting a build
1237 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001238 t = time.gmtime()
1239 for mc in self.databuilder.mcdata:
1240 ds = self.databuilder.mcdata[mc]
1241 if not ds.getVar("BUILDNAME", False):
1242 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1243 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1244 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1245 ds.setVar("TIME", time.strftime('%H%M%S', t))
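            # Illustrative values (assuming a build started 2020-06-13 10:46:56 UTC):
            #   DATE = "20200613", TIME = "104656", BUILDSTART = "06/13/2020 10:46:56",
            # so the default BUILDNAME of "${DATE}${TIME}" expands to "20200613104656".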
1246
1247 def reset_mtime_caches(self):
1248 """
1249        Reset mtime caches - this is particularly important when memory resident, since
1250        something which is cached may well have changed since the last invocation (e.g. a
1251        file associated with a recipe might have been modified by the user).
1252 """
1253 build.reset_cache()
1254 bb.fetch._checksum_cache.mtime_cache.clear()
1255 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1256 if siggen_cache:
1257 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001258
Andrew Geissler5a43b432020-06-13 10:46:56 -05001259 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001260 """
1261 Find the .bb files which match the expression in 'buildfile'.
1262 """
1263 if bf.startswith("/") or bf.startswith("../"):
1264 bf = os.path.abspath(bf)
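        # 'bf' may be an exact path or a regular expression: the os.stat() below tries the
        # literal path first and only falls back to regexp matching against the collected
        # file list. Illustrative (hypothetical recipe name): bitbake -b "core-image.*\.bb"
        # matches any collected recipe whose path contains that pattern.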
1265
Andrew Geissler5a43b432020-06-13 10:46:56 -05001266 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1267 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001268 try:
1269 os.stat(bf)
1270 bf = os.path.abspath(bf)
1271 return [bf]
1272 except OSError:
1273 regexp = re.compile(bf)
1274 matches = []
1275 for f in filelist:
1276 if regexp.search(f) and os.path.isfile(f):
1277 matches.append(f)
1278 return matches
1279
Andrew Geissler5a43b432020-06-13 10:46:56 -05001280 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001281 """
1282 Find the .bb file which matches the expression in 'buildfile'.
1283        Raise an error if multiple files match.
1284 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001285 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001286 if len(matches) != 1:
1287 if matches:
1288 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1289 if matches:
1290 for f in matches:
1291 msg += "\n %s" % f
1292 parselog.error(msg)
1293 else:
1294 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1295 raise NoSpecificMatch
1296 return matches[0]
1297
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001298 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001299 """
1300 Build the file matching regexp buildfile
1301 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001302 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001303
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001304 # Too many people use -b because they think it's how you normally
1305 # specify a target to be built, so show a warning
1306 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1307
1308 self.buildFileInternal(buildfile, task)
1309
1310 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1311 """
1312 Build the file matching regexp buildfile
1313 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001314
1315 # Parse the configuration here. We need to do it explicitly here since
1316 # buildFile() doesn't use the cache
1317 self.parseConfiguration()
1318
1319        # If no task was specified (task is None), fall back to the configured default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001320 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001321 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001322 if not task.startswith("do_"):
1323 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001324
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001325 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
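        # A "virtual" filename encodes class extension and multiconfig; illustrative examples
        # (hypothetical paths/multiconfig name):
        #   "virtual:native:/path/foo_1.0.bb" -> fn="/path/foo_1.0.bb", cls="native", mc=""
        #   "mc:mymc:/path/foo_1.0.bb"        -> fn="/path/foo_1.0.bb", cls="",       mc="mymc"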
Andrew Geissler5a43b432020-06-13 10:46:56 -05001326 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001327
1328 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001329 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001330
Andrew Geissler5a43b432020-06-13 10:46:56 -05001331 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001332
Andrew Geissler5a43b432020-06-13 10:46:56 -05001333 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001334 infos = dict(infos)
1335
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001336 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001337 try:
1338 info_array = infos[fn]
1339 except KeyError:
1340 bb.fatal("%s does not exist" % fn)
1341
1342 if info_array[0].skipped:
1343 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1344
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001345 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001346
1347 # Tweak some variables
1348 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001349 self.recipecaches[mc].ignored_dependencies = set()
1350 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001351 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001352
1353 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001354 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1355 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001356 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1357 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001358
1359 # Invalidate task for target if force mode active
1360 if self.configuration.force:
1361 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001362 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001363
1364 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001365 taskdata = {}
1366 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001367 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001368
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001369 if quietlog:
1370 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1371 bb.runqueue.logger.setLevel(logging.WARNING)
1372
1373 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1374 if fireevents:
1375 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001376
1377 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001378 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001379
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001380 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
1382 def buildFileIdle(server, rq, abort):
1383
1384 msg = None
1385 interrupted = 0
1386 if abort or self.state == state.forceshutdown:
1387 rq.finish_runqueue(True)
1388 msg = "Forced shutdown"
1389 interrupted = 2
1390 elif self.state == state.shutdown:
1391 rq.finish_runqueue(False)
1392 msg = "Stopped build"
1393 interrupted = 1
1394 failures = 0
1395 try:
1396 retval = rq.execute_runqueue()
1397 except runqueue.TaskFailure as exc:
1398 failures += len(exc.args)
1399 retval = False
1400 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001401 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001402 if quietlog:
1403 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001404 return False
1405
1406 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001407 if fireevents:
1408 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001409 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001410 # We trashed self.recipecaches above
1411 self.parsecache_valid = False
1412 self.configuration.limited_deps = False
1413 bb.parse.siggen.reset(self.data)
1414 if quietlog:
1415 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001416 return False
1417 if retval is True:
1418 return True
1419 return retval
1420
1421 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1422
1423 def buildTargets(self, targets, task):
1424 """
1425 Attempt to build the targets specified
1426 """
1427
1428 def buildTargetsIdle(server, rq, abort):
1429 msg = None
1430 interrupted = 0
1431 if abort or self.state == state.forceshutdown:
1432 rq.finish_runqueue(True)
1433 msg = "Forced shutdown"
1434 interrupted = 2
1435 elif self.state == state.shutdown:
1436 rq.finish_runqueue(False)
1437 msg = "Stopped build"
1438 interrupted = 1
1439 failures = 0
1440 try:
1441 retval = rq.execute_runqueue()
1442 except runqueue.TaskFailure as exc:
1443 failures += len(exc.args)
1444 retval = False
1445 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001446 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001447 return False
1448
1449 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001450 try:
1451 for mc in self.multiconfigs:
1452 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1453 finally:
1454 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001455 return False
1456 if retval is True:
1457 return True
1458 return retval
1459
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001460 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001461 self.buildSetVars()
1462
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001463        # If no task was specified (task is None), fall back to the configured default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001464 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001465 task = self.configuration.cmd
1466
1467 if not task.startswith("do_"):
1468 task = "do_%s" % task
1469
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001470 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
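        # Illustrative (hypothetical targets): with task "do_build",
        #   ["busybox", "zlib:do_fetch"] -> ["busybox:do_build", "zlib:do_fetch"]
        # i.e. targets without an explicit task get the current default task appended.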
1471
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001472 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001473
1474 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001475
1476 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001477
1478 # make targets to always look as <target>:do_<task>
1479 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001480 for target in runlist:
1481 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001482 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001483 ntargets.append("%s:%s" % (target[1], target[2]))
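        # Each runlist entry holds [mc, target, taskname, fn]; illustrative ntargets entries
        # (hypothetical multiconfig "mc1"): "mc:mc1:busybox:do_build" and "busybox:do_build".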
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001484
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001485 for mc in self.multiconfigs:
1486 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001487
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001488 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001489 if 'universe' in targets:
1490 rq.rqdata.warn_multi_bb = True
1491
1492 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1493
1494
1495 def getAllKeysWithFlags(self, flaglist):
1496 dump = {}
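        # Illustrative shape of the returned dump (hypothetical variable and flag values):
        #   dump["DESCRIPTION"] = {"v": "...", "history": [...], "doc": "..."}
        # where each flag name in 'flaglist' (e.g. "doc") becomes a key, or None if unset.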
1497 for k in self.data.keys():
1498 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001499 expand = True
1500 flags = self.data.getVarFlags(k)
1501 if flags and "func" in flags and "python" in flags:
1502 expand = False
1503 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001504 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1505 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001506 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001507 'history' : self.data.varhistory.variable(k),
1508 }
1509 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001510 if flags and d in flags:
1511 dump[k][d] = flags[d]
1512 else:
1513 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514 except Exception as e:
1515 print(e)
1516 return dump
1517
1518
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001519 def updateCacheSync(self):
1520 if self.state == state.running:
1521 return
1522
1523 # reload files for which we got notifications
1524 for p in self.inotify_modified_files:
1525 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001526 if p in bb.parse.BBHandler.cached_statements:
1527 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001528 self.inotify_modified_files = []
1529
1530 if not self.baseconfig_valid:
1531 logger.debug(1, "Reloading base configuration data")
1532 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001533 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001534
1535 # This is called for all async commands when self.state != running
1536 def updateCache(self):
1537 if self.state == state.running:
1538 return
1539
1540 if self.state in (state.shutdown, state.forceshutdown, state.error):
1541 if hasattr(self.parser, 'shutdown'):
1542 self.parser.shutdown(clean=False, force = True)
1543 raise bb.BBHandledException()
1544
1545 if self.state != state.parsing:
1546 self.updateCacheSync()
1547
1548 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001549 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001550 self.parseConfiguration ()
1551 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001552 for mc in self.multiconfigs:
1553 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001554
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001555 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001556 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 for dep in self.configuration.extra_assume_provided:
1560 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001561
Andrew Geissler5a43b432020-06-13 10:46:56 -05001562 self.collections = {}
1563
1564 mcfilelist = {}
1565 total_masked = 0
1566 searchdirs = set()
1567 for mc in self.multiconfigs:
1568 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1569 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1570
1571 mcfilelist[mc] = filelist
1572 total_masked += masked
1573 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001574
1575 # Add inotify watches for directories searched for bb/bbappend files
1576 for dirent in searchdirs:
1577 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001578
Andrew Geissler5a43b432020-06-13 10:46:56 -05001579 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001580 self.parsecache_valid = True
1581
1582 self.state = state.parsing
1583
1584 if not self.parser.parse_next():
1585 collectlog.debug(1, "parsing complete")
1586 if self.parser.error:
1587 raise bb.BBHandledException()
1588 self.show_appends_with_no_recipes()
1589 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001590 for mc in self.multiconfigs:
Andrew Geissler5a43b432020-06-13 10:46:56 -05001591 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001592 self.state = state.running
1593
1594 # Send an event listing all stamps reachable after parsing
1595 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001596 for mc in self.multiconfigs:
1597 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1598 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001599 return None
1600
1601 return True
1602
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001603 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604
1605 # Return a copy, don't modify the original
1606 pkgs_to_build = pkgs_to_build[:]
1607
1608 if len(pkgs_to_build) == 0:
1609 raise NothingToBuild
1610
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001611 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612        for pkg in pkgs_to_build[:]:  # iterate over a copy, entries may be replaced below
1613 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001614 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001615 if pkg.startswith("multiconfig:"):
1616 pkgs_to_build.remove(pkg)
1617 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
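                # Illustrative (hypothetical multiconfig name): the deprecated prefix is
                # rewritten in place, e.g. "multiconfig:musl:busybox" -> "mc:musl:busybox".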
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618
1619 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001620 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001621 for mc in self.multiconfigs:
1622 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1623 for t in self.recipecaches[mc].world_target:
1624 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001625 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001626 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001627
1628 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001629 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001630 parselog.debug(1, "collating packages for \"universe\"")
1631 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001632 for mc in self.multiconfigs:
1633 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001634 if task:
1635 foundtask = False
1636 for provider_fn in self.recipecaches[mc].providers[t]:
1637 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1638 foundtask = True
1639 break
1640 if not foundtask:
1641 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1642 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001643 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001644 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001645 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001646
1647 return pkgs_to_build
1648
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001650 # We now are in our own process so we can call this here.
1651 # PRServ exits if its parent process exits
1652 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001653 return
1654
1655 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001656 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001657 if self.hashserv:
1658 self.hashserv.process.terminate()
1659 self.hashserv.process.join()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001660 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001661
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001662 def shutdown(self, force = False):
1663 if force:
1664 self.state = state.forceshutdown
1665 else:
1666 self.state = state.shutdown
1667
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001668 if self.parser:
1669 self.parser.shutdown(clean=not force, force=force)
1670
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001671 def finishcommand(self):
1672 self.state = state.initial
1673
1674 def reset(self):
1675 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001676 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001677
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001678 def clientComplete(self):
1679 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001680 self.finishcommand()
1681 self.extraconfigdata = {}
1682 self.command.reset()
1683 self.databuilder.reset()
1684 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001685 self.parsecache_valid = False
1686 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001687
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001688
1689class CookerExit(bb.event.Event):
1690 """
1691 Notify clients of the Cooker shutdown
1692 """
1693
1694 def __init__(self):
1695 bb.event.Event.__init__(self)
1696
1697
1698class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001699 def __init__(self, priorities, mc=''):
1700 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001701 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001702        # Priorities is a list of tuples, with the second element as the pattern.
1703 # We need to sort the list with the longest pattern first, and so on to
1704 # the shortest. This allows nested layers to be properly evaluated.
1705 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
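        # Illustrative (hypothetical patterns): after the reverse sort, a nested layer's
        # pattern such as "^/srv/layers/meta-foo/meta-foo-bsp/" is tested before its
        # parent's "^/srv/layers/meta-foo/", so nested recipes pick up the nested priority.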
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001706
1707 def calc_bbfile_priority( self, filename, matched = None ):
1708 for _, _, regex, pri in self.bbfile_config_priorities:
1709 if regex.match(filename):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001710 if matched is not None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001711 if not regex in matched:
1712 matched.add(regex)
1713 return pri
1714 return 0
1715
1716 def get_bbfiles(self):
1717 """Get list of default .bb files by reading out the current directory"""
1718 path = os.getcwd()
1719 contents = os.listdir(path)
1720 bbfiles = []
1721 for f in contents:
1722 if f.endswith(".bb"):
1723 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1724 return bbfiles
1725
1726 def find_bbfiles(self, path):
1727 """Find all the .bb and .bbappend files in a directory"""
1728 found = []
1729 for dir, dirs, files in os.walk(path):
1730 for ignored in ('SCCS', 'CVS', '.svn'):
1731 if ignored in dirs:
1732 dirs.remove(ignored)
1733                found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1734
1735 return found
1736
1737 def collect_bbfiles(self, config, eventdata):
1738 """Collect all available .bb build files"""
1739 masked = 0
1740
1741 collectlog.debug(1, "collecting .bb files")
1742
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001743 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001744 config.setVar("BBFILES", " ".join(files))
1745
1746 # Sort files by priority
1747 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1748
1749 if not len(files):
1750 files = self.get_bbfiles()
1751
1752 if not len(files):
1753 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1754 bb.event.fire(CookerExit(), eventdata)
1755
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001756 # We need to track where we look so that we can add inotify watches. There
1757 # is no nice way to do this, this is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001758 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001759 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001760 if hasattr(os, 'scandir'):
1761 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001762 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001763
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001764 def ourlistdir(d):
1765 searchdirs.append(d)
1766 return origlistdir(d)
1767
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001768 def ourscandir(d):
1769 searchdirs.append(d)
1770 return origscandir(d)
1771
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001772 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001773 if hasattr(os, 'scandir'):
1774 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001775 try:
1776 # Can't use set here as order is important
1777 newfiles = []
1778 for f in files:
1779 if os.path.isdir(f):
1780 dirfiles = self.find_bbfiles(f)
1781 for g in dirfiles:
1782 if g not in newfiles:
1783 newfiles.append(g)
1784 else:
1785 globbed = glob.glob(f)
1786 if not globbed and os.path.exists(f):
1787 globbed = [f]
1788 # glob gives files in order on disk. Sort to be deterministic.
1789 for g in sorted(globbed):
1790 if g not in newfiles:
1791 newfiles.append(g)
1792 finally:
1793 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001794 if hasattr(os, 'scandir'):
1795 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001796
1797 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001798
1799 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001800 # First validate the individual regular expressions and ignore any
1801 # that do not compile
1802 bbmasks = []
1803 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001804 # When constructing an older style single regex, it's possible for BBMASK
1805 # to end up beginning with '|', which matches and masks _everything_.
1806 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001807 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001808 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001809 try:
1810 re.compile(mask)
1811 bbmasks.append(mask)
1812 except sre_constants.error:
1813 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1814
1815 # Then validate the combined regular expressions. This should never
1816 # fail, but better safe than sorry...
1817 bbmask = "|".join(bbmasks)
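            # Illustrative (hypothetical mask entries): BBMASK = "meta-foo/skipped/ \.bbappend$"
            # combines into the single expression "meta-foo/skipped/|\.bbappend$" used below.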
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001818 try:
1819 bbmask_compiled = re.compile(bbmask)
1820 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001821 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1822 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001823
1824 bbfiles = []
1825 bbappend = []
1826 for f in newfiles:
1827 if bbmask and bbmask_compiled.search(f):
1828 collectlog.debug(1, "skipping masked file %s", f)
1829 masked += 1
1830 continue
1831 if f.endswith('.bb'):
1832 bbfiles.append(f)
1833 elif f.endswith('.bbappend'):
1834 bbappend.append(f)
1835 else:
1836 collectlog.debug(1, "skipping %s: unknown file extension", f)
1837
1838 # Build a list of .bbappend files for each .bb file
1839 for f in bbappend:
1840 base = os.path.basename(f).replace('.bbappend', '.bb')
1841 self.bbappends.append((base, f))
1842
1843 # Find overlayed recipes
1844 # bbfiles will be in priority order which makes this easy
1845 bbfile_seen = dict()
1846 self.overlayed = defaultdict(list)
1847 for f in reversed(bbfiles):
1848 base = os.path.basename(f)
1849 if base not in bbfile_seen:
1850 bbfile_seen[base] = f
1851 else:
1852 topfile = bbfile_seen[base]
1853 self.overlayed[topfile].append(f)
1854
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001855 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001856
1857 def get_file_appends(self, fn):
1858 """
1859 Returns a list of .bbappend files to apply to fn
1860        Returns the .bbappend files (as a tuple) to apply to fn
1861 filelist = []
1862 f = os.path.basename(fn)
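        # The '%' wildcard matches the remainder of a version; illustrative example:
        # "busybox_%.bbappend" applies to "busybox_1.31.1.bb" as well as "busybox_1.32.0.bb".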
1863 for b in self.bbappends:
1864 (bbappend, filename) = b
1865 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1866 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001867 return tuple(filelist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001868
1869 def collection_priorities(self, pkgfns, d):
1870
1871 priorities = {}
1872
1873 # Calculate priorities for each file
1874 matched = set()
1875 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001876 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001877 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1878
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001879 unmatched = set()
1880 for _, _, regex, pri in self.bbfile_config_priorities:
1881 if not regex in matched:
1882 unmatched.add(regex)
1883
Brad Bishop316dfdd2018-06-25 12:45:53 -04001884 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1885 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001886 for b in self.bbappends:
1887 (bbfile, append) = b
1888 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001889                    # If the bbappend is already matched by a regex in the "matched" set, return False
1890 for matched_regex in matched:
1891 if matched_regex.match(append):
1892 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001893 return True
1894 return False
1895
1896 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001897 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001898 unmatched.remove(unmatch)
1899
1900 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1901 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001902 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05001903 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1904 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001905
1906 return priorities
1907
1908class ParsingFailure(Exception):
1909 def __init__(self, realexception, recipe):
1910 self.realexception = realexception
1911 self.recipe = recipe
1912 Exception.__init__(self, realexception, recipe)
1913
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001914class Parser(multiprocessing.Process):
1915 def __init__(self, jobs, results, quit, init, profile):
1916 self.jobs = jobs
1917 self.results = results
1918 self.quit = quit
1919 self.init = init
1920 multiprocessing.Process.__init__(self)
1921 self.context = bb.utils.get_context().copy()
1922 self.handlers = bb.event.get_class_handlers().copy()
1923 self.profile = profile
1924
1925 def run(self):
1926
1927 if not self.profile:
1928 self.realrun()
1929 return
1930
1931 try:
1932 import cProfile as profile
1933 except:
1934 import profile
1935 prof = profile.Profile()
1936 try:
1937 profile.Profile.runcall(prof, self.realrun)
1938 finally:
1939 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1940 prof.dump_stats(logfile)
1941
1942 def realrun(self):
1943 if self.init:
1944 self.init()
1945
1946 pending = []
1947 while True:
1948 try:
1949 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001950 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001951 pass
1952 else:
1953 self.results.cancel_join_thread()
1954 break
1955
1956 if pending:
1957 result = pending.pop()
1958 else:
1959 try:
Brad Bishop19323692019-04-05 15:28:33 -04001960 job = self.jobs.pop()
1961 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001962 break
1963 result = self.parse(*job)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001964                # Clear the siggen cache after parsing to control memory usage; it's huge
1965 bb.parse.siggen.postparsing_clean_cache()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001966 try:
1967 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001968 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001969 pending.append(result)
1970
Andrew Geissler5a43b432020-06-13 10:46:56 -05001971 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001972 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05001973 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001974 # Record the filename we're parsing into any events generated
1975 def parse_filter(self, record):
1976 record.taskpid = bb.event.worker_pid
1977 record.fn = filename
1978 return True
1979
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001980 # Reset our environment and handlers to the original settings
1981 bb.utils.set_context(self.context.copy())
1982 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001983 bb.event.LogHandler.filter = parse_filter
1984
Andrew Geissler5a43b432020-06-13 10:46:56 -05001985 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001986 except Exception as exc:
1987 tb = sys.exc_info()[2]
1988 exc.recipe = filename
1989 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1990 return True, exc
1991            # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
1992            # and, for example, a worker thread doesn't just exit on its own in response to
1993            # a SystemExit event.
1994 except BaseException as exc:
1995 return True, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001996 finally:
1997 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001998
1999class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002000 def __init__(self, cooker, mcfilelist, masked):
2001 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002 self.cooker = cooker
2003 self.cfgdata = cooker.data
2004 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002005 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002006
2007 # Accounting statistics
2008 self.parsed = 0
2009 self.cached = 0
2010 self.error = 0
2011 self.masked = masked
2012
2013 self.skipped = 0
2014 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002015
2016 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002017 self.process_names = []
2018
Andrew Geissler5a43b432020-06-13 10:46:56 -05002019 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2020 self.fromcache = set()
2021 self.willparse = set()
2022 for mc in self.cooker.multiconfigs:
2023 for filename in self.mcfilelist[mc]:
2024 appends = self.cooker.collections[mc].get_file_appends(filename)
2025 if not self.bb_caches[mc].cacheValid(filename, appends):
2026 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2027 else:
2028 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2029
2030 self.total = len(self.fromcache) + len(self.willparse)
2031 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002032 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002033
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002034 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002035 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002036
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002037 self.start()
2038 self.haveshutdown = False
2039
2040 def start(self):
2041 self.results = self.load_cached()
2042 self.processes = []
2043 if self.toparse:
2044 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2045 def init():
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002046 bb.utils.set_process_name(multiprocessing.current_process().name)
2047 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2048 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002049
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002050 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002051 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002052
2053 def chunkify(lst,n):
2054 return [lst[i::n] for i in range(n)]
Andrew Geissler5a43b432020-06-13 10:46:56 -05002055 self.jobs = chunkify(list(self.willparse), self.num_processes)
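        # chunkify() deals jobs out round-robin; illustrative: chunkify([1,2,3,4,5,6,7], 3)
        # yields [[1, 4, 7], [2, 5], [3, 6]], one slice per parser process.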
Brad Bishop19323692019-04-05 15:28:33 -04002056
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002057 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002058 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002059 parser.start()
2060 self.process_names.append(parser.name)
2061 self.processes.append(parser)
2062
2063 self.results = itertools.chain(self.results, self.parse_generator())
2064
2065 def shutdown(self, clean=True, force=False):
2066 if not self.toparse:
2067 return
2068 if self.haveshutdown:
2069 return
2070 self.haveshutdown = True
2071
2072 if clean:
2073 event = bb.event.ParseCompleted(self.cached, self.parsed,
2074 self.skipped, self.masked,
2075 self.virtuals, self.error,
2076 self.total)
2077
2078 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002079 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002080 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002081 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002082 self.parser_quit.cancel_join_thread()
2083 for process in self.processes:
2084 self.parser_quit.put(None)
2085
Brad Bishop08902b02019-08-20 09:16:51 -04002086        # Clean up the queue before calling process.join(), otherwise there might be
2087 # deadlocks.
2088 while True:
2089 try:
2090 self.result_queue.get(timeout=0.25)
2091 except queue.Empty:
2092 break
2093
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002094 for process in self.processes:
2095 if force:
2096 process.join(.1)
2097 process.terminate()
2098 else:
2099 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002100
Andrew Geissler5a43b432020-06-13 10:46:56 -05002101 def sync_caches():
2102 for c in self.bb_caches.values():
2103 c.sync()
2104
2105 sync = threading.Thread(target=sync_caches)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002106 sync.start()
2107 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002108 bb.codeparser.parser_cache_savemerge()
2109 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002110 if self.cooker.configuration.profile:
2111 profiles = []
2112 for i in self.process_names:
2113 logfile = "profile-parse-%s.log" % i
2114 if os.path.exists(logfile):
2115 profiles.append(logfile)
2116
2117 pout = "profile-parse.log.processed"
2118 bb.utils.process_profilelog(profiles, pout = pout)
2119 print("Processed parsing statistics saved to %s" % (pout))
2120
2121 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002122 for mc, cache, filename, appends in self.fromcache:
2123 cached, infos = cache.load(filename, appends)
2124 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002125
2126 def parse_generator(self):
2127 while True:
2128 if self.parsed >= self.toparse:
2129 break
2130
2131 try:
2132 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002133 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002134 pass
2135 else:
2136 value = result[1]
2137 if isinstance(value, BaseException):
2138 raise value
2139 else:
2140 yield result
2141
2142 def parse_next(self):
2143 result = []
2144 parsed = None
2145 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002146 parsed, mc, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002147 except StopIteration:
2148 self.shutdown()
2149 return False
2150 except bb.BBHandledException as exc:
2151 self.error += 1
2152 logger.error('Failed to parse recipe: %s' % exc.recipe)
2153 self.shutdown(clean=False)
2154 return False
2155 except ParsingFailure as exc:
2156 self.error += 1
2157 logger.error('Unable to parse %s: %s' %
2158 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2159 self.shutdown(clean=False)
2160 return False
2161 except bb.parse.ParseError as exc:
2162 self.error += 1
2163 logger.error(str(exc))
2164 self.shutdown(clean=False)
2165 return False
2166 except bb.data_smart.ExpansionError as exc:
2167 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002168 bbdir = os.path.dirname(__file__) + os.sep
2169 etype, value, _ = sys.exc_info()
2170 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2171 logger.error('ExpansionError during parsing %s', value.recipe,
2172 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002173 self.shutdown(clean=False)
2174 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002175 except Exception as exc:
2176 self.error += 1
2177 etype, value, tb = sys.exc_info()
2178 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002179 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002180 exc_info=(etype, value, exc.traceback))
2181 else:
2182 # Most likely, an exception occurred during raising an exception
2183 import traceback
2184 logger.error('Exception during parse: %s' % traceback.format_exc())
2185 self.shutdown(clean=False)
2186 return False
2187
2188 self.current += 1
2189 self.virtuals += len(result)
2190 if parsed:
2191 self.parsed += 1
2192 if self.parsed % self.progress_chunk == 0:
2193 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2194 self.cfgdata)
2195 else:
2196 self.cached += 1
2197
2198 for virtualfn, info_array in result:
2199 if info_array[0].skipped:
2200 self.skipped += 1
2201 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002202 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002203 parsed=parsed, watcher = self.cooker.add_filewatch)
2204 return True
2205
2206 def reparse(self, filename):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002207 to_reparse = set()
2208 for mc in self.cooker.multiconfigs:
2209 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2210
2211 for mc, filename, appends in to_reparse:
2212 infos = self.bb_caches[mc].parse(filename, appends)
2213 for vfn, info_array in infos:
2214 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)