#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


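# Note on the event log format produced by EventWriter below (a description of
# the code as written, for orientation): the first line of the file is a JSON
# object holding all variables under "allvariables", written once
# bb.event.BuildStarted or bb.cooker.CookerExit is seen; every following line
# is a JSON object carrying the event's class name and the pickled,
# base64-encoded event payload under "vars". Events arriving before the file
# is initialised are queued in memory and flushed once it is.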
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

328 def initConfigurationData(self):
329
330 self.state = state.initial
331 self.caches_array = []
332
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500333 # Need to preserve BB_CONSOLELOG over resets
334 consolelog = None
335 if hasattr(self, "data"):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500336 consolelog = self.data.getVar("BB_CONSOLELOG")
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500337
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500338 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
339 self.enableDataTracking()
340
341 all_extra_cache_names = []
342 # We hardcode all known cache types in a single place, here.
343 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
344 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
345
346 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
347
348 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
349 # This is the entry point, no further check needed!
350 for var in caches_name_array:
351 try:
352 module_name, cache_name = var.split(':')
353 module = __import__(module_name, fromlist=(cache_name,))
354 self.caches_array.append(getattr(module, cache_name))
355 except ImportError as exc:
356 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
357 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
358
359 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
360 self.databuilder.parseBaseConfiguration()
361 self.data = self.databuilder.data
362 self.data_hash = self.databuilder.data_hash
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500363 self.extraconfigdata = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500364
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500365 if consolelog:
366 self.data.setVar("BB_CONSOLELOG", consolelog)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500367
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500368 self.data.setVar('BB_CMDLINE', self.ui_cmdline)
369
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500370 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
371 self.disableDataTracking()
372
Brad Bishop15ae2502019-06-18 21:44:24 -0400373 for mc in self.databuilder.mcdata.values():
374 mc.renameVar("__depends", "__base_depends")
375 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500376
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500377 self.baseconfig_valid = True
378 self.parsecache_valid = False
379
    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

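        # When BB_HASHSERVE is set to "auto", the code below starts a local hash
        # equivalence server on a unix domain socket under TOPDIR (hashserve.sock),
        # backed by hashserv.db, and points the base, original and per-multiconfig
        # datastores at its address so later tasks can reach it.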
        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

Patrick Williamsc124f4f2015-09-15 14:41:29 -05001202 # Recursively work out collection priorities based on dependencies
1203 def calc_layer_priority(collection):
1204 if not collection_priorities[collection]:
1205 max_depprio = min_prio
1206 for dep in collection_depends[collection]:
1207 calc_layer_priority(dep)
1208 depprio = collection_priorities[dep]
1209 if depprio > max_depprio:
1210 max_depprio = depprio
1211 max_depprio += 1
1212 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1213 collection_priorities[collection] = max_depprio
1214
1215 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1216 for c in collection_list:
1217 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001218 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001219 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001220 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1221 errors = True
1222 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001223 elif regex == "":
1224 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001225 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001226 errors = False
1227 else:
1228 try:
1229 cre = re.compile(regex)
1230 except re.error:
1231 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1232 errors = True
1233 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001234 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001235 if errors:
1236 # We've already printed the actual error(s)
1237 raise CollectionError("Errors during parsing layer configuration")
1238
1239 def buildSetVars(self):
1240 """
1241 Setup any variables needed before starting a build
1242 """
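        # Take a single gmtime() sample so BUILDSTART, DATE and TIME stay consistent across all multiconfig datastores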
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001243 t = time.gmtime()
1244 for mc in self.databuilder.mcdata:
1245 ds = self.databuilder.mcdata[mc]
1246 if not ds.getVar("BUILDNAME", False):
1247 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1248 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1249 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1250 ds.setVar("TIME", time.strftime('%H%M%S', t))
1251
1252 def reset_mtime_caches(self):
1253 """
        Reset mtime caches - this is particularly important when memory resident as something
        which is cached may well have changed since the last invocation (e.g. a
        file associated with a recipe might have been modified by the user).
1257 """
1258 build.reset_cache()
1259 bb.fetch._checksum_cache.mtime_cache.clear()
1260 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1261 if siggen_cache:
1262 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001263
Andrew Geissler5a43b432020-06-13 10:46:56 -05001264 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001265 """
1266 Find the .bb files which match the expression in 'buildfile'.
1267 """
1268 if bf.startswith("/") or bf.startswith("../"):
1269 bf = os.path.abspath(bf)
1270
Andrew Geissler5a43b432020-06-13 10:46:56 -05001271 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1272 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001273 try:
1274 os.stat(bf)
1275 bf = os.path.abspath(bf)
1276 return [bf]
1277 except OSError:
1278 regexp = re.compile(bf)
1279 matches = []
1280 for f in filelist:
1281 if regexp.search(f) and os.path.isfile(f):
1282 matches.append(f)
1283 return matches
1284
Andrew Geissler5a43b432020-06-13 10:46:56 -05001285 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001286 """
1287 Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
1289 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001290 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001291 if len(matches) != 1:
1292 if matches:
1293 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
1297 parselog.error(msg)
1298 else:
1299 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1300 raise NoSpecificMatch
1301 return matches[0]
1302
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001303 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304 """
1305 Build the file matching regexp buildfile
1306 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001307 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001308
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001309 # Too many people use -b because they think it's how you normally
1310 # specify a target to be built, so show a warning
1311 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1312
1313 self.buildFileInternal(buildfile, task)
1314
1315 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1316 """
1317 Build the file matching regexp buildfile
1318 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001319
1320 # Parse the configuration here. We need to do it explicitly here since
1321 # buildFile() doesn't use the cache
1322 self.parseConfiguration()
1323
1324 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001325 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001326 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001327 if not task.startswith("do_"):
1328 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001329
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001330 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001331 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
1333 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001334 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001335
Andrew Geissler5a43b432020-06-13 10:46:56 -05001336 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001337
Andrew Geissler5a43b432020-06-13 10:46:56 -05001338 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001339 infos = dict(infos)
1340
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001341 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001342 try:
1343 info_array = infos[fn]
1344 except KeyError:
1345 bb.fatal("%s does not exist" % fn)
1346
1347 if info_array[0].skipped:
1348 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1349
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001350 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001351
1352 # Tweak some variables
1353 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001354 self.recipecaches[mc].ignored_dependencies = set()
1355 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001356 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001357
1358 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001359 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1360 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001361 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1362 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001363
1364 # Invalidate task for target if force mode active
1365 if self.configuration.force:
1366 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001367 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001368
1369 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001370 taskdata = {}
1371 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001372 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001373
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001374 if quietlog:
1375 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1376 bb.runqueue.logger.setLevel(logging.WARNING)
1377
1378 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1379 if fireevents:
1380 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
1382 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001383 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001384
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001385 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001386
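        # Idle-loop callback: pump the runqueue until it completes or the cooker is
        # asked to shut down, then fire BuildCompleted, restore the runqueue log level
        # and invalidate the parse cache (the recipe cache was modified above).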
1387 def buildFileIdle(server, rq, abort):
1388
1389 msg = None
1390 interrupted = 0
1391 if abort or self.state == state.forceshutdown:
1392 rq.finish_runqueue(True)
1393 msg = "Forced shutdown"
1394 interrupted = 2
1395 elif self.state == state.shutdown:
1396 rq.finish_runqueue(False)
1397 msg = "Stopped build"
1398 interrupted = 1
1399 failures = 0
1400 try:
1401 retval = rq.execute_runqueue()
1402 except runqueue.TaskFailure as exc:
1403 failures += len(exc.args)
1404 retval = False
1405 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001406 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001407 if quietlog:
1408 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001409 return False
1410
1411 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001412 if fireevents:
1413 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001414 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001415 # We trashed self.recipecaches above
1416 self.parsecache_valid = False
1417 self.configuration.limited_deps = False
1418 bb.parse.siggen.reset(self.data)
1419 if quietlog:
1420 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001421 return False
1422 if retval is True:
1423 return True
1424 return retval
1425
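        # Register the callback with the server; it is invoked from the idle loop until it returns False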
1426 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1427
1428 def buildTargets(self, targets, task):
1429 """
1430 Attempt to build the targets specified
1431 """
1432
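        # Idle-loop callback for a normal target build: drive the runqueue and fire
        # BuildCompleted for every multiconfig once it finishes or is interrupted.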
1433 def buildTargetsIdle(server, rq, abort):
1434 msg = None
1435 interrupted = 0
1436 if abort or self.state == state.forceshutdown:
1437 rq.finish_runqueue(True)
1438 msg = "Forced shutdown"
1439 interrupted = 2
1440 elif self.state == state.shutdown:
1441 rq.finish_runqueue(False)
1442 msg = "Stopped build"
1443 interrupted = 1
1444 failures = 0
1445 try:
1446 retval = rq.execute_runqueue()
1447 except runqueue.TaskFailure as exc:
1448 failures += len(exc.args)
1449 retval = False
1450 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001451 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001452 return False
1453
1454 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001455 try:
1456 for mc in self.multiconfigs:
1457 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1458 finally:
1459 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001460 return False
1461 if retval is True:
1462 return True
1463 return retval
1464
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001465 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001466 self.buildSetVars()
1467
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001468 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001469 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001470 task = self.configuration.cmd
1471
1472 if not task.startswith("do_"):
1473 task = "do_%s" % task
1474
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001475 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1476
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001477 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001478
1479 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001480
1481 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001482
1483 # make targets to always look as <target>:do_<task>
1484 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001485 for target in runlist:
1486 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001487 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001488 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001489
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001490 for mc in self.multiconfigs:
1491 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001492
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001493 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001494 if 'universe' in targets:
1495 rq.rqdata.warn_multi_bb = True
1496
1497 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1498
1499
1500 def getAllKeysWithFlags(self, flaglist):
1501 dump = {}
1502 for k in self.data.keys():
1503 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001504 expand = True
1505 flags = self.data.getVarFlags(k)
1506 if flags and "func" in flags and "python" in flags:
1507 expand = False
1508 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001509 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1510 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001511 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001512 'history' : self.data.varhistory.variable(k),
1513 }
1514 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001515 if flags and d in flags:
1516 dump[k][d] = flags[d]
1517 else:
1518 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001519 except Exception as e:
1520 print(e)
1521 return dump
1522
1523
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001524 def updateCacheSync(self):
1525 if self.state == state.running:
1526 return
1527
1528 # reload files for which we got notifications
1529 for p in self.inotify_modified_files:
1530 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001531 if p in bb.parse.BBHandler.cached_statements:
1532 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001533 self.inotify_modified_files = []
1534
1535 if not self.baseconfig_valid:
1536 logger.debug(1, "Reloading base configuration data")
1537 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001538 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001539
1540 # This is called for all async commands when self.state != running
1541 def updateCache(self):
1542 if self.state == state.running:
1543 return
1544
1545 if self.state in (state.shutdown, state.forceshutdown, state.error):
1546 if hasattr(self.parser, 'shutdown'):
1547 self.parser.shutdown(clean=False, force = True)
1548 raise bb.BBHandledException()
1549
1550 if self.state != state.parsing:
1551 self.updateCacheSync()
1552
1553 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001554 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555 self.parseConfiguration ()
1556 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001557 for mc in self.multiconfigs:
1558 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001559
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001560 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001561 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001562 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001563
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001564 for dep in self.configuration.extra_assume_provided:
1565 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001566
Andrew Geissler5a43b432020-06-13 10:46:56 -05001567 self.collections = {}
1568
1569 mcfilelist = {}
1570 total_masked = 0
1571 searchdirs = set()
1572 for mc in self.multiconfigs:
1573 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1574 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1575
1576 mcfilelist[mc] = filelist
1577 total_masked += masked
1578 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001579
1580 # Add inotify watches for directories searched for bb/bbappend files
1581 for dirent in searchdirs:
1582 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001583
Andrew Geissler5a43b432020-06-13 10:46:56 -05001584 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001585 self.parsecache_valid = True
1586
1587 self.state = state.parsing
1588
1589 if not self.parser.parse_next():
1590 collectlog.debug(1, "parsing complete")
1591 if self.parser.error:
1592 raise bb.BBHandledException()
1593 self.show_appends_with_no_recipes()
1594 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001595 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001596 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001597 self.state = state.running
1598
1599 # Send an event listing all stamps reachable after parsing
1600 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001601 for mc in self.multiconfigs:
1602 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1603 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604 return None
1605
1606 return True
1607
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001608 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609
1610 # Return a copy, don't modify the original
1611 pkgs_to_build = pkgs_to_build[:]
1612
1613 if len(pkgs_to_build) == 0:
1614 raise NothingToBuild
1615
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001616 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001617 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001619 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001620 if pkg.startswith("multiconfig:"):
1621 pkgs_to_build.remove(pkg)
1622 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001623
1624 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001625 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001626 for mc in self.multiconfigs:
1627 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1628 for t in self.recipecaches[mc].world_target:
1629 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001630 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001631 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001632
1633 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001634 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635 parselog.debug(1, "collating packages for \"universe\"")
1636 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001637 for mc in self.multiconfigs:
1638 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001639 if task:
1640 foundtask = False
1641 for provider_fn in self.recipecaches[mc].providers[t]:
1642 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1643 foundtask = True
1644 break
1645 if not foundtask:
1646 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1647 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001648 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001649 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001650 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001651
1652 return pkgs_to_build
1653
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654 def pre_serve(self):
        # We are now in our own process so we can call this here.
1656 # PRServ exits if its parent process exits
1657 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001658 return
1659
1660 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001661 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001662 if self.hashserv:
1663 self.hashserv.process.terminate()
1664 self.hashserv.process.join()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001665 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001666
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001667 def shutdown(self, force = False):
1668 if force:
1669 self.state = state.forceshutdown
1670 else:
1671 self.state = state.shutdown
1672
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001673 if self.parser:
1674 self.parser.shutdown(clean=not force, force=force)
1675
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001676 def finishcommand(self):
1677 self.state = state.initial
1678
1679 def reset(self):
1680 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001681 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001682
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001683 def clientComplete(self):
1684 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001685 self.finishcommand()
1686 self.extraconfigdata = {}
1687 self.command.reset()
1688 self.databuilder.reset()
1689 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001690 self.parsecache_valid = False
1691 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001692
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001693
1694class CookerExit(bb.event.Event):
1695 """
1696 Notify clients of the Cooker shutdown
1697 """
1698
1699 def __init__(self):
1700 bb.event.Event.__init__(self)
1701
1702
1703class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001704 def __init__(self, priorities, mc=''):
1705 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001706 self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
1708 # We need to sort the list with the longest pattern first, and so on to
1709 # the shortest. This allows nested layers to be properly evaluated.
1710 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001711
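    # Return the priority of the first BBFILE_PATTERN regex matching the file, together with
    # the regex itself; thanks to the sort above the most specific pattern wins. Files that
    # match no pattern get priority 0 and no regex.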
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001712 def calc_bbfile_priority(self, filename):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001713 for _, _, regex, pri in self.bbfile_config_priorities:
1714 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001715 return pri, regex
1716 return 0, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001717
1718 def get_bbfiles(self):
1719 """Get list of default .bb files by reading out the current directory"""
1720 path = os.getcwd()
1721 contents = os.listdir(path)
1722 bbfiles = []
1723 for f in contents:
1724 if f.endswith(".bb"):
1725 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1726 return bbfiles
1727
1728 def find_bbfiles(self, path):
1729 """Find all the .bb and .bbappend files in a directory"""
1730 found = []
1731 for dir, dirs, files in os.walk(path):
1732 for ignored in ('SCCS', 'CVS', '.svn'):
1733 if ignored in dirs:
1734 dirs.remove(ignored)
1735 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1736
1737 return found
1738
1739 def collect_bbfiles(self, config, eventdata):
1740 """Collect all available .bb build files"""
1741 masked = 0
1742
1743 collectlog.debug(1, "collecting .bb files")
1744
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001745 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001746 config.setVar("BBFILES", " ".join(files))
1747
1748 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001749 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001750
1751 if not len(files):
1752 files = self.get_bbfiles()
1753
1754 if not len(files):
1755 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1756 bb.event.fire(CookerExit(), eventdata)
1757
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001758 # We need to track where we look so that we can add inotify watches. There
1759 # is no nice way to do this, this is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001760 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001761 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001762 if hasattr(os, 'scandir'):
1763 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001764 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001765
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001766 def ourlistdir(d):
1767 searchdirs.append(d)
1768 return origlistdir(d)
1769
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001770 def ourscandir(d):
1771 searchdirs.append(d)
1772 return origscandir(d)
1773
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001774 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001775 if hasattr(os, 'scandir'):
1776 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001777 try:
1778 # Can't use set here as order is important
1779 newfiles = []
1780 for f in files:
1781 if os.path.isdir(f):
1782 dirfiles = self.find_bbfiles(f)
1783 for g in dirfiles:
1784 if g not in newfiles:
1785 newfiles.append(g)
1786 else:
1787 globbed = glob.glob(f)
1788 if not globbed and os.path.exists(f):
1789 globbed = [f]
1790 # glob gives files in order on disk. Sort to be deterministic.
1791 for g in sorted(globbed):
1792 if g not in newfiles:
1793 newfiles.append(g)
1794 finally:
1795 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001796 if hasattr(os, 'scandir'):
1797 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001798
1799 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001800
1801 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001802 # First validate the individual regular expressions and ignore any
1803 # that do not compile
1804 bbmasks = []
1805 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001806 # When constructing an older style single regex, it's possible for BBMASK
1807 # to end up beginning with '|', which matches and masks _everything_.
1808 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001809 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001810 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001811 try:
1812 re.compile(mask)
1813 bbmasks.append(mask)
1814 except sre_constants.error:
1815 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1816
1817 # Then validate the combined regular expressions. This should never
1818 # fail, but better safe than sorry...
1819 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001820 try:
1821 bbmask_compiled = re.compile(bbmask)
1822 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001823 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1824 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001825
1826 bbfiles = []
1827 bbappend = []
1828 for f in newfiles:
1829 if bbmask and bbmask_compiled.search(f):
1830 collectlog.debug(1, "skipping masked file %s", f)
1831 masked += 1
1832 continue
1833 if f.endswith('.bb'):
1834 bbfiles.append(f)
1835 elif f.endswith('.bbappend'):
1836 bbappend.append(f)
1837 else:
1838 collectlog.debug(1, "skipping %s: unknown file extension", f)
1839
1840 # Build a list of .bbappend files for each .bb file
1841 for f in bbappend:
1842 base = os.path.basename(f).replace('.bbappend', '.bb')
1843 self.bbappends.append((base, f))
1844
1845 # Find overlayed recipes
1846 # bbfiles will be in priority order which makes this easy
1847 bbfile_seen = dict()
1848 self.overlayed = defaultdict(list)
1849 for f in reversed(bbfiles):
1850 base = os.path.basename(f)
1851 if base not in bbfile_seen:
1852 bbfile_seen[base] = f
1853 else:
1854 topfile = bbfile_seen[base]
1855 self.overlayed[topfile].append(f)
1856
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001857 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001858
1859 def get_file_appends(self, fn):
1860 """
1861 Returns a list of .bbappend files to apply to fn
1862 """
1863 filelist = []
1864 f = os.path.basename(fn)
1865 for b in self.bbappends:
1866 (bbappend, filename) = b
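            # '%' in a bbappend name acts as a wildcard: the part before the '%' must match
            # the start of the recipe file name, e.g. foo_%.bbappend applies to foo_1.2.bb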
1867 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1868 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001869 return tuple(filelist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001870
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001871 def collection_priorities(self, pkgfns, fns, d):
1872 # Return the priorities of the entries in pkgfns
1873 # Also check that all the regexes in self.bbfile_config_priorities are used
1874 # (but to do that we need to ensure skipped recipes aren't counted, nor
1875 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001876
1877 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001878 seen = set()
1879 matched = set()
1880
1881 matched_regex = set()
1882 unmatched_regex = set()
1883 for _, _, regex, _ in self.bbfile_config_priorities:
1884 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001885
1886 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001887 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001888 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001889 priorities[p], regex = self.calc_bbfile_priority(realfn)
1890 if regex in unmatched_regex:
1891 matched_regex.add(regex)
1892 unmatched_regex.remove(regex)
1893 seen.add(realfn)
1894 if regex:
1895 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001896
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001897 if unmatched_regex:
1898 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001899 for b in self.bbappends:
1900 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001901 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001902
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001903 # Account for skipped recipes
1904 seen.update(fns)
1905
1906 seen.difference_update(matched)
1907
1908 def already_matched(fn):
1909 for regex in matched_regex:
1910 if regex.match(fn):
1911 return True
1912 return False
1913
1914 for unmatch in unmatched_regex.copy():
1915 for fn in seen:
1916 if unmatch.match(fn):
1917 # If the bbappend or file was already matched by another regex, skip it
1918 # e.g. for a layer within a layer, the outer regex could match, the inner
1919 # regex may match nothing and we should warn about that
1920 if already_matched(fn):
1921 continue
1922 unmatched_regex.remove(unmatch)
1923 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001924
1925 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001926 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001927 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05001928 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1929 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001930
1931 return priorities
1932
1933class ParsingFailure(Exception):
1934 def __init__(self, realexception, recipe):
1935 self.realexception = realexception
1936 self.recipe = recipe
1937 Exception.__init__(self, realexception, recipe)
1938
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001939class Parser(multiprocessing.Process):
1940 def __init__(self, jobs, results, quit, init, profile):
1941 self.jobs = jobs
1942 self.results = results
1943 self.quit = quit
1944 self.init = init
1945 multiprocessing.Process.__init__(self)
1946 self.context = bb.utils.get_context().copy()
1947 self.handlers = bb.event.get_class_handlers().copy()
1948 self.profile = profile
1949
1950 def run(self):
1951
1952 if not self.profile:
1953 self.realrun()
1954 return
1955
1956 try:
1957 import cProfile as profile
1958 except:
1959 import profile
1960 prof = profile.Profile()
1961 try:
1962 profile.Profile.runcall(prof, self.realrun)
1963 finally:
1964 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1965 prof.dump_stats(logfile)
1966
1967 def realrun(self):
1968 if self.init:
1969 self.init()
1970
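        # Results that can't be handed back because the results queue is full are kept in
        # 'pending' and retried before a new job is taken from the jobs list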
1971 pending = []
1972 while True:
1973 try:
1974 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001975 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001976 pass
1977 else:
1978 self.results.cancel_join_thread()
1979 break
1980
1981 if pending:
1982 result = pending.pop()
1983 else:
1984 try:
Brad Bishop19323692019-04-05 15:28:33 -04001985 job = self.jobs.pop()
1986 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001987 break
1988 result = self.parse(*job)
                # Clear the siggen cache after parsing to control memory usage - it can get huge
1990 bb.parse.siggen.postparsing_clean_cache()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001991 try:
1992 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001993 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001994 pending.append(result)
1995
Andrew Geissler5a43b432020-06-13 10:46:56 -05001996 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001997 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05001998 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001999 # Record the filename we're parsing into any events generated
2000 def parse_filter(self, record):
2001 record.taskpid = bb.event.worker_pid
2002 record.fn = filename
2003 return True
2004
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002005 # Reset our environment and handlers to the original settings
2006 bb.utils.set_context(self.context.copy())
2007 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002008 bb.event.LogHandler.filter = parse_filter
2009
Andrew Geissler5a43b432020-06-13 10:46:56 -05002010 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002011 except Exception as exc:
2012 tb = sys.exc_info()[2]
2013 exc.recipe = filename
2014 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2015 return True, exc
2016 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and, for example, a worker thread doesn't just exit on its own in response to
        # a SystemExit event.
2019 except BaseException as exc:
2020 return True, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002021 finally:
2022 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002023
2024class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002025 def __init__(self, cooker, mcfilelist, masked):
2026 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002027 self.cooker = cooker
2028 self.cfgdata = cooker.data
2029 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002030 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002031
2032 # Accounting statistics
2033 self.parsed = 0
2034 self.cached = 0
2035 self.error = 0
2036 self.masked = masked
2037
2038 self.skipped = 0
2039 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002040
2041 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002042 self.process_names = []
2043
Andrew Geissler5a43b432020-06-13 10:46:56 -05002044 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2045 self.fromcache = set()
2046 self.willparse = set()
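        # Partition the recipes: files with a valid cache entry are replayed from the cache,
        # the rest are queued for a full parse in the worker processes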
2047 for mc in self.cooker.multiconfigs:
2048 for filename in self.mcfilelist[mc]:
2049 appends = self.cooker.collections[mc].get_file_appends(filename)
2050 if not self.bb_caches[mc].cacheValid(filename, appends):
2051 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2052 else:
2053 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2054
2055 self.total = len(self.fromcache) + len(self.willparse)
2056 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002057 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002058
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002059 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002060 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002061
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002062 self.start()
2063 self.haveshutdown = False
2064
2065 def start(self):
2066 self.results = self.load_cached()
2067 self.processes = []
2068 if self.toparse:
2069 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2070 def init():
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002071 bb.utils.set_process_name(multiprocessing.current_process().name)
2072 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2073 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002074
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002075 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002076 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002077
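        # Split the recipes to be parsed into one interleaved chunk per parser process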
2078 def chunkify(lst,n):
2079 return [lst[i::n] for i in range(n)]
Andrew Geissler5a43b432020-06-13 10:46:56 -05002080 self.jobs = chunkify(list(self.willparse), self.num_processes)
Brad Bishop19323692019-04-05 15:28:33 -04002081
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002082 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002083 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002084 parser.start()
2085 self.process_names.append(parser.name)
2086 self.processes.append(parser)
2087
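        # Cached results are yielded first, then live results from the worker processes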
2088 self.results = itertools.chain(self.results, self.parse_generator())
2089
2090 def shutdown(self, clean=True, force=False):
2091 if not self.toparse:
2092 return
2093 if self.haveshutdown:
2094 return
2095 self.haveshutdown = True
2096
2097 if clean:
2098 event = bb.event.ParseCompleted(self.cached, self.parsed,
2099 self.skipped, self.masked,
2100 self.virtuals, self.error,
2101 self.total)
2102
2103 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002104 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002105 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002106 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002107 self.parser_quit.cancel_join_thread()
2108 for process in self.processes:
2109 self.parser_quit.put(None)
2110
        # Clean up the queue before calling process.join(), otherwise there might be
2112 # deadlocks.
2113 while True:
2114 try:
2115 self.result_queue.get(timeout=0.25)
2116 except queue.Empty:
2117 break
2118
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002119 for process in self.processes:
2120 if force:
2121 process.join(.1)
2122 process.terminate()
2123 else:
2124 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002125
Andrew Geissler5a43b432020-06-13 10:46:56 -05002126 def sync_caches():
2127 for c in self.bb_caches.values():
2128 c.sync()
2129
2130 sync = threading.Thread(target=sync_caches)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002131 sync.start()
2132 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002133 bb.codeparser.parser_cache_savemerge()
2134 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002135 if self.cooker.configuration.profile:
2136 profiles = []
2137 for i in self.process_names:
2138 logfile = "profile-parse-%s.log" % i
2139 if os.path.exists(logfile):
2140 profiles.append(logfile)
2141
2142 pout = "profile-parse.log.processed"
2143 bb.utils.process_profilelog(profiles, pout = pout)
2144 print("Processed parsing statistics saved to %s" % (pout))
2145
2146 def load_cached(self):
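        # Replay results for recipes whose cache entries are still valid; the first element
        # tells parse_next() whether the recipe actually had to be parsed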
Andrew Geissler5a43b432020-06-13 10:46:56 -05002147 for mc, cache, filename, appends in self.fromcache:
2148 cached, infos = cache.load(filename, appends)
2149 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002150
2151 def parse_generator(self):
2152 while True:
2153 if self.parsed >= self.toparse:
2154 break
2155
2156 try:
2157 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002158 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002159 pass
2160 else:
2161 value = result[1]
2162 if isinstance(value, BaseException):
2163 raise value
2164 else:
2165 yield result
2166
2167 def parse_next(self):
2168 result = []
2169 parsed = None
2170 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002171 parsed, mc, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002172 except StopIteration:
2173 self.shutdown()
2174 return False
2175 except bb.BBHandledException as exc:
2176 self.error += 1
2177 logger.error('Failed to parse recipe: %s' % exc.recipe)
2178 self.shutdown(clean=False)
2179 return False
2180 except ParsingFailure as exc:
2181 self.error += 1
2182 logger.error('Unable to parse %s: %s' %
2183 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2184 self.shutdown(clean=False)
2185 return False
2186 except bb.parse.ParseError as exc:
2187 self.error += 1
2188 logger.error(str(exc))
2189 self.shutdown(clean=False)
2190 return False
2191 except bb.data_smart.ExpansionError as exc:
2192 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002193 bbdir = os.path.dirname(__file__) + os.sep
2194 etype, value, _ = sys.exc_info()
2195 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2196 logger.error('ExpansionError during parsing %s', value.recipe,
2197 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002198 self.shutdown(clean=False)
2199 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002200 except Exception as exc:
2201 self.error += 1
2202 etype, value, tb = sys.exc_info()
2203 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002204 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002205 exc_info=(etype, value, exc.traceback))
2206 else:
2207 # Most likely, an exception occurred during raising an exception
2208 import traceback
2209 logger.error('Exception during parse: %s' % traceback.format_exc())
2210 self.shutdown(clean=False)
2211 return False
2212
2213 self.current += 1
2214 self.virtuals += len(result)
2215 if parsed:
2216 self.parsed += 1
2217 if self.parsed % self.progress_chunk == 0:
2218 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2219 self.cfgdata)
2220 else:
2221 self.cached += 1
2222
2223 for virtualfn, info_array in result:
2224 if info_array[0].skipped:
2225 self.skipped += 1
2226 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002227 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002228 parsed=parsed, watcher = self.cooker.add_filewatch)
2229 return True
2230
2231 def reparse(self, filename):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002232 to_reparse = set()
2233 for mc in self.cooker.multiconfigs:
2234 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2235
2236 for mc, filename, appends in to_reparse:
2237 infos = self.bb_caches[mc].parse(filename, appends)
2238 for vfn, info_array in infos:
2239 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)