#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
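    """
    Enumeration of the cooker lifecycle states, with a helper that maps a
    numeric state code back to its name for error reporting.
    """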
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
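    """
    Set of optional cooker features requested by the UI; feature ids not in
    _feature_list are silently ignored by setFeature().
    """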
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
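    """
    Writes cooker events to the configured event log file (writeeventlog),
    one JSON object per line containing the pickled, base64-encoded event.
    Events are queued until bb.event.BuildStarted initialises the file.
    """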
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

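        # Two inotify watch managers are used: configwatcher tracks the base
        # configuration files (changes invalidate baseconfig_valid), while
        # watcher tracks recipe/parse inputs (changes invalidate parsecache_valid).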
        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
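        """
        Watch the directories containing each dependency in 'deps' (or the
        paths themselves when dirs=True); for paths that do not exist yet,
        the nearest existing parent directory is watched instead.
        """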
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
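        """
        Start the PR server for this configuration and, when BB_HASHSERVE is
        set to "auto", a local hash equivalence server on a unix domain socket.
        """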
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

386 # Create a new hash server bound to a unix domain socket
Brad Bishop08902b02019-08-20 09:16:51 -0400387 if not self.hashserv:
388 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
Brad Bishopa34c0302019-09-23 22:34:48 -0400389 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
390 self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
Brad Bishop08902b02019-08-20 09:16:51 -0400391 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
Brad Bishop08902b02019-08-20 09:16:51 -0400392 self.hashserv.process.start()
Brad Bishopa34c0302019-09-23 22:34:48 -0400393 self.data.setVar("BB_HASHSERVE", self.hashservaddr)
394 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
395 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
Brad Bishop08902b02019-08-20 09:16:51 -0400396 for mc in self.databuilder.mcdata:
Brad Bishopa34c0302019-09-23 22:34:48 -0400397 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
Brad Bishop08902b02019-08-20 09:16:51 -0400398
399 bb.parse.init_parser(self.data)
400
    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
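        """
        Apply updated option and environment values from the UI/command line,
        re-registering the event log writer if needed and triggering a reset
        (and hence a reparse) when anything relevant has changed.
        """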
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
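        """
        Walk the prepared runqueue and return a nested dict describing
        build-time, runtime and task-level dependencies, provider mappings
        and layer priorities, as consumed by the DepTreeGenerated event and
        the .dot graph writer.
        """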
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

                # Recursively work out collection priorities based on dependencies
1221 def calc_layer_priority(collection):
1222 if not collection_priorities[collection]:
1223 max_depprio = min_prio
1224 for dep in collection_depends[collection]:
1225 calc_layer_priority(dep)
1226 depprio = collection_priorities[dep]
1227 if depprio > max_depprio:
1228 max_depprio = depprio
1229 max_depprio += 1
1230 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1231 collection_priorities[collection] = max_depprio
1232
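            # Illustration of the fallback calculation above (layer names are examples):
            # a layer with no dependencies ends up at min_prio + 1, and each dependent
            # layer sits one above the highest priority among the layers it depends on,
            # so meta gets min_prio + 1, meta-foo (depending on meta) gets min_prio + 2,
            # and so on down the dependency chain.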
1233 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1234 for c in collection_list:
1235 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001236 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001237 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001238 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1239 errors = True
1240 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001241 elif regex == "":
1242 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001243 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001244 errors = False
1245 else:
1246 try:
1247 cre = re.compile(regex)
1248 except re.error:
1249 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1250 errors = True
1251 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001252 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001253 if errors:
1254 # We've already printed the actual error(s)
1255 raise CollectionError("Errors during parsing layer configuration")
1256
1257 def buildSetVars(self):
1258 """
1259 Setup any variables needed before starting a build
1260 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001261 t = time.gmtime()
1262 for mc in self.databuilder.mcdata:
1263 ds = self.databuilder.mcdata[mc]
1264 if not ds.getVar("BUILDNAME", False):
1265 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1266 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1267 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1268 ds.setVar("TIME", time.strftime('%H%M%S', t))
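            # Illustration: a build started at 2021-02-15 14:30:00 UTC would see
            # DATE = "20210215", TIME = "143000" and BUILDSTART = "02/15/2021 14:30:00".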
1269
1270 def reset_mtime_caches(self):
1271 """
        Reset mtime caches - this is particularly important when memory resident, as
        something which is cached may well have changed since the last invocation (e.g. a
        file associated with a recipe might have been modified by the user).
1275 """
1276 build.reset_cache()
1277 bb.fetch._checksum_cache.mtime_cache.clear()
1278 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1279 if siggen_cache:
1280 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001281
Andrew Geissler5a43b432020-06-13 10:46:56 -05001282 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001283 """
1284 Find the .bb files which match the expression in 'buildfile'.
1285 """
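        # Illustrative behaviour: an argument that names an existing file is returned
        # as-is; anything else (e.g. "foo_.*\.bb") is treated as a regular expression
        # and matched against every .bb file collected from BBFILES.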
1286 if bf.startswith("/") or bf.startswith("../"):
1287 bf = os.path.abspath(bf)
1288
Andrew Geissler5a43b432020-06-13 10:46:56 -05001289 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1290 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001291 try:
1292 os.stat(bf)
1293 bf = os.path.abspath(bf)
1294 return [bf]
1295 except OSError:
1296 regexp = re.compile(bf)
1297 matches = []
1298 for f in filelist:
1299 if regexp.search(f) and os.path.isfile(f):
1300 matches.append(f)
1301 return matches
1302
Andrew Geissler5a43b432020-06-13 10:46:56 -05001303 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304 """
1305 Find the .bb file which matches the expression in 'buildfile'.
1306 Raise an error if multiple files
1307 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001308 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001309 if len(matches) != 1:
1310 if matches:
1311 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n %s" % f
1315 parselog.error(msg)
1316 else:
1317 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1318 raise NoSpecificMatch
1319 return matches[0]
1320
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001321 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001322 """
1323 Build the file matching regexp buildfile
1324 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001325 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001326
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001327 # Too many people use -b because they think it's how you normally
1328 # specify a target to be built, so show a warning
1329 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1330
1331 self.buildFileInternal(buildfile, task)
1332
1333 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1334 """
1335 Build the file matching regexp buildfile
1336 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001337
1338 # Parse the configuration here. We need to do it explicitly here since
1339 # buildFile() doesn't use the cache
1340 self.parseConfiguration()
1341
1342 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001343 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001344 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001345 if not task.startswith("do_"):
1346 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001347
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001348 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001349 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350
1351 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001352 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353
Andrew Geissler5a43b432020-06-13 10:46:56 -05001354 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001355
Andrew Geissler5a43b432020-06-13 10:46:56 -05001356 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001357 infos = dict(infos)
1358
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001359 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360 try:
1361 info_array = infos[fn]
1362 except KeyError:
1363 bb.fatal("%s does not exist" % fn)
1364
1365 if info_array[0].skipped:
1366 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1367
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001368 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001369
1370 # Tweak some variables
1371 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001372 self.recipecaches[mc].ignored_dependencies = set()
1373 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001374 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001375
1376 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001377 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1378 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001379 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1380 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
1382 # Invalidate task for target if force mode active
1383 if self.configuration.force:
1384 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001385 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001386
1387 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001388 taskdata = {}
1389 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001390 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001391
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001392 if quietlog:
1393 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1394 bb.runqueue.logger.setLevel(logging.WARNING)
1395
1396 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1397 if fireevents:
1398 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001399
1400 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001401 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001402
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001403 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001404
1405 def buildFileIdle(server, rq, abort):
1406
1407 msg = None
1408 interrupted = 0
1409 if abort or self.state == state.forceshutdown:
1410 rq.finish_runqueue(True)
1411 msg = "Forced shutdown"
1412 interrupted = 2
1413 elif self.state == state.shutdown:
1414 rq.finish_runqueue(False)
1415 msg = "Stopped build"
1416 interrupted = 1
1417 failures = 0
1418 try:
1419 retval = rq.execute_runqueue()
1420 except runqueue.TaskFailure as exc:
1421 failures += len(exc.args)
1422 retval = False
1423 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001424 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001425 if quietlog:
1426 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001427 return False
1428
1429 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001430 if fireevents:
1431 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001432 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001433 # We trashed self.recipecaches above
1434 self.parsecache_valid = False
1435 self.configuration.limited_deps = False
1436 bb.parse.siggen.reset(self.data)
1437 if quietlog:
1438 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001439 return False
1440 if retval is True:
1441 return True
1442 return retval
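        # The idle handler protocol assumed here: returning False marks the callback
        # as finished, True asks to be rescheduled immediately, and any other value
        # returned by execute_runqueue() (e.g. a float) is a delay before the next call.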
1443
Andrew Geissler635e0e42020-08-21 15:58:33 -05001444 self.idleCallBackRegister(buildFileIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001445
1446 def buildTargets(self, targets, task):
1447 """
1448 Attempt to build the targets specified
1449 """
1450
1451 def buildTargetsIdle(server, rq, abort):
1452 msg = None
1453 interrupted = 0
1454 if abort or self.state == state.forceshutdown:
1455 rq.finish_runqueue(True)
1456 msg = "Forced shutdown"
1457 interrupted = 2
1458 elif self.state == state.shutdown:
1459 rq.finish_runqueue(False)
1460 msg = "Stopped build"
1461 interrupted = 1
1462 failures = 0
1463 try:
1464 retval = rq.execute_runqueue()
1465 except runqueue.TaskFailure as exc:
1466 failures += len(exc.args)
1467 retval = False
1468 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001469 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001470 return False
1471
1472 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001473 try:
1474 for mc in self.multiconfigs:
1475 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1476 finally:
1477 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001478 return False
1479 if retval is True:
1480 return True
1481 return retval
1482
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001483 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001484 self.buildSetVars()
1485
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001486 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001487 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001488 task = self.configuration.cmd
1489
1490 if not task.startswith("do_"):
1491 task = "do_%s" % task
1492
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001493 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
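        # Illustration: with task "do_build", targets ["quilt", "mc:mc1:zlib:do_install"]
        # become ["quilt:do_build", "mc:mc1:zlib:do_install"] - only entries without an
        # explicit task get the default appended.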
1494
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001495 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001496
1497 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001498
1499 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001500
1501 # make targets to always look as <target>:do_<task>
1502 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001503 for target in runlist:
1504 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001505 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001506 ntargets.append("%s:%s" % (target[1], target[2]))
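            # Illustration: a runlist entry ('mc1', 'zlib', 'do_build', fn) is reported
            # both as "mc:mc1:zlib:do_build" and as "zlib:do_build"; entries with an
            # empty multiconfig only get the short form.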
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001507
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001508 for mc in self.multiconfigs:
1509 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001510
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001511 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001512 if 'universe' in targets:
1513 rq.rqdata.warn_multi_bb = True
1514
Andrew Geissler635e0e42020-08-21 15:58:33 -05001515 self.idleCallBackRegister(buildTargetsIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001516
1517
1518 def getAllKeysWithFlags(self, flaglist):
1519 dump = {}
1520 for k in self.data.keys():
1521 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001522 expand = True
1523 flags = self.data.getVarFlags(k)
1524 if flags and "func" in flags and "python" in flags:
1525 expand = False
1526 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001527 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1528 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001529 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001530 'history' : self.data.varhistory.variable(k),
1531 }
1532 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001533 if flags and d in flags:
1534 dump[k][d] = flags[d]
1535 else:
1536 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001537 except Exception as e:
1538 print(e)
1539 return dump
1540
1541
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001542 def updateCacheSync(self):
1543 if self.state == state.running:
1544 return
1545
1546 # reload files for which we got notifications
1547 for p in self.inotify_modified_files:
1548 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001549 if p in bb.parse.BBHandler.cached_statements:
1550 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001551 self.inotify_modified_files = []
1552
1553 if not self.baseconfig_valid:
1554 logger.debug(1, "Reloading base configuration data")
1555 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001556 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001557
1558 # This is called for all async commands when self.state != running
1559 def updateCache(self):
1560 if self.state == state.running:
1561 return
1562
1563 if self.state in (state.shutdown, state.forceshutdown, state.error):
1564 if hasattr(self.parser, 'shutdown'):
1565 self.parser.shutdown(clean=False, force = True)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001566 self.parser.final_cleanup()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567 raise bb.BBHandledException()
1568
1569 if self.state != state.parsing:
1570 self.updateCacheSync()
1571
1572 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001573 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001574 self.parseConfiguration ()
1575 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001576 for mc in self.multiconfigs:
1577 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001578
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001579 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001580 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001581 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001582
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001583 for dep in self.configuration.extra_assume_provided:
1584 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001585
Andrew Geissler5a43b432020-06-13 10:46:56 -05001586 self.collections = {}
1587
1588 mcfilelist = {}
1589 total_masked = 0
1590 searchdirs = set()
1591 for mc in self.multiconfigs:
1592 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1593 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1594
1595 mcfilelist[mc] = filelist
1596 total_masked += masked
1597 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001598
1599 # Add inotify watches for directories searched for bb/bbappend files
1600 for dirent in searchdirs:
1601 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001602
Andrew Geissler5a43b432020-06-13 10:46:56 -05001603 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604 self.parsecache_valid = True
1605
1606 self.state = state.parsing
1607
1608 if not self.parser.parse_next():
1609 collectlog.debug(1, "parsing complete")
1610 if self.parser.error:
1611 raise bb.BBHandledException()
1612 self.show_appends_with_no_recipes()
1613 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001614 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001615 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001616 self.state = state.running
1617
1618 # Send an event listing all stamps reachable after parsing
1619 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001620 for mc in self.multiconfigs:
1621 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1622 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001623 return None
1624
1625 return True
1626
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001627 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001628
1629 # Return a copy, don't modify the original
1630 pkgs_to_build = pkgs_to_build[:]
1631
1632 if len(pkgs_to_build) == 0:
1633 raise NothingToBuild
1634
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001635 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001636 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001637 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001638 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001639 if pkg.startswith("multiconfig:"):
1640 pkgs_to_build.remove(pkg)
1641 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001642
1643 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001645 for mc in self.multiconfigs:
1646 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1647 for t in self.recipecaches[mc].world_target:
1648 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001649 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001650 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001651
1652 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001653 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654 parselog.debug(1, "collating packages for \"universe\"")
1655 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001656 for mc in self.multiconfigs:
1657 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001658 if task:
1659 foundtask = False
1660 for provider_fn in self.recipecaches[mc].providers[t]:
1661 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1662 foundtask = True
1663 break
1664 if not foundtask:
1665 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1666 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001667 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001668 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001669 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001670
1671 return pkgs_to_build
1672
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001673 def pre_serve(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001674 return
1675
1676 def post_serve(self):
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001677 self.shutdown(force=True)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001678 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001679 if self.hashserv:
1680 self.hashserv.process.terminate()
1681 self.hashserv.process.join()
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001682 if hasattr(self, "data"):
1683 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001684
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001685 def shutdown(self, force = False):
1686 if force:
1687 self.state = state.forceshutdown
1688 else:
1689 self.state = state.shutdown
1690
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001691 if self.parser:
1692 self.parser.shutdown(clean=not force, force=force)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001693 self.parser.final_cleanup()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001694
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001695 def finishcommand(self):
1696 self.state = state.initial
1697
1698 def reset(self):
1699 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001700 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001701
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001702 def clientComplete(self):
1703 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001704 self.finishcommand()
1705 self.extraconfigdata = {}
1706 self.command.reset()
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001707 if hasattr(self, "data"):
1708 self.databuilder.reset()
1709 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001710 self.parsecache_valid = False
1711 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001712
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001713
1714class CookerExit(bb.event.Event):
1715 """
1716 Notify clients of the Cooker shutdown
1717 """
1718
1719 def __init__(self):
1720 bb.event.Event.__init__(self)
1721
1722
1723class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001724 def __init__(self, priorities, mc=''):
1725 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001726 self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
1728 # We need to sort the list with the longest pattern first, and so on to
1729 # the shortest. This allows nested layers to be properly evaluated.
1730 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001731
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001732 def calc_bbfile_priority(self, filename):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001733 for _, _, regex, pri in self.bbfile_config_priorities:
1734 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001735 return pri, regex
1736 return 0, None
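        # Illustration (hypothetical layer path): with BBFILE_PATTERN_core = "^/srv/poky/meta/"
        # at priority 5, a file under /srv/poky/meta/ returns (5, <that compiled regex>),
        # while a file matching no configured pattern falls through to (0, None).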
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001737
1738 def get_bbfiles(self):
1739 """Get list of default .bb files by reading out the current directory"""
1740 path = os.getcwd()
1741 contents = os.listdir(path)
1742 bbfiles = []
1743 for f in contents:
1744 if f.endswith(".bb"):
1745 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1746 return bbfiles
1747
1748 def find_bbfiles(self, path):
1749 """Find all the .bb and .bbappend files in a directory"""
1750 found = []
1751 for dir, dirs, files in os.walk(path):
1752 for ignored in ('SCCS', 'CVS', '.svn'):
1753 if ignored in dirs:
1754 dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1756
1757 return found
1758
1759 def collect_bbfiles(self, config, eventdata):
1760 """Collect all available .bb build files"""
1761 masked = 0
1762
1763 collectlog.debug(1, "collecting .bb files")
1764
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001765 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001766
1767 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001768 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001769 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001770
1771 if not len(files):
1772 files = self.get_bbfiles()
1773
1774 if not len(files):
1775 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1776 bb.event.fire(CookerExit(), eventdata)
1777
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001778 # We need to track where we look so that we can add inotify watches. There
1779 # is no nice way to do this, this is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001780 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001781 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001782 if hasattr(os, 'scandir'):
1783 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001784 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001785
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001786 def ourlistdir(d):
1787 searchdirs.append(d)
1788 return origlistdir(d)
1789
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001790 def ourscandir(d):
1791 searchdirs.append(d)
1792 return origscandir(d)
1793
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001794 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001795 if hasattr(os, 'scandir'):
1796 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001797 try:
1798 # Can't use set here as order is important
1799 newfiles = []
1800 for f in files:
1801 if os.path.isdir(f):
1802 dirfiles = self.find_bbfiles(f)
1803 for g in dirfiles:
1804 if g not in newfiles:
1805 newfiles.append(g)
1806 else:
1807 globbed = glob.glob(f)
1808 if not globbed and os.path.exists(f):
1809 globbed = [f]
1810 # glob gives files in order on disk. Sort to be deterministic.
1811 for g in sorted(globbed):
1812 if g not in newfiles:
1813 newfiles.append(g)
1814 finally:
1815 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001816 if hasattr(os, 'scandir'):
1817 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001818
1819 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001820
1821 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001822 # First validate the individual regular expressions and ignore any
1823 # that do not compile
1824 bbmasks = []
1825 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001826 # When constructing an older style single regex, it's possible for BBMASK
1827 # to end up beginning with '|', which matches and masks _everything_.
1828 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001829 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001830 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001831 try:
1832 re.compile(mask)
1833 bbmasks.append(mask)
1834 except sre_constants.error:
1835 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1836
1837 # Then validate the combined regular expressions. This should never
1838 # fail, but better safe than sorry...
1839 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001840 try:
1841 bbmask_compiled = re.compile(bbmask)
1842 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001843 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1844 bbmask = None
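        # Illustration (hypothetical values): BBMASK = "meta-foo/recipes-x/ broken_.*\.bb"
        # masks everything under meta-foo/recipes-x/ plus any file matching broken_.*\.bb;
        # each space-separated entry is validated on its own and then OR-ed together.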
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001845
1846 bbfiles = []
1847 bbappend = []
1848 for f in newfiles:
1849 if bbmask and bbmask_compiled.search(f):
1850 collectlog.debug(1, "skipping masked file %s", f)
1851 masked += 1
1852 continue
1853 if f.endswith('.bb'):
1854 bbfiles.append(f)
1855 elif f.endswith('.bbappend'):
1856 bbappend.append(f)
1857 else:
1858 collectlog.debug(1, "skipping %s: unknown file extension", f)
1859
1860 # Build a list of .bbappend files for each .bb file
1861 for f in bbappend:
1862 base = os.path.basename(f).replace('.bbappend', '.bb')
1863 self.bbappends.append((base, f))
1864
1865 # Find overlayed recipes
1866 # bbfiles will be in priority order which makes this easy
1867 bbfile_seen = dict()
1868 self.overlayed = defaultdict(list)
1869 for f in reversed(bbfiles):
1870 base = os.path.basename(f)
1871 if base not in bbfile_seen:
1872 bbfile_seen[base] = f
1873 else:
1874 topfile = bbfile_seen[base]
1875 self.overlayed[topfile].append(f)
1876
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001877 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001878
1879 def get_file_appends(self, fn):
1880 """
1881 Returns a list of .bbappend files to apply to fn
1882 """
1883 filelist = []
1884 f = os.path.basename(fn)
1885 for b in self.bbappends:
1886 (bbappend, filename) = b
1887 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1888 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001889 return tuple(filelist)
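        # Illustration of the '%' wildcard handling above: "busybox_%.bbappend" applies
        # to any busybox_<version>.bb, whereas "busybox_1.31.0.bbappend" only applies to
        # that exact recipe file name.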
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001890
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001891 def collection_priorities(self, pkgfns, fns, d):
1892 # Return the priorities of the entries in pkgfns
1893 # Also check that all the regexes in self.bbfile_config_priorities are used
1894 # (but to do that we need to ensure skipped recipes aren't counted, nor
1895 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001896
1897 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001898 seen = set()
1899 matched = set()
1900
1901 matched_regex = set()
1902 unmatched_regex = set()
1903 for _, _, regex, _ in self.bbfile_config_priorities:
1904 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001905
1906 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001907 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001908 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001909 priorities[p], regex = self.calc_bbfile_priority(realfn)
1910 if regex in unmatched_regex:
1911 matched_regex.add(regex)
1912 unmatched_regex.remove(regex)
1913 seen.add(realfn)
1914 if regex:
1915 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001916
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001917 if unmatched_regex:
1918 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001919 for b in self.bbappends:
1920 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001921 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001922
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001923 # Account for skipped recipes
1924 seen.update(fns)
1925
1926 seen.difference_update(matched)
1927
1928 def already_matched(fn):
1929 for regex in matched_regex:
1930 if regex.match(fn):
1931 return True
1932 return False
1933
1934 for unmatch in unmatched_regex.copy():
1935 for fn in seen:
1936 if unmatch.match(fn):
1937 # If the bbappend or file was already matched by another regex, skip it
1938 # e.g. for a layer within a layer, the outer regex could match, the inner
1939 # regex may match nothing and we should warn about that
1940 if already_matched(fn):
1941 continue
1942 unmatched_regex.remove(unmatch)
1943 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001944
1945 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001946 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001947 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05001948 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1949 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001950
1951 return priorities
1952
1953class ParsingFailure(Exception):
1954 def __init__(self, realexception, recipe):
1955 self.realexception = realexception
1956 self.recipe = recipe
1957 Exception.__init__(self, realexception, recipe)
1958
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001959class Parser(multiprocessing.Process):
1960 def __init__(self, jobs, results, quit, init, profile):
1961 self.jobs = jobs
1962 self.results = results
1963 self.quit = quit
1964 self.init = init
1965 multiprocessing.Process.__init__(self)
1966 self.context = bb.utils.get_context().copy()
1967 self.handlers = bb.event.get_class_handlers().copy()
1968 self.profile = profile
1969
1970 def run(self):
1971
1972 if not self.profile:
1973 self.realrun()
1974 return
1975
1976 try:
1977 import cProfile as profile
1978 except:
1979 import profile
1980 prof = profile.Profile()
1981 try:
1982 profile.Profile.runcall(prof, self.realrun)
1983 finally:
1984 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1985 prof.dump_stats(logfile)
1986
1987 def realrun(self):
1988 if self.init:
1989 self.init()
1990
1991 pending = []
1992 while True:
1993 try:
1994 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001995 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001996 pass
1997 else:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001998 self.results.close()
1999 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002000 break
2001
2002 if pending:
2003 result = pending.pop()
2004 else:
2005 try:
Brad Bishop19323692019-04-05 15:28:33 -04002006 job = self.jobs.pop()
2007 except IndexError:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002008 self.results.close()
2009 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002010 break
2011 result = self.parse(*job)
            # Clear the siggen cache after parsing to control memory usage, it's huge
2013 bb.parse.siggen.postparsing_clean_cache()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002014 try:
2015 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002016 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002017 pending.append(result)
2018
Andrew Geissler5a43b432020-06-13 10:46:56 -05002019 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002020 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05002021 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002022 # Record the filename we're parsing into any events generated
2023 def parse_filter(self, record):
2024 record.taskpid = bb.event.worker_pid
2025 record.fn = filename
2026 return True
2027
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002028 # Reset our environment and handlers to the original settings
2029 bb.utils.set_context(self.context.copy())
2030 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002031 bb.event.LogHandler.filter = parse_filter
2032
Andrew Geissler5a43b432020-06-13 10:46:56 -05002033 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002034 except Exception as exc:
2035 tb = sys.exc_info()[2]
2036 exc.recipe = filename
2037 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2038 return True, exc
2039 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2040 # and for example a worker thread doesn't just exit on its own in response to
2041 # a SystemExit event for example.
2042 except BaseException as exc:
2043 return True, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002044 finally:
2045 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002046
2047class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002048 def __init__(self, cooker, mcfilelist, masked):
2049 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002050 self.cooker = cooker
2051 self.cfgdata = cooker.data
2052 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002053 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002054
2055 # Accounting statistics
2056 self.parsed = 0
2057 self.cached = 0
2058 self.error = 0
2059 self.masked = masked
2060
2061 self.skipped = 0
2062 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002063
2064 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002065 self.process_names = []
2066
Andrew Geissler5a43b432020-06-13 10:46:56 -05002067 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2068 self.fromcache = set()
2069 self.willparse = set()
2070 for mc in self.cooker.multiconfigs:
2071 for filename in self.mcfilelist[mc]:
2072 appends = self.cooker.collections[mc].get_file_appends(filename)
2073 if not self.bb_caches[mc].cacheValid(filename, appends):
2074 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2075 else:
2076 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2077
2078 self.total = len(self.fromcache) + len(self.willparse)
2079 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002080 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002081
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002082 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002083 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002084
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002085 self.start()
2086 self.haveshutdown = False
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002087 self.syncthread = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002088
2089 def start(self):
2090 self.results = self.load_cached()
2091 self.processes = []
2092 if self.toparse:
2093 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2094 def init():
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002095 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2096 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2097 signal.signal(signal.SIGINT, signal.SIG_IGN)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002098 bb.utils.set_process_name(multiprocessing.current_process().name)
2099 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2100 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002101
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002102 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002103 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002104
2105 def chunkify(lst,n):
2106 return [lst[i::n] for i in range(n)]
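            # e.g. chunkify(list(range(7)), 3) -> [[0, 3, 6], [1, 4], [2, 5]], giving
            # each parser process a roughly even share of the recipes to parse.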
Andrew Geissler5a43b432020-06-13 10:46:56 -05002107 self.jobs = chunkify(list(self.willparse), self.num_processes)
Brad Bishop19323692019-04-05 15:28:33 -04002108
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002109 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002110 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002111 parser.start()
2112 self.process_names.append(parser.name)
2113 self.processes.append(parser)
2114
2115 self.results = itertools.chain(self.results, self.parse_generator())
2116
2117 def shutdown(self, clean=True, force=False):
2118 if not self.toparse:
2119 return
2120 if self.haveshutdown:
2121 return
2122 self.haveshutdown = True
2123
2124 if clean:
2125 event = bb.event.ParseCompleted(self.cached, self.parsed,
2126 self.skipped, self.masked,
2127 self.virtuals, self.error,
2128 self.total)
2129
2130 bb.event.fire(event, self.cfgdata)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002131
2132 for process in self.processes:
2133 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002134
Brad Bishop08902b02019-08-20 09:16:51 -04002135 # Cleanup the queue before call process.join(), otherwise there might be
2136 # deadlocks.
2137 while True:
2138 try:
2139 self.result_queue.get(timeout=0.25)
2140 except queue.Empty:
2141 break
2142
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002143 for process in self.processes:
2144 if force:
2145 process.join(.1)
2146 process.terminate()
2147 else:
2148 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002149
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002150 self.parser_quit.close()
2151 # Allow data left in the cancel queue to be discarded
2152 self.parser_quit.cancel_join_thread()
2153
Andrew Geissler5a43b432020-06-13 10:46:56 -05002154 def sync_caches():
2155 for c in self.bb_caches.values():
2156 c.sync()
2157
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002158 sync = threading.Thread(target=sync_caches, name="SyncThread")
2159 self.syncthread = sync
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002160 sync.start()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002161 bb.codeparser.parser_cache_savemerge()
2162 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002163 if self.cooker.configuration.profile:
2164 profiles = []
2165 for i in self.process_names:
2166 logfile = "profile-parse-%s.log" % i
2167 if os.path.exists(logfile):
2168 profiles.append(logfile)
2169
2170 pout = "profile-parse.log.processed"
2171 bb.utils.process_profilelog(profiles, pout = pout)
2172 print("Processed parsing statistics saved to %s" % (pout))
2173
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002174 def final_cleanup(self):
2175 if self.syncthread:
2176 self.syncthread.join()
2177
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002178 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002179 for mc, cache, filename, appends in self.fromcache:
2180 cached, infos = cache.load(filename, appends)
2181 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002182
2183 def parse_generator(self):
2184 while True:
2185 if self.parsed >= self.toparse:
2186 break
2187
2188 try:
2189 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002190 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002191 pass
2192 else:
2193 value = result[1]
2194 if isinstance(value, BaseException):
2195 raise value
2196 else:
2197 yield result
2198
2199 def parse_next(self):
2200 result = []
2201 parsed = None
2202 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002203 parsed, mc, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002204 except StopIteration:
2205 self.shutdown()
2206 return False
2207 except bb.BBHandledException as exc:
2208 self.error += 1
2209 logger.error('Failed to parse recipe: %s' % exc.recipe)
2210 self.shutdown(clean=False)
2211 return False
2212 except ParsingFailure as exc:
2213 self.error += 1
2214 logger.error('Unable to parse %s: %s' %
2215 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2216 self.shutdown(clean=False)
2217 return False
2218 except bb.parse.ParseError as exc:
2219 self.error += 1
2220 logger.error(str(exc))
2221 self.shutdown(clean=False)
2222 return False
2223 except bb.data_smart.ExpansionError as exc:
2224 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002225 bbdir = os.path.dirname(__file__) + os.sep
2226 etype, value, _ = sys.exc_info()
2227 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2228 logger.error('ExpansionError during parsing %s', value.recipe,
2229 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002230 self.shutdown(clean=False)
2231 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002232 except Exception as exc:
2233 self.error += 1
2234 etype, value, tb = sys.exc_info()
2235 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002236 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002237 exc_info=(etype, value, exc.traceback))
2238 else:
2239 # Most likely, an exception occurred during raising an exception
2240 import traceback
2241 logger.error('Exception during parse: %s' % traceback.format_exc())
2242 self.shutdown(clean=False)
2243 return False
2244
2245 self.current += 1
2246 self.virtuals += len(result)
2247 if parsed:
2248 self.parsed += 1
2249 if self.parsed % self.progress_chunk == 0:
2250 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2251 self.cfgdata)
2252 else:
2253 self.cached += 1
2254
2255 for virtualfn, info_array in result:
2256 if info_array[0].skipped:
2257 self.skipped += 1
2258 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002259 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002260 parsed=parsed, watcher = self.cooker.add_filewatch)
2261 return True
2262
2263 def reparse(self, filename):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002264 to_reparse = set()
2265 for mc in self.cooker.multiconfigs:
2266 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2267
2268 for mc, filename, appends in to_reparse:
2269 infos = self.bb_caches[mc].parse(filename, appends)
2270 for vfn, info_array in infos:
2271 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)