#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)

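# Illustrative note: a front-end that needs per-variable history tracking would
# typically request it before the base configuration is parsed, e.g. (assuming a
# BBCooker instance named "cooker"):
#   cooker.setFeatures([CookerFeatures.BASEDATASTORE_TRACKING])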
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

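# The resulting event log is line-oriented JSON: the first record dumps all
# variables, and each later record looks roughly like (illustrative):
#   {"class": "bb.event.BuildStarted", "vars": "<base64-encoded pickle>"}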
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

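    # Note: with BB_HASHSERVE = "auto" the datastores end up pointing at the local
    # unix domain socket rather than the literal value "auto", e.g. (illustrative)
    # "unix:///path/to/builddir/hashserve.sock".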
    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

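    # Illustrative note: each entry in the runlist returned by buildTaskData() has the
    # shape [mc, target, taskname, filename], e.g.
    #   ['', 'busybox', 'do_build', '/path/to/recipes-core/busybox/busybox_1.31.1.bb']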
711 def prepareTreeData(self, pkgs_to_build, task):
712 """
713 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
714 """
715
716 # We set abort to False here to prevent unbuildable targets raising
717 # an exception when we're just generating data
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600718 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500719
720 return runlist, taskdata
721
722 ######## WARNING : this function requires cache_extra to be enabled ########
723
724 def generateTaskDepTreeData(self, pkgs_to_build, task):
725 """
726 Create a dependency graph of pkgs_to_build including reverse dependency
727 information.
728 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500729 if not task.startswith("do_"):
730 task = "do_%s" % task
731
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500732 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600733 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500734 rq.rqdata.prepare()
735 return self.buildDependTree(rq, taskdata)
736
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600737 @staticmethod
738 def add_mc_prefix(mc, pn):
739 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -0400740 return "mc:%s:%s" % (mc, pn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600741 return pn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500742
743 def buildDependTree(self, rq, taskdata):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600744 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500745 depend_tree = {}
746 depend_tree["depends"] = {}
747 depend_tree["tdepends"] = {}
748 depend_tree["pn"] = {}
749 depend_tree["rdepends-pn"] = {}
750 depend_tree["packages"] = {}
751 depend_tree["rdepends-pkg"] = {}
752 depend_tree["rrecs-pkg"] = {}
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500753 depend_tree['providermap'] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600754 depend_tree["layer-priorities"] = self.bbfile_config_priorities
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500755
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600756 for mc in taskdata:
757 for name, fn in list(taskdata[mc].get_providermap().items()):
758 pn = self.recipecaches[mc].pkg_fn[fn]
759 pn = self.add_mc_prefix(mc, pn)
760 if name != pn:
761 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
762 depend_tree['providermap'][name] = (pn, version)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500763
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600764 for tid in rq.rqdata.runtaskentries:
765 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
766 pn = self.recipecaches[mc].pkg_fn[taskfn]
767 pn = self.add_mc_prefix(mc, pn)
768 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500769 if pn not in depend_tree["pn"]:
770 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600771 depend_tree["pn"][pn]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500772 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600773 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500774
775 # if we have extra caches, list all attributes they bring in
776 extra_info = []
777 for cache_class in self.caches_array:
778 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
779 cachefields = getattr(cache_class, 'cachefields', [])
780 extra_info = extra_info + cachefields
781
782 # for all attributes stored, add them to the dependency tree
783 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600784 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500785
786
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500787 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
788 if not dotname in depend_tree["tdepends"]:
789 depend_tree["tdepends"][dotname] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600790 for dep in rq.rqdata.runtaskentries[tid].depends:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800791 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
792 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600793 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
794 if taskfn not in seen_fns:
795 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500796 packages = []
797
798 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600799 for dep in taskdata[mc].depids[taskfn]:
800 depend_tree["depends"][pn].append(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500801
802 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600803 for rdep in taskdata[mc].rdepids[taskfn]:
804 depend_tree["rdepends-pn"][pn].append(rdep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500805
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600806 rdepends = self.recipecaches[mc].rundeps[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500807 for package in rdepends:
808 depend_tree["rdepends-pkg"][package] = []
809 for rdepend in rdepends[package]:
810 depend_tree["rdepends-pkg"][package].append(rdepend)
811 packages.append(package)
812
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600813 rrecs = self.recipecaches[mc].runrecs[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500814 for package in rrecs:
815 depend_tree["rrecs-pkg"][package] = []
816 for rdepend in rrecs[package]:
817 depend_tree["rrecs-pkg"][package].append(rdepend)
818 if not package in packages:
819 packages.append(package)
820
821 for package in packages:
822 if package not in depend_tree["packages"]:
823 depend_tree["packages"][package] = {}
824 depend_tree["packages"][package]["pn"] = pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600825 depend_tree["packages"][package]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500826 depend_tree["packages"][package]["version"] = version
827
828 return depend_tree
829
830 ######## WARNING : this function requires cache_extra to be enabled ########
831 def generatePkgDepTreeData(self, pkgs_to_build, task):
832 """
833 Create a dependency tree of pkgs_to_build, returning the data.
834 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500835 if not task.startswith("do_"):
836 task = "do_%s" % task
837
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500838 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500839
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600840 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500841 depend_tree = {}
842 depend_tree["depends"] = {}
843 depend_tree["pn"] = {}
844 depend_tree["rdepends-pn"] = {}
845 depend_tree["rdepends-pkg"] = {}
846 depend_tree["rrecs-pkg"] = {}
847
848 # if we have extra caches, list all attributes they bring in
849 extra_info = []
850 for cache_class in self.caches_array:
851 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
852 cachefields = getattr(cache_class, 'cachefields', [])
853 extra_info = extra_info + cachefields
854
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600855 tids = []
856 for mc in taskdata:
857 for tid in taskdata[mc].taskentries:
858 tids.append(tid)
859
860 for tid in tids:
861 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
862
863 pn = self.recipecaches[mc].pkg_fn[taskfn]
864 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500865
866 if pn not in depend_tree["pn"]:
867 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600868 depend_tree["pn"][pn]["filename"] = taskfn
869 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500870 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600871 rdepends = self.recipecaches[mc].rundeps[taskfn]
872 rrecs = self.recipecaches[mc].runrecs[taskfn]
873 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500874
875 # for all extra attributes stored, add them to the dependency tree
876 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600877 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500878
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600879 if taskfn not in seen_fns:
880 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500881
882 depend_tree["depends"][pn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500883 for dep in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500884 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600885 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
886 fn_provider = taskdata[mc].build_targets[dep][0]
887 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500888 else:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500889 pn_provider = dep
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600890 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500891 depend_tree["depends"][pn].append(pn_provider)
892
893 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600894 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500895 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600896 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
897 fn_rprovider = taskdata[mc].run_targets[rdep][0]
898 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500899 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600900 pn_rprovider = rdep
901 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500902 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
903
904 depend_tree["rdepends-pkg"].update(rdepends)
905 depend_tree["rrecs-pkg"].update(rrecs)
906
907 return depend_tree
908
909 def generateDepTreeEvent(self, pkgs_to_build, task):
910 """
911 Create a task dependency graph of pkgs_to_build.
912 Generate an event with the result
913 """
914 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
915 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
916
917 def generateDotGraphFiles(self, pkgs_to_build, task):
918 """
919 Create a task dependency graph of pkgs_to_build.
920 Save the result to a set of .dot files.
921 """
922
923 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
924
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500925 with open('pn-buildlist', 'w') as f:
926 for pn in depgraph["pn"]:
927 f.write(pn + "\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500928 logger.info("PN build list saved to 'pn-buildlist'")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500929
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500930 # Remove old format output files to ensure no confusion with stale data
931 try:
932 os.unlink('pn-depends.dot')
933 except FileNotFoundError:
934 pass
935 try:
936 os.unlink('package-depends.dot')
937 except FileNotFoundError:
938 pass
Brad Bishop79641f22019-09-10 07:20:22 -0400939 try:
940 os.unlink('recipe-depends.dot')
941 except FileNotFoundError:
942 pass
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500943
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500944 with open('task-depends.dot', 'w') as f:
945 f.write("digraph depends {\n")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400946 for task in sorted(depgraph["tdepends"]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500947 (pn, taskname) = task.rsplit(".", 1)
948 fn = depgraph["pn"][pn]["filename"]
949 version = depgraph["pn"][pn]["version"]
950 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400951 for dep in sorted(depgraph["tdepends"][task]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500952 f.write('"%s" -> "%s"\n' % (task, dep))
953 f.write("}\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500954 logger.info("Task dependencies saved to 'task-depends.dot'")
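        # The generated task-depends.dot has one node per "<pn>.<taskname>" and one
        # edge per dependency, e.g. (illustrative):
        #   "busybox.do_compile" [label="busybox do_compile\n1:1.31.1-r0\n/path/to/busybox_1.31.1.bb"]
        #   "busybox.do_compile" -> "busybox.do_configure"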

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1221
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001222 # Recursively work out collection priorities based on dependencies
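# A layer with no explicit BBFILE_PRIORITY ends up with 1 + max(min_prio, highest
# priority among its dependencies); e.g. (hypothetical values) with min_prio 5 and a
# dependency at priority 7, the calculated priority is 8.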
1223 def calc_layer_priority(collection):
1224 if not collection_priorities[collection]:
1225 max_depprio = min_prio
1226 for dep in collection_depends[collection]:
1227 calc_layer_priority(dep)
1228 depprio = collection_priorities[dep]
1229 if depprio > max_depprio:
1230 max_depprio = depprio
1231 max_depprio += 1
1232 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1233 collection_priorities[collection] = max_depprio
1234
1235 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1236 for c in collection_list:
1237 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001238 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001239 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001240 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1241 errors = True
1242 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001243 elif regex == "":
1244 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001245 cre = re.compile('^NULL$')
1247 else:
1248 try:
1249 cre = re.compile(regex)
1250 except re.error:
1251 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1252 errors = True
1253 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001254 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001255 if errors:
1256 # We've already printed the actual error(s)
1257 raise CollectionError("Errors during parsing layer configuration")
1258
1259 def buildSetVars(self):
1260 """
1261 Setup any variables needed before starting a build
1262 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001263 t = time.gmtime()
1264 for mc in self.databuilder.mcdata:
1265 ds = self.databuilder.mcdata[mc]
1266 if not ds.getVar("BUILDNAME", False):
1267 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1268 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1269 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1270 ds.setVar("TIME", time.strftime('%H%M%S', t))
1271
1272 def reset_mtime_caches(self):
1273 """
1274 Reset mtime caches - this is particularly important when memory resident, as something
1275 which is cached may well have changed since the last invocation (e.g. a
1276 file associated with a recipe might have been modified by the user).
1277 """
1278 build.reset_cache()
1279 bb.fetch._checksum_cache.mtime_cache.clear()
1280 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1281 if siggen_cache:
1282 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001283
Andrew Geissler5a43b432020-06-13 10:46:56 -05001284 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001285 """
1286 Find the .bb files which match the expression in 'buildfile'.
1287 """
1288 if bf.startswith("/") or bf.startswith("../"):
1289 bf = os.path.abspath(bf)
1290
Andrew Geissler5a43b432020-06-13 10:46:56 -05001291 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1292 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001293 try:
1294 os.stat(bf)
1295 bf = os.path.abspath(bf)
1296 return [bf]
1297 except OSError:
1298 regexp = re.compile(bf)
1299 matches = []
1300 for f in filelist:
1301 if regexp.search(f) and os.path.isfile(f):
1302 matches.append(f)
1303 return matches
1304
Andrew Geissler5a43b432020-06-13 10:46:56 -05001305 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001306 """
1307 Find the .bb file which matches the expression in 'buildfile'.
1308 Raise an error if no file or multiple files match.
1309 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001310 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001311 if len(matches) != 1:
1312 if matches:
1313 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1315 for f in matches:
1316 msg += "\n %s" % f
1317 parselog.error(msg)
1318 else:
1319 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1320 raise NoSpecificMatch
1321 return matches[0]
1322
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001323 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001324 """
1325 Build the file matching regexp buildfile
1326 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001327 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001328
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001329 # Too many people use -b because they think it's how you normally
1330 # specify a target to be built, so show a warning
1331 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1332
1333 self.buildFileInternal(buildfile, task)
1334
1335 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1336 """
1337 Build the file matching regexp buildfile
1338 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001339
1340 # Parse the configuration here. We need to do it explicitly here since
1341 # buildFile() doesn't use the cache
1342 self.parseConfiguration()
1343
1344 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001345 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001346 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001347 if not task.startswith("do_"):
1348 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001349
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001350 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001351 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001352
1353 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001354 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001355
Andrew Geissler5a43b432020-06-13 10:46:56 -05001356 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001357
Andrew Geissler5a43b432020-06-13 10:46:56 -05001358 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001359 infos = dict(infos)
1360
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001361 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001362 try:
1363 info_array = infos[fn]
1364 except KeyError:
1365 bb.fatal("%s does not exist" % fn)
1366
1367 if info_array[0].skipped:
1368 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1369
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001370 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001371
1372 # Tweak some variables
1373 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001374 self.recipecaches[mc].ignored_dependencies = set()
1375 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001376 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377
1378 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001379 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1380 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001381 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1382 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383
1384 # Invalidate task for target if force mode active
1385 if self.configuration.force:
1386 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001387 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001388
1389 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001390 taskdata = {}
1391 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001392 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001393
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001394 if quietlog:
1395 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1396 bb.runqueue.logger.setLevel(logging.WARNING)
1397
1398 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1399 if fireevents:
1400 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001401
1402 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001403 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001404
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001405 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001406
1407 def buildFileIdle(server, rq, abort):
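# Idle handler driving the runqueue for the single-recipe build: honours shutdown and
# forceshutdown requests, fires BuildCompleted when appropriate and finishes the async
# command once the runqueue is done.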
1408
1409 msg = None
1410 interrupted = 0
1411 if abort or self.state == state.forceshutdown:
1412 rq.finish_runqueue(True)
1413 msg = "Forced shutdown"
1414 interrupted = 2
1415 elif self.state == state.shutdown:
1416 rq.finish_runqueue(False)
1417 msg = "Stopped build"
1418 interrupted = 1
1419 failures = 0
1420 try:
1421 retval = rq.execute_runqueue()
1422 except runqueue.TaskFailure as exc:
1423 failures += len(exc.args)
1424 retval = False
1425 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001426 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001427 if quietlog:
1428 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001429 return False
1430
1431 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001432 if fireevents:
1433 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001434 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001435 # We trashed self.recipecaches above
1436 self.parsecache_valid = False
1437 self.configuration.limited_deps = False
1438 bb.parse.siggen.reset(self.data)
1439 if quietlog:
1440 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001441 return False
1442 if retval is True:
1443 return True
1444 return retval
1445
Andrew Geissler635e0e42020-08-21 15:58:33 -05001446 self.idleCallBackRegister(buildFileIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001447
1448 def buildTargets(self, targets, task):
1449 """
1450 Attempt to build the targets specified
1451 """
1452
1453 def buildTargetsIdle(server, rq, abort):
1454 msg = None
1455 interrupted = 0
1456 if abort or self.state == state.forceshutdown:
1457 rq.finish_runqueue(True)
1458 msg = "Forced shutdown"
1459 interrupted = 2
1460 elif self.state == state.shutdown:
1461 rq.finish_runqueue(False)
1462 msg = "Stopped build"
1463 interrupted = 1
1464 failures = 0
1465 try:
1466 retval = rq.execute_runqueue()
1467 except runqueue.TaskFailure as exc:
1468 failures += len(exc.args)
1469 retval = False
1470 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001471 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001472 return False
1473
1474 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001475 try:
1476 for mc in self.multiconfigs:
1477 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1478 finally:
1479 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001480 return False
1481 if retval is True:
1482 return True
1483 return retval
1484
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001485 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001486 self.buildSetVars()
1487
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001488 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001489 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001490 task = self.configuration.cmd
1491
1492 if not task.startswith("do_"):
1493 task = "do_%s" % task
1494
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001495 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1496
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001497 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001498
1499 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001500
1501 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001502
1503 # make targets to always look as <target>:do_<task>
1504 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001505 for target in runlist:
1506 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001507 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001508 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001509
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001510 for mc in self.multiconfigs:
1511 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001512
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001513 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514 if 'universe' in targets:
1515 rq.rqdata.warn_multi_bb = True
1516
Andrew Geissler635e0e42020-08-21 15:58:33 -05001517 self.idleCallBackRegister(buildTargetsIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001518
1519
1520 def getAllKeysWithFlags(self, flaglist):
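# Dump every non-internal variable with its value, history and the requested flags;
# python function bodies are deliberately left unexpanded.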
1521 dump = {}
1522 for k in self.data.keys():
1523 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001524 expand = True
1525 flags = self.data.getVarFlags(k)
1526 if flags and "func" in flags and "python" in flags:
1527 expand = False
1528 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1530 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001531 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532 'history' : self.data.varhistory.variable(k),
1533 }
1534 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001535 if flags and d in flags:
1536 dump[k][d] = flags[d]
1537 else:
1538 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001539 except Exception as e:
1540 print(e)
1541 return dump
1542
1543
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001544 def updateCacheSync(self):
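# Lightweight refresh used outside of a full parse: drop cached parse data for files
# reported by inotify and reload the base configuration if it has been invalidated.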
1545 if self.state == state.running:
1546 return
1547
1548 # reload files for which we got notifications
1549 for p in self.inotify_modified_files:
1550 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001551 if p in bb.parse.BBHandler.cached_statements:
1552 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001553 self.inotify_modified_files = []
1554
1555 if not self.baseconfig_valid:
1556 logger.debug(1, "Reloading base configuration data")
1557 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001558 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001559
1560 # This is called for all async commands when self.state != running
1561 def updateCache(self):
1562 if self.state == state.running:
1563 return
1564
1565 if self.state in (state.shutdown, state.forceshutdown, state.error):
1566 if hasattr(self.parser, 'shutdown'):
1567 self.parser.shutdown(clean=False, force = True)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001568 self.parser.final_cleanup()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001569 raise bb.BBHandledException()
1570
1571 if self.state != state.parsing:
1572 self.updateCacheSync()
1573
1574 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001575 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001576 self.parseConfiguration ()
1577 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001578 for mc in self.multiconfigs:
1579 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001580
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001581 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001582 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001583 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001584
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001585 for dep in self.configuration.extra_assume_provided:
1586 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001587
Andrew Geissler5a43b432020-06-13 10:46:56 -05001588 self.collections = {}
1589
1590 mcfilelist = {}
1591 total_masked = 0
1592 searchdirs = set()
1593 for mc in self.multiconfigs:
1594 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1595 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1596
1597 mcfilelist[mc] = filelist
1598 total_masked += masked
1599 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001600
1601 # Add inotify watches for directories searched for bb/bbappend files
1602 for dirent in searchdirs:
1603 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604
Andrew Geissler5a43b432020-06-13 10:46:56 -05001605 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 self.parsecache_valid = True
1607
1608 self.state = state.parsing
1609
1610 if not self.parser.parse_next():
1611 collectlog.debug(1, "parsing complete")
1612 if self.parser.error:
1613 raise bb.BBHandledException()
1614 self.show_appends_with_no_recipes()
1615 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001616 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001617 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618 self.state = state.running
1619
1620 # Send an event listing all stamps reachable after parsing
1621 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001622 for mc in self.multiconfigs:
1623 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1624 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001625 return None
1626
1627 return True
1628
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001629 def checkPackages(self, pkgs_to_build, task=None):
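# Normalise the requested targets: warn about anything listed in ASSUME_PROVIDED,
# rewrite the old 'multiconfig:' prefix to 'mc:' and expand the special 'world' and
# 'universe' targets for each multiconfig.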
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001630
1631 # Return a copy, don't modify the original
1632 pkgs_to_build = pkgs_to_build[:]
1633
1634 if len(pkgs_to_build) == 0:
1635 raise NothingToBuild
1636
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001637 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001638 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001639 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001640 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001641 if pkg.startswith("multiconfig:"):
1642 pkgs_to_build.remove(pkg)
1643 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644
1645 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001646 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001647 for mc in self.multiconfigs:
1648 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1649 for t in self.recipecaches[mc].world_target:
1650 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001651 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001652 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001653
1654 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001655 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001656 parselog.debug(1, "collating packages for \"universe\"")
1657 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001658 for mc in self.multiconfigs:
1659 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001660 if task:
1661 foundtask = False
1662 for provider_fn in self.recipecaches[mc].providers[t]:
1663 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1664 foundtask = True
1665 break
1666 if not foundtask:
1667 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1668 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001669 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001670 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001671 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001672
1673 return pkgs_to_build
1674
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001675 def pre_serve(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001676 return
1677
1678 def post_serve(self):
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001679 self.shutdown(force=True)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001680 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001681 if self.hashserv:
1682 self.hashserv.process.terminate()
1683 self.hashserv.process.join()
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001684 if hasattr(self, "data"):
1685 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001686
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001687 def shutdown(self, force = False):
1688 if force:
1689 self.state = state.forceshutdown
1690 else:
1691 self.state = state.shutdown
1692
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001693 if self.parser:
1694 self.parser.shutdown(clean=not force, force=force)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001695 self.parser.final_cleanup()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001696
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001697 def finishcommand(self):
1698 self.state = state.initial
1699
1700 def reset(self):
1701 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001702 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001703
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001704 def clientComplete(self):
1705 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001706 self.finishcommand()
1707 self.extraconfigdata = {}
1708 self.command.reset()
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001709 if hasattr(self, "data"):
1710 self.databuilder.reset()
1711 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001712 self.parsecache_valid = False
1713 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001714
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001715
1716class CookerExit(bb.event.Event):
1717 """
1718 Notify clients of the Cooker shutdown
1719 """
1720
1721 def __init__(self):
1722 bb.event.Event.__init__(self)
1723
1724
1725class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001726 def __init__(self, priorities, mc=''):
1727 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001728 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001729 # Priorities is a list of tuples, with the second element as the pattern.
1730 # We need to sort the list with the longest pattern first, and so on to
1731 # the shortest. This allows nested layers to be properly evaluated.
1732 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001733
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001734 def calc_bbfile_priority(self, filename):
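# Return (priority, regex) for the first collection pattern matching 'filename'; the
# patterns are sorted longest-first in __init__, and (0, None) means no match.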
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001735 for _, _, regex, pri in self.bbfile_config_priorities:
1736 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001737 return pri, regex
1738 return 0, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001739
1740 def get_bbfiles(self):
1741 """Get list of default .bb files by reading out the current directory"""
1742 path = os.getcwd()
1743 contents = os.listdir(path)
1744 bbfiles = []
1745 for f in contents:
1746 if f.endswith(".bb"):
1747 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1748 return bbfiles
1749
1750 def find_bbfiles(self, path):
1751 """Find all the .bb and .bbappend files in a directory"""
1752 found = []
1753 for dir, dirs, files in os.walk(path):
1754 for ignored in ('SCCS', 'CVS', '.svn'):
1755 if ignored in dirs:
1756 dirs.remove(ignored)
1757 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1758
1759 return found
1760
1761 def collect_bbfiles(self, config, eventdata):
1762 """Collect all available .bb build files"""
1763 masked = 0
1764
1765 collectlog.debug(1, "collecting .bb files")
1766
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001767 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001768
1769 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001770 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001771 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001772
1773 if not len(files):
1774 files = self.get_bbfiles()
1775
1776 if not len(files):
1777 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1778 bb.event.fire(CookerExit(), eventdata)
1779
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001780 # We need to track where we look so that we can add inotify watches. There
1781 # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001782 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001783 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001784 if hasattr(os, 'scandir'):
1785 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001786 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001787
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001788 def ourlistdir(d):
1789 searchdirs.append(d)
1790 return origlistdir(d)
1791
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001792 def ourscandir(d):
1793 searchdirs.append(d)
1794 return origscandir(d)
1795
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001796 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001797 if hasattr(os, 'scandir'):
1798 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001799 try:
1800 # Can't use set here as order is important
1801 newfiles = []
1802 for f in files:
1803 if os.path.isdir(f):
1804 dirfiles = self.find_bbfiles(f)
1805 for g in dirfiles:
1806 if g not in newfiles:
1807 newfiles.append(g)
1808 else:
1809 globbed = glob.glob(f)
1810 if not globbed and os.path.exists(f):
1811 globbed = [f]
1812 # glob gives files in order on disk. Sort to be deterministic.
1813 for g in sorted(globbed):
1814 if g not in newfiles:
1815 newfiles.append(g)
1816 finally:
1817 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001818 if hasattr(os, 'scandir'):
1819 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001820
1821 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001822
1823 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001824 # First validate the individual regular expressions and ignore any
1825 # that do not compile
1826 bbmasks = []
1827 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001828 # When constructing an older style single regex, it's possible for BBMASK
1829 # to end up beginning with '|', which matches and masks _everything_.
1830 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001831 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001832 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001833 try:
1834 re.compile(mask)
1835 bbmasks.append(mask)
1836 except sre_constants.error:
1837 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1838
1839 # Then validate the combined regular expressions. This should never
1840 # fail, but better safe than sorry...
1841 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001842 try:
1843 bbmask_compiled = re.compile(bbmask)
1844 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001845 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1846 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001847
1848 bbfiles = []
1849 bbappend = []
1850 for f in newfiles:
1851 if bbmask and bbmask_compiled.search(f):
1852 collectlog.debug(1, "skipping masked file %s", f)
1853 masked += 1
1854 continue
1855 if f.endswith('.bb'):
1856 bbfiles.append(f)
1857 elif f.endswith('.bbappend'):
1858 bbappend.append(f)
1859 else:
1860 collectlog.debug(1, "skipping %s: unknown file extension", f)
1861
1862 # Build a list of .bbappend files for each .bb file
1863 for f in bbappend:
1864 base = os.path.basename(f).replace('.bbappend', '.bb')
1865 self.bbappends.append((base, f))
1866
1867 # Find overlayed recipes
1868 # bbfiles will be in priority order which makes this easy
1869 bbfile_seen = dict()
1870 self.overlayed = defaultdict(list)
1871 for f in reversed(bbfiles):
1872 base = os.path.basename(f)
1873 if base not in bbfile_seen:
1874 bbfile_seen[base] = f
1875 else:
1876 topfile = bbfile_seen[base]
1877 self.overlayed[topfile].append(f)
1878
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001879 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001880
1881 def get_file_appends(self, fn):
1882 """
1883 Returns a list of .bbappend files to apply to fn
1884 """
1885 filelist = []
1886 f = os.path.basename(fn)
1887 for b in self.bbappends:
1888 (bbappend, filename) = b
1889 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1890 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001891 return tuple(filelist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001892
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001893 def collection_priorities(self, pkgfns, fns, d):
1894 # Return the priorities of the entries in pkgfns
1895 # Also check that all the regexes in self.bbfile_config_priorities are used
1896 # (but to do that we need to ensure skipped recipes aren't counted, nor
1897 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001898
1899 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001900 seen = set()
1901 matched = set()
1902
1903 matched_regex = set()
1904 unmatched_regex = set()
1905 for _, _, regex, _ in self.bbfile_config_priorities:
1906 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001907
1908 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001909 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001910 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001911 priorities[p], regex = self.calc_bbfile_priority(realfn)
1912 if regex in unmatched_regex:
1913 matched_regex.add(regex)
1914 unmatched_regex.remove(regex)
1915 seen.add(realfn)
1916 if regex:
1917 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001918
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001919 if unmatched_regex:
1920 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001921 for b in self.bbappends:
1922 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001923 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001924
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001925 # Account for skipped recipes
1926 seen.update(fns)
1927
1928 seen.difference_update(matched)
1929
1930 def already_matched(fn):
1931 for regex in matched_regex:
1932 if regex.match(fn):
1933 return True
1934 return False
1935
1936 for unmatch in unmatched_regex.copy():
1937 for fn in seen:
1938 if unmatch.match(fn):
1939 # If the bbappend or file was already matched by another regex, skip it
1940 # e.g. for a layer within a layer, the outer regex could match, the inner
1941 # regex may match nothing and we should warn about that
1942 if already_matched(fn):
1943 continue
1944 unmatched_regex.remove(unmatch)
1945 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001946
1947 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001948 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001949 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05001950 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1951 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001952
1953 return priorities
1954
1955class ParsingFailure(Exception):
1956 def __init__(self, realexception, recipe):
1957 self.realexception = realexception
1958 self.recipe = recipe
1959 Exception.__init__(self, realexception, recipe)
1960
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001961class Parser(multiprocessing.Process):
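# Worker process for recipe parsing: pops (mc, cache, filename, appends) jobs from
# 'jobs', pushes results (or wrapped exceptions) onto the 'results' queue and exits
# when the 'quit' queue signals shutdown or the job list is exhausted.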
1962 def __init__(self, jobs, results, quit, init, profile):
1963 self.jobs = jobs
1964 self.results = results
1965 self.quit = quit
1966 self.init = init
1967 multiprocessing.Process.__init__(self)
1968 self.context = bb.utils.get_context().copy()
1969 self.handlers = bb.event.get_class_handlers().copy()
1970 self.profile = profile
1971
1972 def run(self):
1973
1974 if not self.profile:
1975 self.realrun()
1976 return
1977
1978 try:
1979 import cProfile as profile
1980 except:
1981 import profile
1982 prof = profile.Profile()
1983 try:
1984 profile.Profile.runcall(prof, self.realrun)
1985 finally:
1986 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1987 prof.dump_stats(logfile)
1988
1989 def realrun(self):
1990 if self.init:
1991 self.init()
1992
1993 pending = []
1994 while True:
1995 try:
1996 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001997 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001998 pass
1999 else:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002000 self.results.close()
2001 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002 break
2003
2004 if pending:
2005 result = pending.pop()
2006 else:
2007 try:
Brad Bishop19323692019-04-05 15:28:33 -04002008 job = self.jobs.pop()
2009 except IndexError:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002010 self.results.close()
2011 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002012 break
2013 result = self.parse(*job)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002014 # Clear the siggen cache after parsing to control memory usage; it's huge
2015 bb.parse.siggen.postparsing_clean_cache()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002016 try:
2017 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002018 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002019 pending.append(result)
2020
Andrew Geissler5a43b432020-06-13 10:46:56 -05002021 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002022 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05002023 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002024 # Record the filename we're parsing into any events generated
2025 def parse_filter(self, record):
2026 record.taskpid = bb.event.worker_pid
2027 record.fn = filename
2028 return True
2029
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002030 # Reset our environment and handlers to the original settings
2031 bb.utils.set_context(self.context.copy())
2032 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002033 bb.event.LogHandler.filter = parse_filter
2034
Andrew Geissler5a43b432020-06-13 10:46:56 -05002035 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002036 except Exception as exc:
2037 tb = sys.exc_info()[2]
2038 exc.recipe = filename
2039 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2040 return True, exc
2041 # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
2042 # and, for example, a worker thread doesn't just exit on its own in response to
2043 # a SystemExit event.
2044 except BaseException as exc:
2045 return True, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002046 finally:
2047 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002048
2049class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002050 def __init__(self, cooker, mcfilelist, masked):
2051 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002052 self.cooker = cooker
2053 self.cfgdata = cooker.data
2054 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002055 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002056
2057 # Accounting statistics
2058 self.parsed = 0
2059 self.cached = 0
2060 self.error = 0
2061 self.masked = masked
2062
2063 self.skipped = 0
2064 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002065
2066 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002067 self.process_names = []
2068
Andrew Geissler5a43b432020-06-13 10:46:56 -05002069 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2070 self.fromcache = set()
2071 self.willparse = set()
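# Partition the recipes: files with a valid cache entry go to 'fromcache', everything
# else is queued in 'willparse' for the parser processes.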
2072 for mc in self.cooker.multiconfigs:
2073 for filename in self.mcfilelist[mc]:
2074 appends = self.cooker.collections[mc].get_file_appends(filename)
2075 if not self.bb_caches[mc].cacheValid(filename, appends):
2076 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2077 else:
2078 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2079
2080 self.total = len(self.fromcache) + len(self.willparse)
2081 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002082 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002083
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002084 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002085 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002086
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002087 self.start()
2088 self.haveshutdown = False
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002089 self.syncthread = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002090
2091 def start(self):
2092 self.results = self.load_cached()
2093 self.processes = []
2094 if self.toparse:
2095 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2096 def init():
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002097 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2098 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2099 signal.signal(signal.SIGINT, signal.SIG_IGN)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002100 bb.utils.set_process_name(multiprocessing.current_process().name)
2101 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2102 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002103
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002104 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002105 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002106
2107 def chunkify(lst,n):
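# Round-robin split of the job list into n roughly equal chunks,
# e.g. chunkify([1, 2, 3, 4, 5], 2) -> [[1, 3, 5], [2, 4]].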
2108 return [lst[i::n] for i in range(n)]
Andrew Geissler5a43b432020-06-13 10:46:56 -05002109 self.jobs = chunkify(list(self.willparse), self.num_processes)
Brad Bishop19323692019-04-05 15:28:33 -04002110
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002111 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002112 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002113 parser.start()
2114 self.process_names.append(parser.name)
2115 self.processes.append(parser)
2116
2117 self.results = itertools.chain(self.results, self.parse_generator())
2118
2119 def shutdown(self, clean=True, force=False):
2120 if not self.toparse:
2121 return
2122 if self.haveshutdown:
2123 return
2124 self.haveshutdown = True
2125
2126 if clean:
2127 event = bb.event.ParseCompleted(self.cached, self.parsed,
2128 self.skipped, self.masked,
2129 self.virtuals, self.error,
2130 self.total)
2131
2132 bb.event.fire(event, self.cfgdata)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002133
2134 for process in self.processes:
2135 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002136
Brad Bishop08902b02019-08-20 09:16:51 -04002137 # Clean up the queue before calling process.join(), otherwise there might be
2138 # deadlocks.
2139 while True:
2140 try:
2141 self.result_queue.get(timeout=0.25)
2142 except queue.Empty:
2143 break
2144
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002145 for process in self.processes:
2146 if force:
2147 process.join(.1)
2148 process.terminate()
2149 else:
2150 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002151
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002152 self.parser_quit.close()
2153 # Allow data left in the cancel queue to be discarded
2154 self.parser_quit.cancel_join_thread()
2155
Andrew Geissler5a43b432020-06-13 10:46:56 -05002156 def sync_caches():
2157 for c in self.bb_caches.values():
2158 c.sync()
2159
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002160 sync = threading.Thread(target=sync_caches, name="SyncThread")
2161 self.syncthread = sync
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002162 sync.start()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002163 bb.codeparser.parser_cache_savemerge()
2164 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002165 if self.cooker.configuration.profile:
2166 profiles = []
2167 for i in self.process_names:
2168 logfile = "profile-parse-%s.log" % i
2169 if os.path.exists(logfile):
2170 profiles.append(logfile)
2171
2172 pout = "profile-parse.log.processed"
2173 bb.utils.process_profilelog(profiles, pout = pout)
2174 print("Processed parsing statistics saved to %s" % (pout))
2175
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002176 def final_cleanup(self):
2177 if self.syncthread:
2178 self.syncthread.join()
2179
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002180 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002181 for mc, cache, filename, appends in self.fromcache:
2182 cached, infos = cache.load(filename, appends)
2183 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002184
2185 def parse_generator(self):
2186 while True:
2187 if self.parsed >= self.toparse:
2188 break
2189
2190 try:
2191 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002192 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002193 pass
2194 else:
2195 value = result[1]
2196 if isinstance(value, BaseException):
2197 raise value
2198 else:
2199 yield result
2200
2201 def parse_next(self):
2202 result = []
2203 parsed = None
2204 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002205 parsed, mc, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002206 except StopIteration:
2207 self.shutdown()
2208 return False
2209 except bb.BBHandledException as exc:
2210 self.error += 1
2211 logger.error('Failed to parse recipe: %s' % exc.recipe)
2212 self.shutdown(clean=False)
2213 return False
2214 except ParsingFailure as exc:
2215 self.error += 1
2216 logger.error('Unable to parse %s: %s' %
2217 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2218 self.shutdown(clean=False)
2219 return False
2220 except bb.parse.ParseError as exc:
2221 self.error += 1
2222 logger.error(str(exc))
2223 self.shutdown(clean=False)
2224 return False
2225 except bb.data_smart.ExpansionError as exc:
2226 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002227 bbdir = os.path.dirname(__file__) + os.sep
2228 etype, value, _ = sys.exc_info()
2229 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2230 logger.error('ExpansionError during parsing %s', value.recipe,
2231 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002232 self.shutdown(clean=False)
2233 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002234 except Exception as exc:
2235 self.error += 1
2236 etype, value, tb = sys.exc_info()
2237 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002238 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002239 exc_info=(etype, value, exc.traceback))
2240 else:
2241 # Most likely, an exception occurred during raising an exception
2242 import traceback
2243 logger.error('Exception during parse: %s' % traceback.format_exc())
2244 self.shutdown(clean=False)
2245 return False
2246
2247 self.current += 1
2248 self.virtuals += len(result)
2249 if parsed:
2250 self.parsed += 1
2251 if self.parsed % self.progress_chunk == 0:
2252 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2253 self.cfgdata)
2254 else:
2255 self.cached += 1
2256
2257 for virtualfn, info_array in result:
2258 if info_array[0].skipped:
2259 self.skipped += 1
2260 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002261 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002262 parsed=parsed, watcher = self.cooker.add_filewatch)
2263 return True
2264
2265 def reparse(self, filename):
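# Force a fresh parse of 'filename' (with its appends) for every multiconfig and feed
# the results straight back into the recipe caches.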
Andrew Geissler5a43b432020-06-13 10:46:56 -05002266 to_reparse = set()
2267 for mc in self.cooker.multiconfigs:
2268 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2269
2270 for mc, filename, appends in to_reparse:
2271 infos = self.bb_caches[mc].parse(filename, appends)
2272 for vfn, info_array in infos:
2273 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)