#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


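# Usage sketch for the state class above (illustrative only, not part of the
# module): the state codes are plain integers and get_name() maps a code back
# to its symbolic name, e.g.
#
#   state.get_name(state.running)   # -> "running"
#   state.get_name(42)              # raises ValueError("Invalid status code: 42")
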
class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


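# Usage sketch for CookerFeatures above (illustrative only): features are
# requested by integer id and unknown ids are silently ignored by setFeature(),
# e.g.
#
#   features = CookerFeatures()
#   features.setFeature(CookerFeatures.BASEDATASTORE_TRACKING)
#   CookerFeatures.BASEDATASTORE_TRACKING in features   # -> True
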
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

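# Informal sketch of how EventWriter is used (not a public API): it is created
# in BBCooker.updateConfigOpts() and registered via bb.event.register_UIHhandler();
# events are queued until a BuildStarted (or CookerExit) event arrives, after
# which each event is appended to the log file as one JSON line, e.g.
#
#   writer = EventWriter(cooker, "eventlog.json")   # 'cooker' is a BBCooker
#   writer.send(some_event)                         # queued until BuildStarted
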
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, halt):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                if event.pathname in self.configwatcher.bbseen:
                    self.configwatcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
            if "IN_CREATE" in event.maskname:
                self.add_filewatch([[event.pathname]], watcher=self.configwatcher, dirs=True)
            elif "IN_DELETE" in event.maskname and event.pathname in self.configwatcher.bbseen:
                self.configwatcher.bbseen.remove(event.pathname)
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                if event.pathname in self.watcher.bbseen:
                    self.watcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
            if "IN_CREATE" in event.maskname:
                self.add_filewatch([[event.pathname]], dirs=True)
            elif "IN_DELETE" in event.maskname and event.pathname in self.watcher.bbseen:
                self.watcher.bbseen.remove(event.pathname)
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    try:
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

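    # Note on handlePRServ() above (informal summary of the flow): with
    # BB_HASHSERVE set to "auto" the cooker starts a local hash equivalence
    # server on a unix domain socket under TOPDIR, optionally chained to
    # BB_HASHSERVE_UPSTREAM, and rewrites BB_HASHSERVE to that "unix://..."
    # address in every datastore so sstate code talks to the local server.
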
    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, halt):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                        else:
                            logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

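    # Note on buildTaskData() above (informal): it returns the per-multiconfig
    # TaskData objects plus a runlist whose entries have the form
    # [mc, target, taskname, providing_fn], e.g. ['', <target>, 'do_build', <recipe filename>]
    # for a plain single-config build.
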
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set halt to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

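    # Illustrative examples for add_mc_prefix() above:
    #   add_mc_prefix("mc1", "bash") -> "mc:mc1:bash"
    #   add_mc_prefix("", "bash")    -> "bash"
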
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

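    # Shape of the depend_tree returned by buildDependTree() above (informal
    # sketch): a dict of dicts, e.g. depend_tree["pn"][<pn>] holds
    # "filename"/"version"/"inherits", depend_tree["tdepends"]["<pn>.<task>"]
    # lists "<pn>.<task>" strings, and depend_tree["providermap"][<name>]
    # maps to a (pn, version) tuple.
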
    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                                                           False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

Brad Bishop6e60e8b2018-02-01 10:27:11 -05001118 def findProviders(self, mc=''):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001119 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001120
1121 def findBestProvider(self, pn, mc=''):
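# Return convention used below: callers always receive a 4-tuple of
# (latest_version, latest_file, preferred_version, preferred_file); when pn is
# only known as an explicit provider (first branch) just the final filename
# slot is populated, and (None, None, None, None) means no usable provider
# was found.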
1122 if pn in self.recipecaches[mc].providers:
1123 filenames = self.recipecaches[mc].providers[pn]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001124 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001125 if eligible is not None:
1126 filename = eligible[0]
1127 else:
1128 filename = None
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001129 return None, None, None, filename
1130 elif pn in self.recipecaches[mc].pkg_pn:
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001131 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1132 if required and preferred_file is None:
1133 return None, None, None, None
1134 return (latest, latest_f, preferred_ver, preferred_file)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001135 else:
1136 return None, None, None, None
1137
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001138 def findConfigFiles(self, varname):
1139 """
1140 Find config files which are appropriate values for varname.
1141 e.g. MACHINE, DISTRO
1142 """
1143 possible = []
1144 var = varname.lower()
1145
1146 data = self.data
1147 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001148 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001149 for path in bbpaths:
1150 confpath = os.path.join(path, "conf", var)
1151 if os.path.exists(confpath):
1152 for root, dirs, files in os.walk(confpath):
1153 # get all child files, these are appropriate values
1154 for f in files:
1155 val, sep, end = f.rpartition('.')
1156 if end == 'conf':
1157 possible.append(val)
1158
1159 if possible:
1160 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
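# Illustrative example (hypothetical file): findConfigFiles("MACHINE") walks
# conf/machine/ under every BBPATH entry, so a file named
# conf/machine/qemux86-64.conf contributes the candidate value "qemux86-64"
# to the ConfigFilesFound event fired above.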
1161
1162 def findInheritsClass(self, klass):
1163 """
1164 Find all recipes which inherit the specified class
1165 """
1166 pkg_list = []
1167
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001168 for pfn in self.recipecaches[''].pkg_fn:
1169 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001170 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001171 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001172
1173 return pkg_list
1174
1175 def generateTargetsTree(self, klass=None, pkgs=None):
1176 """
1177 Generate a dependency tree of buildable targets
1178 Generate an event with the result
1179 """
1180 # if the caller hasn't specified a pkgs list default to universe
1181 if not pkgs:
1182 pkgs = ['universe']
1183 # if inherited_class passed ensure all recipes which inherit the
1184 # specified class are included in pkgs
1185 if klass:
1186 extra_pkgs = self.findInheritsClass(klass)
1187 pkgs = pkgs + extra_pkgs
1188
1189 # generate a dependency tree for all our packages
1190 tree = self.generatePkgDepTreeData(pkgs, 'build')
1191 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1192
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001193 def interactiveMode( self ):
1194 """Drop off into a shell"""
1195 try:
1196 from bb import shell
1197 except ImportError:
1198 parselog.exception("Interactive mode not available")
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001199 raise bb.BBHandledException()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001200 else:
1201 shell.start( self )
1202
1203
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001204 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001205 """Handle collections"""
1206 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001207 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001208 if collections:
1209 collection_priorities = {}
1210 collection_depends = {}
1211 collection_list = collections.split()
1212 min_prio = 0
1213 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001214 bb.debug(1,'Processing %s in collection list' % (c))
1215
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001216 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001217 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001218 if priority:
1219 try:
1220 prio = int(priority)
1221 except ValueError:
1222 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1223 errors = True
1224 if min_prio == 0 or prio < min_prio:
1225 min_prio = prio
1226 collection_priorities[c] = prio
1227 else:
1228 collection_priorities[c] = None
1229
1230 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001231 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001232 if deps:
1233 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001234 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001235 except bb.utils.VersionStringException as vse:
1236 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001237 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001238 if dep in collection_list:
1239 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001240 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001241 (op, depver) = opstr.split()
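# Illustrative example (hypothetical layers): LAYERDEPENDS_meta-foo = "core (>= 12)"
# explodes to {"core": [">= 12"]}, so op is ">=" and depver is "12", which
# vercmp_string_op() then checks against LAYERVERSION_core below.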
1242 if layerver:
1243 try:
1244 res = bb.utils.vercmp_string_op(layerver, depver, op)
1245 except bb.utils.VersionStringException as vse:
1246 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1247 if not res:
1248 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1249 errors = True
1250 else:
1251 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1252 errors = True
1253 else:
1254 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1255 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001256 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001257 else:
1258 collection_depends[c] = []
1259
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001260 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001261 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001262 if recs:
1263 try:
1264 recDict = bb.utils.explode_dep_versions2(recs)
1265 except bb.utils.VersionStringException as vse:
1266 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1267 for rec, oplist in list(recDict.items()):
1268 if rec in collection_list:
1269 if oplist:
1270 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001271 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001272 if layerver:
1273 (op, recver) = opstr.split()
1274 try:
1275 res = bb.utils.vercmp_string_op(layerver, recver, op)
1276 except bb.utils.VersionStringException as vse:
1277 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1278 if not res:
1279 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1280 continue
1281 else:
1282 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1283 continue
1284 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1285 collection_depends[c].append(rec)
1286 else:
1287 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1288
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001289 # Recursively work out collection priorities based on dependencies
1290 def calc_layer_priority(collection):
1291 if not collection_priorities[collection]:
1292 max_depprio = min_prio
1293 for dep in collection_depends[collection]:
1294 calc_layer_priority(dep)
1295 depprio = collection_priorities[dep]
1296 if depprio > max_depprio:
1297 max_depprio = depprio
1298 max_depprio += 1
1299 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1300 collection_priorities[collection] = max_depprio
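# Illustrative example (hypothetical layers): if "meta-inner" sets no explicit
# BBFILE_PRIORITY and depends only on "core" whose priority is 5, the recursion
# above assigns meta-inner a priority of 6, i.e. one more than its
# highest-priority dependency (a layer with no dependencies gets min_prio + 1).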
1301
1302 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1303 for c in collection_list:
1304 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001305 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001306 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001307 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1308 errors = True
1309 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001310 elif regex == "":
1311 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001312 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001313 errors = False
1314 else:
1315 try:
1316 cre = re.compile(regex)
1317 except re.error:
1318 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1319 errors = True
1320 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001321 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001322 if errors:
1323 # We've already printed the actual error(s)
1324 raise CollectionError("Errors during parsing layer configuration")
1325
1326 def buildSetVars(self):
1327 """
1328 Setup any variables needed before starting a build
1329 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001330 t = time.gmtime()
1331 for mc in self.databuilder.mcdata:
1332 ds = self.databuilder.mcdata[mc]
1333 if not ds.getVar("BUILDNAME", False):
1334 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1335 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1336 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1337 ds.setVar("TIME", time.strftime('%H%M%S', t))
1338
1339 def reset_mtime_caches(self):
1340 """
1341 Reset mtime caches - this is particularly important when memory resident, as something
1342 which is cached may well have changed since the last invocation (e.g. a
1343 file associated with a recipe might have been modified by the user).
1344 """
1345 build.reset_cache()
1346 bb.fetch._checksum_cache.mtime_cache.clear()
1347 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1348 if siggen_cache:
1349 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350
Andrew Geissler5a43b432020-06-13 10:46:56 -05001351 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001352 """
1353 Find the .bb files which match the expression in 'buildfile'.
1354 """
1355 if bf.startswith("/") or bf.startswith("../"):
1356 bf = os.path.abspath(bf)
1357
Andrew Geissler5a43b432020-06-13 10:46:56 -05001358 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1359 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360 try:
1361 os.stat(bf)
1362 bf = os.path.abspath(bf)
1363 return [bf]
1364 except OSError:
1365 regexp = re.compile(bf)
1366 matches = []
1367 for f in filelist:
1368 if regexp.search(f) and os.path.isfile(f):
1369 matches.append(f)
1370 return matches
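# Illustrative example (hypothetical recipe): "bitbake -b busybox" reaches this
# point with bf = "busybox"; since that does not stat() as a file it is treated
# as a regular expression and searched against every collected .bb path, e.g.
# matching ".../recipes-core/busybox/busybox_1.36.bb".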
1371
Andrew Geissler5a43b432020-06-13 10:46:56 -05001372 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001373 """
1374 Find the .bb file which matches the expression in 'buildfile'.
1375 Raise an error if multiple files match.
1376 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001377 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378 if len(matches) != 1:
1379 if matches:
1380 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1381 if matches:
1382 for f in matches:
1383 msg += "\n %s" % f
1384 parselog.error(msg)
1385 else:
1386 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1387 raise NoSpecificMatch
1388 return matches[0]
1389
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001390 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001391 """
1392 Build the file matching regexp buildfile
1393 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001394 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001395
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001396 # Too many people use -b because they think it's how you normally
1397 # specify a target to be built, so show a warning
1398 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1399
1400 self.buildFileInternal(buildfile, task)
1401
1402 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1403 """
1404 Build the file matching regexp buildfile
1405 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001406
1407 # Parse the configuration here. We need to do it explicitly here since
1408 # buildFile() doesn't use the cache
1409 self.parseConfiguration()
1410
1411 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001412 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001413 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001414 if not task.startswith("do_"):
1415 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001416
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001417 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
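# Illustrative example (format handled by bb.cache): a virtual filename such as
# "mc:musl:virtual:native:/path/to/foo_1.0.bb" is split back into the real path
# "/path/to/foo_1.0.bb", the class "native" and the multiconfig "musl", so the
# matching cache and data store are used below.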
Andrew Geissler5a43b432020-06-13 10:46:56 -05001418 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001419
1420 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001421 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001422
Andrew Geissler5a43b432020-06-13 10:46:56 -05001423 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001424
Andrew Geissler5a43b432020-06-13 10:46:56 -05001425 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001426 infos = dict(infos)
1427
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001428 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001429 try:
1430 info_array = infos[fn]
1431 except KeyError:
1432 bb.fatal("%s does not exist" % fn)
1433
1434 if info_array[0].skipped:
1435 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1436
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001437 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001438
1439 # Tweak some variables
1440 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001441 self.recipecaches[mc].ignored_dependencies = set()
1442 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001443 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001444
1445 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001446 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1447 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001448 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1449 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001450
1451 # Invalidate task for target if force mode active
1452 if self.configuration.force:
1453 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001454 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001455
1456 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001457 taskdata = {}
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001458 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001459 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001460
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001461 if quietlog:
1462 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1463 bb.runqueue.logger.setLevel(logging.WARNING)
1464
1465 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1466 if fireevents:
1467 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001468
1469 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001470 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001471
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001472 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001473
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001474 def buildFileIdle(server, rq, halt):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001475
1476 msg = None
1477 interrupted = 0
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001478 if halt or self.state == state.forceshutdown:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001479 rq.finish_runqueue(True)
1480 msg = "Forced shutdown"
1481 interrupted = 2
1482 elif self.state == state.shutdown:
1483 rq.finish_runqueue(False)
1484 msg = "Stopped build"
1485 interrupted = 1
1486 failures = 0
1487 try:
1488 retval = rq.execute_runqueue()
1489 except runqueue.TaskFailure as exc:
1490 failures += len(exc.args)
1491 retval = False
1492 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001493 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001494 if quietlog:
1495 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001496 return False
1497
1498 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001499 if fireevents:
1500 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001501 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001502 # We trashed self.recipecaches above
1503 self.parsecache_valid = False
1504 self.configuration.limited_deps = False
1505 bb.parse.siggen.reset(self.data)
1506 if quietlog:
1507 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001508 return False
1509 if retval is True:
1510 return True
1511 return retval
1512
Andrew Geissler635e0e42020-08-21 15:58:33 -05001513 self.idleCallBackRegister(buildFileIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514
1515 def buildTargets(self, targets, task):
1516 """
1517 Attempt to build the targets specified
1518 """
1519
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001520 def buildTargetsIdle(server, rq, halt):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521 msg = None
1522 interrupted = 0
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001523 if halt or self.state == state.forceshutdown:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001524 rq.finish_runqueue(True)
1525 msg = "Forced shutdown"
1526 interrupted = 2
1527 elif self.state == state.shutdown:
1528 rq.finish_runqueue(False)
1529 msg = "Stopped build"
1530 interrupted = 1
1531 failures = 0
1532 try:
1533 retval = rq.execute_runqueue()
1534 except runqueue.TaskFailure as exc:
1535 failures += len(exc.args)
1536 retval = False
1537 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001538 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001539 return False
1540
1541 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001542 try:
1543 for mc in self.multiconfigs:
1544 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1545 finally:
1546 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001547 return False
1548 if retval is True:
1549 return True
1550 return retval
1551
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001552 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001553 self.buildSetVars()
1554
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001555 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001556 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001557 task = self.configuration.cmd
1558
1559 if not task.startswith("do_"):
1560 task = "do_%s" % task
1561
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001562 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
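# Illustrative example: with task = "do_build", a bare target such as
# "core-image-minimal" becomes "core-image-minimal:do_build", while any target
# already containing ':' (an explicit task or an mc: prefix) is passed through
# unchanged.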
1563
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001564 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001565
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001566 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567
1568 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001569
1570 # make targets always look like <target>:do_<task>
1571 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001572 for target in runlist:
1573 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001574 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001575 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001576
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001577 for mc in self.multiconfigs:
1578 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001579
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001580 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001581 if 'universe' in targets:
1582 rq.rqdata.warn_multi_bb = True
1583
Andrew Geissler635e0e42020-08-21 15:58:33 -05001584 self.idleCallBackRegister(buildTargetsIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001585
1586
1587 def getAllKeysWithFlags(self, flaglist):
1588 dump = {}
1589 for k in self.data.keys():
1590 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001591 expand = True
1592 flags = self.data.getVarFlags(k)
1593 if flags and "func" in flags and "python" in flags:
1594 expand = False
1595 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001596 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1597 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001598 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001599 'history' : self.data.varhistory.variable(k),
1600 }
1601 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001602 if flags and d in flags:
1603 dump[k][d] = flags[d]
1604 else:
1605 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 except Exception as e:
1607 print(e)
1608 return dump
1609
1610
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001611 def updateCacheSync(self):
1612 if self.state == state.running:
1613 return
1614
1615 # reload files for which we got notifications
1616 for p in self.inotify_modified_files:
1617 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001618 if p in bb.parse.BBHandler.cached_statements:
1619 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001620 self.inotify_modified_files = []
1621
1622 if not self.baseconfig_valid:
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001623 logger.debug("Reloading base configuration data")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001624 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001625 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001626
1627 # This is called for all async commands when self.state != running
1628 def updateCache(self):
1629 if self.state == state.running:
1630 return
1631
1632 if self.state in (state.shutdown, state.forceshutdown, state.error):
1633 if hasattr(self.parser, 'shutdown'):
Andrew Geissler9aee5002022-03-30 16:27:02 +00001634 self.parser.shutdown(clean=False)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001635 self.parser.final_cleanup()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001636 raise bb.BBHandledException()
1637
1638 if self.state != state.parsing:
1639 self.updateCacheSync()
1640
1641 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001642 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001643 self.parseConfiguration ()
1644 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001645 for mc in self.multiconfigs:
1646 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001647
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001648 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001649 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001650 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001651
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001652 for dep in self.configuration.extra_assume_provided:
1653 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654
Andrew Geissler5a43b432020-06-13 10:46:56 -05001655 self.collections = {}
1656
1657 mcfilelist = {}
1658 total_masked = 0
1659 searchdirs = set()
1660 for mc in self.multiconfigs:
1661 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1662 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1663
1664 mcfilelist[mc] = filelist
1665 total_masked += masked
1666 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001667
1668 # Add inotify watches for directories searched for bb/bbappend files
1669 for dirent in searchdirs:
1670 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001671
Andrew Geissler5a43b432020-06-13 10:46:56 -05001672 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001673 self.parsecache_valid = True
1674
1675 self.state = state.parsing
1676
1677 if not self.parser.parse_next():
1678 collectlog.debug(1, "parsing complete")
1679 if self.parser.error:
1680 raise bb.BBHandledException()
1681 self.show_appends_with_no_recipes()
1682 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001683 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001684 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001685 self.state = state.running
1686
1687 # Send an event listing all stamps reachable after parsing
1688 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001689 for mc in self.multiconfigs:
1690 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1691 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001692 return None
1693
1694 return True
1695
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001696 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001697
1698 # Return a copy, don't modify the original
1699 pkgs_to_build = pkgs_to_build[:]
1700
Andrew Geissler595f6302022-01-24 19:11:47 +00001701 if not pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001702 raise NothingToBuild
1703
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001704 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001705 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001706 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001707 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001708 if pkg.startswith("multiconfig:"):
1709 pkgs_to_build.remove(pkg)
1710 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
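# Illustrative example (hypothetical target): a legacy request written as
# "multiconfig:musl:zlib" is re-queued in the current "mc:musl:zlib" form.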
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001711
1712 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001713 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001714 for mc in self.multiconfigs:
1715 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1716 for t in self.recipecaches[mc].world_target:
1717 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001718 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001719 pkgs_to_build.append(t)
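# Illustrative example (hypothetical multiconfig): with BBMULTICONFIG = "musl",
# "world" expands to every buildable recipe of every configuration, and targets
# from the extra configuration are queued in prefixed form, e.g. "mc:musl:zlib".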
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001720
1721 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001722 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001723 parselog.debug(1, "collating packages for \"universe\"")
1724 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001725 for mc in self.multiconfigs:
1726 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001727 if task:
1728 foundtask = False
1729 for provider_fn in self.recipecaches[mc].providers[t]:
1730 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1731 foundtask = True
1732 break
1733 if not foundtask:
1734 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1735 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001736 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001737 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001738 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001739
1740 return pkgs_to_build
1741
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001742 def pre_serve(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001743 return
1744
1745 def post_serve(self):
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001746 self.shutdown(force=True)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001747 prserv.serv.auto_shutdown()
Patrick Williams45852732022-04-02 08:58:32 -05001748 if hasattr(bb.parse, "siggen"):
1749 bb.parse.siggen.exit()
Brad Bishop08902b02019-08-20 09:16:51 -04001750 if self.hashserv:
1751 self.hashserv.process.terminate()
1752 self.hashserv.process.join()
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001753 if hasattr(self, "data"):
1754 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001755
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001756 def shutdown(self, force = False):
1757 if force:
1758 self.state = state.forceshutdown
1759 else:
1760 self.state = state.shutdown
1761
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001762 if self.parser:
Andrew Geissler9aee5002022-03-30 16:27:02 +00001763 self.parser.shutdown(clean=not force)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001764 self.parser.final_cleanup()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001765
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001766 def finishcommand(self):
1767 self.state = state.initial
1768
1769 def reset(self):
Patrick Williams45852732022-04-02 08:58:32 -05001770 if hasattr(bb.parse, "siggen"):
1771 bb.parse.siggen.exit()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001772 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001773 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001774
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001775 def clientComplete(self):
1776 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001777 self.finishcommand()
1778 self.extraconfigdata = {}
1779 self.command.reset()
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001780 if hasattr(self, "data"):
1781 self.databuilder.reset()
1782 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001783 self.parsecache_valid = False
1784 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001785
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001786
1787class CookerExit(bb.event.Event):
1788 """
1789 Notify clients of the Cooker shutdown
1790 """
1791
1792 def __init__(self):
1793 bb.event.Event.__init__(self)
1794
1795
1796class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001797 def __init__(self, priorities, mc=''):
1798 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001799 self.bbappends = []
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001800 # Priorities is a list of tuples, with the second element as the pattern.
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001801 # We need to sort the list with the longest pattern first, and so on to
1802 # the shortest. This allows nested layers to be properly evaluated.
1803 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
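# Illustrative example (hypothetical paths): given patterns
# "^/srv/layers/meta-outer/" and "^/srv/layers/meta-outer/meta-inner/", sorting
# longest-first means a recipe under meta-inner is matched (and prioritised) by
# the nested layer's regex rather than by the enclosing layer's.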
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001804
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001805 def calc_bbfile_priority(self, filename):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001806 for _, _, regex, pri in self.bbfile_config_priorities:
1807 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001808 return pri, regex
1809 return 0, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001810
1811 def get_bbfiles(self):
1812 """Get list of default .bb files by reading out the current directory"""
1813 path = os.getcwd()
1814 contents = os.listdir(path)
1815 bbfiles = []
1816 for f in contents:
1817 if f.endswith(".bb"):
1818 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1819 return bbfiles
1820
1821 def find_bbfiles(self, path):
1822 """Find all the .bb and .bbappend files in a directory"""
1823 found = []
1824 for dir, dirs, files in os.walk(path):
1825 for ignored in ('SCCS', 'CVS', '.svn'):
1826 if ignored in dirs:
1827 dirs.remove(ignored)
1828 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1829
1830 return found
1831
1832 def collect_bbfiles(self, config, eventdata):
1833 """Collect all available .bb build files"""
1834 masked = 0
1835
1836 collectlog.debug(1, "collecting .bb files")
1837
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001838 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001839
1840 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001841 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001842 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001843
Andrew Geissler595f6302022-01-24 19:11:47 +00001844 if not files:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001845 files = self.get_bbfiles()
1846
Andrew Geissler595f6302022-01-24 19:11:47 +00001847 if not files:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001848 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1849 bb.event.fire(CookerExit(), eventdata)
1850
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001851 # We need to track where we look so that we can add inotify watches. There
1852 # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001853 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001854 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001855 if hasattr(os, 'scandir'):
1856 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001857 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001858
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001859 def ourlistdir(d):
1860 searchdirs.append(d)
1861 return origlistdir(d)
1862
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001863 def ourscandir(d):
1864 searchdirs.append(d)
1865 return origscandir(d)
1866
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001867 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001868 if hasattr(os, 'scandir'):
1869 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001870 try:
1871 # Can't use set here as order is important
1872 newfiles = []
1873 for f in files:
1874 if os.path.isdir(f):
1875 dirfiles = self.find_bbfiles(f)
1876 for g in dirfiles:
1877 if g not in newfiles:
1878 newfiles.append(g)
1879 else:
1880 globbed = glob.glob(f)
1881 if not globbed and os.path.exists(f):
1882 globbed = [f]
1883 # glob gives files in order on disk. Sort to be deterministic.
1884 for g in sorted(globbed):
1885 if g not in newfiles:
1886 newfiles.append(g)
1887 finally:
1888 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001889 if hasattr(os, 'scandir'):
1890 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001891
1892 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001893
1894 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001895 # First validate the individual regular expressions and ignore any
1896 # that do not compile
1897 bbmasks = []
1898 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001899 # When constructing an older style single regex, it's possible for BBMASK
1900 # to end up beginning with '|', which matches and masks _everything_.
1901 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001902 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001903 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001904 try:
1905 re.compile(mask)
1906 bbmasks.append(mask)
1907 except sre_constants.error:
1908 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1909
1910 # Then validate the combined regular expressions. This should never
1911 # fail, but better safe than sorry...
1912 bbmask = "|".join(bbmasks)
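# Illustrative example (hypothetical mask): BBMASK = "meta-foo/recipes-bad/ \.bbappend$"
# is combined into the single expression "meta-foo/recipes-bad/|\.bbappend$";
# any collected path it matches is skipped and counted as masked below.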
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001913 try:
1914 bbmask_compiled = re.compile(bbmask)
1915 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001916 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1917 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001918
1919 bbfiles = []
1920 bbappend = []
1921 for f in newfiles:
1922 if bbmask and bbmask_compiled.search(f):
1923 collectlog.debug(1, "skipping masked file %s", f)
1924 masked += 1
1925 continue
1926 if f.endswith('.bb'):
1927 bbfiles.append(f)
1928 elif f.endswith('.bbappend'):
1929 bbappend.append(f)
1930 else:
1931 collectlog.debug(1, "skipping %s: unknown file extension", f)
1932
1933 # Build a list of .bbappend files for each .bb file
1934 for f in bbappend:
1935 base = os.path.basename(f).replace('.bbappend', '.bb')
1936 self.bbappends.append((base, f))
1937
1938 # Find overlayed recipes
1939 # bbfiles will be in priority order which makes this easy
1940 bbfile_seen = dict()
1941 self.overlayed = defaultdict(list)
1942 for f in reversed(bbfiles):
1943 base = os.path.basename(f)
1944 if base not in bbfile_seen:
1945 bbfile_seen[base] = f
1946 else:
1947 topfile = bbfile_seen[base]
1948 self.overlayed[topfile].append(f)
1949
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001950 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001951
1952 def get_file_appends(self, fn):
1953 """
1954 Returns a list of .bbappend files to apply to fn
1955 """
1956 filelist = []
1957 f = os.path.basename(fn)
1958 for b in self.bbappends:
1959 (bbappend, filename) = b
1960 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1961 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001962 return tuple(filelist)
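# Illustrative note on the '%' handling above (hypothetical names): an append
# registered as "busybox_%.bbappend" is stored under the base "busybox_%.bb",
# and the prefix check lets it apply to any "busybox_<version>.bb", whereas
# "busybox_1.36.bbappend" only applies to busybox_1.36.bb.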
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001963
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001964 def collection_priorities(self, pkgfns, fns, d):
1965 # Return the priorities of the entries in pkgfns
1966 # Also check that all the regexes in self.bbfile_config_priorities are used
1967 # (but to do that we need to ensure skipped recipes aren't counted, nor
1968 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001969
1970 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001971 seen = set()
1972 matched = set()
1973
1974 matched_regex = set()
1975 unmatched_regex = set()
1976 for _, _, regex, _ in self.bbfile_config_priorities:
1977 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001978
1979 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001980 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001981 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001982 priorities[p], regex = self.calc_bbfile_priority(realfn)
1983 if regex in unmatched_regex:
1984 matched_regex.add(regex)
1985 unmatched_regex.remove(regex)
1986 seen.add(realfn)
1987 if regex:
1988 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001989
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001990 if unmatched_regex:
1991 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001992 for b in self.bbappends:
1993 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001994 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001995
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001996 # Account for skipped recipes
1997 seen.update(fns)
1998
1999 seen.difference_update(matched)
2000
2001 def already_matched(fn):
2002 for regex in matched_regex:
2003 if regex.match(fn):
2004 return True
2005 return False
2006
2007 for unmatch in unmatched_regex.copy():
2008 for fn in seen:
2009 if unmatch.match(fn):
2010 # If the bbappend or file was already matched by another regex, skip it
2011 # e.g. for a layer within a layer, the outer regex could match, the inner
2012 # regex may match nothing and we should warn about that
2013 if already_matched(fn):
2014 continue
2015 unmatched_regex.remove(unmatch)
2016 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002017
2018 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05002019 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002020 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05002021 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
2022 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002023
2024 return priorities
2025
2026class ParsingFailure(Exception):
2027 def __init__(self, realexception, recipe):
2028 self.realexception = realexception
2029 self.recipe = recipe
2030 Exception.__init__(self, realexception, recipe)
2031
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002032class Parser(multiprocessing.Process):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002033 def __init__(self, jobs, results, quit, profile):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002034 self.jobs = jobs
2035 self.results = results
2036 self.quit = quit
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002037 multiprocessing.Process.__init__(self)
2038 self.context = bb.utils.get_context().copy()
2039 self.handlers = bb.event.get_class_handlers().copy()
2040 self.profile = profile
Andrew Geissler9aee5002022-03-30 16:27:02 +00002041 self.queue_signals = False
2042 self.signal_received = []
2043 self.signal_threadlock = threading.Lock()
2044
2045 def catch_sig(self, signum, frame):
2046 if self.queue_signals:
2047 self.signal_received.append(signum)
2048 else:
2049 self.handle_sig(signum, frame)
2050
2051 def handle_sig(self, signum, frame):
2052 if signum == signal.SIGTERM:
2053 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2054 os.kill(os.getpid(), signal.SIGTERM)
2055 elif signum == signal.SIGINT:
2056 signal.default_int_handler(signum, frame)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002057
2058 def run(self):
2059
2060 if not self.profile:
2061 self.realrun()
2062 return
2063
2064 try:
2065 import cProfile as profile
2066 except:
2067 import profile
2068 prof = profile.Profile()
2069 try:
2070 profile.Profile.runcall(prof, self.realrun)
2071 finally:
2072 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2073 prof.dump_stats(logfile)
2074
2075 def realrun(self):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002076 # Signal handling here is hard. We must not terminate any process or thread holding the write
2077 # lock for the event stream as it will not be released, ever, and things will hang.
2078 # Python handles signals in the main thread/process but they can be raised from any thread and
2079 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2080 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2081 # new thread should also do so) and we defer handling but we handle with the local thread lock
2082 # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2083 # can be in the critical section.
2084 signal.signal(signal.SIGTERM, self.catch_sig)
2085 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2086 signal.signal(signal.SIGINT, self.catch_sig)
2087 bb.utils.set_process_name(multiprocessing.current_process().name)
2088 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2089 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002090
2091 pending = []
Andrew Geissler9aee5002022-03-30 16:27:02 +00002092 try:
2093 while True:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002094 try:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002095 self.quit.get_nowait()
2096 except queue.Empty:
2097 pass
2098 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002099 break
Andrew Geissler9aee5002022-03-30 16:27:02 +00002100
2101 if pending:
2102 result = pending.pop()
2103 else:
2104 try:
2105 job = self.jobs.pop()
2106 except IndexError:
2107 break
2108 result = self.parse(*job)
2109 # Clear the siggen cache after parsing to control memory usage; it's huge
2110 bb.parse.siggen.postparsing_clean_cache()
2111 try:
2112 self.results.put(result, timeout=0.25)
2113 except queue.Full:
2114 pending.append(result)
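# A result that cannot be queued within 0.25s is kept in 'pending' and retried
# on the next loop iteration, so a slow reader neither blocks this parser
# process nor drops a parsed recipe.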
2115 finally:
2116 self.results.close()
2117 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002118
Andrew Geissler5a43b432020-06-13 10:46:56 -05002119 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002120 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05002121 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002122 # Record the filename we're parsing into any events generated
2123 def parse_filter(self, record):
2124 record.taskpid = bb.event.worker_pid
2125 record.fn = filename
2126 return True
2127
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002128 # Reset our environment and handlers to the original settings
2129 bb.utils.set_context(self.context.copy())
2130 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002131 bb.event.LogHandler.filter = parse_filter
2132
Andrew Geissler5a43b432020-06-13 10:46:56 -05002133 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002134 except Exception as exc:
2135 tb = sys.exc_info()[2]
2136 exc.recipe = filename
2137 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002138 return True, None, exc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002139 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2140 # and for example a worker thread doesn't just exit on its own in response to
2141 # a SystemExit event for example.
2142 except BaseException as exc:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002143 return True, None, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002144 finally:
2145 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002146
2147class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002148 def __init__(self, cooker, mcfilelist, masked):
2149 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002150 self.cooker = cooker
2151 self.cfgdata = cooker.data
2152 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002153 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154
2155 # Accounting statistics
2156 self.parsed = 0
2157 self.cached = 0
2158 self.error = 0
2159 self.masked = masked
2160
2161 self.skipped = 0
2162 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002163
2164 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002165 self.process_names = []
2166
Andrew Geissler5a43b432020-06-13 10:46:56 -05002167 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2168 self.fromcache = set()
2169 self.willparse = set()
2170 for mc in self.cooker.multiconfigs:
2171 for filename in self.mcfilelist[mc]:
2172 appends = self.cooker.collections[mc].get_file_appends(filename)
2173 if not self.bb_caches[mc].cacheValid(filename, appends):
2174 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2175 else:
2176 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2177
2178 self.total = len(self.fromcache) + len(self.willparse)
2179 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002180 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002181
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002182 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002183 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002184
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002185 self.start()
2186 self.haveshutdown = False
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002187 self.syncthread = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002188
2189 def start(self):
2190 self.results = self.load_cached()
2191 self.processes = []
2192 if self.toparse:
2193 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002194
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002195 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002196 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002197
2198 def chunkify(lst,n):
2199 return [lst[i::n] for i in range(n)]
Andrew Geissler5a43b432020-06-13 10:46:56 -05002200 self.jobs = chunkify(list(self.willparse), self.num_processes)
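# Illustrative example: chunkify([f0, f1, f2, f3, f4], 2) -> [[f0, f2, f4], [f1, f3]],
# i.e. the files still needing a parse are dealt out round-robin so each Parser
# process gets a roughly equal share.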
Brad Bishop19323692019-04-05 15:28:33 -04002201
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002202 for i in range(0, self.num_processes):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002203 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002204 parser.start()
2205 self.process_names.append(parser.name)
2206 self.processes.append(parser)
2207
2208 self.results = itertools.chain(self.results, self.parse_generator())
2209
Andrew Geissler9aee5002022-03-30 16:27:02 +00002210 def shutdown(self, clean=True):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002211 if not self.toparse:
2212 return
2213 if self.haveshutdown:
2214 return
2215 self.haveshutdown = True
2216
2217 if clean:
2218 event = bb.event.ParseCompleted(self.cached, self.parsed,
2219 self.skipped, self.masked,
2220 self.virtuals, self.error,
2221 self.total)
2222
2223 bb.event.fire(event, self.cfgdata)
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00002224 else:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002225 bb.error("Parsing halted due to errors, see error messages above")
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002226
2227 for process in self.processes:
2228 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002229
Brad Bishop08902b02019-08-20 09:16:51 -04002230 # Clean up the queue before calling process.join(), otherwise there might be
2231 # deadlocks.
2232 while True:
2233 try:
2234 self.result_queue.get(timeout=0.25)
2235 except queue.Empty:
2236 break
2237
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002238 for process in self.processes:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002239 process.join(0.5)
2240
2241 for process in self.processes:
2242 if process.exitcode is None:
2243 os.kill(process.pid, signal.SIGINT)
2244
2245 for process in self.processes:
2246 process.join(0.5)
2247
2248 for process in self.processes:
2249 if process.exitcode is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002250 process.terminate()
Andrew Geissler9aee5002022-03-30 16:27:02 +00002251
2252 for process in self.processes:
2253 process.join()
2254 # Added in 3.7, cleans up zombies
2255 if hasattr(process, "close"):
2256 process.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002257
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002258 self.parser_quit.close()
2259 # Allow data left in the cancel queue to be discarded
2260 self.parser_quit.cancel_join_thread()
2261
Andrew Geissler5a43b432020-06-13 10:46:56 -05002262 def sync_caches():
2263 for c in self.bb_caches.values():
2264 c.sync()
2265
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002266 sync = threading.Thread(target=sync_caches, name="SyncThread")
2267 self.syncthread = sync
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002268 sync.start()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002269 bb.codeparser.parser_cache_savemerge()
2270 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002271 if self.cooker.configuration.profile:
2272 profiles = []
2273 for i in self.process_names:
2274 logfile = "profile-parse-%s.log" % i
2275 if os.path.exists(logfile):
2276 profiles.append(logfile)
2277
2278 pout = "profile-parse.log.processed"
2279 bb.utils.process_profilelog(profiles, pout = pout)
2280 print("Processed parsing statistics saved to %s" % (pout))
2281
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002282 def final_cleanup(self):
2283 if self.syncthread:
2284 self.syncthread.join()
2285
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002286 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002287 for mc, cache, filename, appends in self.fromcache:
2288 cached, infos = cache.load(filename, appends)
2289 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002290
2291 def parse_generator(self):
Andrew Geissler595f6302022-01-24 19:11:47 +00002292 empty = False
2293 while self.processes or not empty:
2294 for process in self.processes.copy():
2295 if not process.is_alive():
2296 process.join()
2297 self.processes.remove(process)
2298
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002299 if self.parsed >= self.toparse:
2300 break
2301
2302 try:
2303 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002304 except queue.Empty:
Andrew Geissler595f6302022-01-24 19:11:47 +00002305 empty = True
Andrew Geissler9aee5002022-03-30 16:27:02 +00002306 yield None, None, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002307 else:
Andrew Geissler595f6302022-01-24 19:11:47 +00002308 empty = False
Andrew Geissler9aee5002022-03-30 16:27:02 +00002309 yield result
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002310
Andrew Geissler595f6302022-01-24 19:11:47 +00002311 if self.parsed < self.toparse:
2312 raise bb.parse.ParseError("Not all recipes were parsed; a parser process may have been killed or died. Exiting.", None)
2313
2314
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002315 def parse_next(self):
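        # Consume one result from self.results. Returns True while parsing should
        # continue and False once parsing has completed or failed (shutting the
        # parsers down in either case).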
2316 result = []
2317 parsed = None
2318 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002319 parsed, mc, result = next(self.results)
Andrew Geissler9aee5002022-03-30 16:27:02 +00002320 if isinstance(result, BaseException):
2321 # Re-raise exceptions that the parser processes passed back as results
2322 raise result
2323 if parsed is None:
2324 # Timeout, loop back through the main loop
2325 return True
2326
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002327 except StopIteration:
2328 self.shutdown()
2329 return False
2330 except bb.BBHandledException as exc:
2331 self.error += 1
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00002332 logger.debug('Failed to parse recipe: %s' % exc.recipe)
Andrew Geissler9aee5002022-03-30 16:27:02 +00002333 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002334 return False
2335 except ParsingFailure as exc:
2336 self.error += 1
2337 logger.error('Unable to parse %s: %s' %
2338 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002339 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002340 return False
2341 except bb.parse.ParseError as exc:
2342 self.error += 1
2343 logger.error(str(exc))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002344 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002345 return False
2346 except bb.data_smart.ExpansionError as exc:
2347 self.error += 1
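            # Trim the leading BitBake-internal frames so the traceback shown to
            # the user starts in the recipe metadata.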
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002348 bbdir = os.path.dirname(__file__) + os.sep
2349 etype, value, _ = sys.exc_info()
2350 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2351 logger.error('ExpansionError while parsing %s', value.recipe,
2352 exc_info=(etype, value, tb))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002353 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002354 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002355 except Exception as exc:
2356 self.error += 1
2357 etype, value, tb = sys.exc_info()
2358 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002359 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002360 exc_info=(etype, value, exc.traceback))
2361 else:
2362 # Most likely, an exception occurred while raising an exception
2363 import traceback
2364 logger.error('Exception during parse: %s' % traceback.format_exc())
Andrew Geissler9aee5002022-03-30 16:27:02 +00002365 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002366 return False
2367
2368 self.current += 1
2369 self.virtuals += len(result)
2370 if parsed:
2371 self.parsed += 1
2372 if self.parsed % self.progress_chunk == 0:
2373 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2374 self.cfgdata)
2375 else:
2376 self.cached += 1
2377
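        # Record any skipped recipes and add the new info to the appropriate
        # multiconfig recipe cache.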
2378 for virtualfn, info_array in result:
2379 if info_array[0].skipped:
2380 self.skipped += 1
2381 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002382 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002383 parsed=parsed, watcher = self.cooker.add_filewatch)
2384 return True
2385
2386 def reparse(self, filename):
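        # Re-parse a single recipe file for every multiconfig and merge the
        # results into the corresponding recipe caches.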
Andrew Geissler5a43b432020-06-13 10:46:56 -05002387 to_reparse = set()
2388 for mc in self.cooker.multiconfigs:
2389 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2390
2391 for mc, filename, appends in to_reparse:
2392 infos = self.bb_caches[mc].parse(filename, appends)
2393 for vfn, info_array in infos:
2394 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)