#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
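    # Example: state.get_name(state.running) returns "running"; an unknown code raises ValueError.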


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)
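
# The event log written by EventWriter is line-oriented JSON: the first line holds
# {"allvariables": ...}, and each subsequent line holds {"class": "<module>.<name>",
# "vars": "<base64-encoded pickle of the event>"}. Events seen before
# bb.event.BuildStarted are queued in memory and flushed once the file is initialised.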

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, halt):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname:
                self.add_filewatch([[event.pathname]], watcher=self.configwatcher, dirs=True)
            elif "IN_DELETE" in event.maskname and event.pathname in self.watcher.bbseen:
                self.configwatcher.bbseen.remove(event.pathname)
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname:
                self.add_filewatch([[event.pathname]], dirs=True)
            elif "IN_DELETE" in event.maskname and event.pathname in self.watcher.bbseen:
                self.watcher.bbseen.remove(event.pathname)
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    try:
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, halt):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
                        else:
                            logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

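    # buildTaskData() returns (per-multiconfig taskdata, runlist); each runlist entry has
    # the form [mc, target, taskname, providing-file]. prepareTreeData() below returns the
    # same data with the tuple order swapped.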
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set halt to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn
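    # e.g. add_mc_prefix("mc1", "bash") -> "mc:mc1:bash"; with an empty mc the name is returned unchanged.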

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
            depend_tree["pn"][pn]["filename"] = taskfn
            depend_tree["pn"][pn]["version"] = version
            depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

            # if we have extra caches, list all attributes they bring in
            extra_info = []
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                    cachefields = getattr(cache_class, 'cachefields', [])
                    extra_info = extra_info + cachefields

            # for all attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                    depend_tree["packages"][package]["pn"] = pn
                    depend_tree["packages"][package]["filename"] = taskfn
                    depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
            depend_tree["pn"][pn]["filename"] = taskfn
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            depend_tree["pn"][pn]["version"] = version
            rdepends = self.recipecaches[mc].rundeps[taskfn]
            rrecs = self.recipecaches[mc].runrecs[taskfn]
            depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

            # for all extra attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

994 def show_appends_with_no_recipes(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -0500995 appends_without_recipes = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500996 # Determine which bbappends haven't been applied
Andrew Geissler5a43b432020-06-13 10:46:56 -0500997 for mc in self.multiconfigs:
998 # First get list of recipes, including skipped
999 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
1000 recipefns.extend(self.skiplist.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001001
Andrew Geissler5a43b432020-06-13 10:46:56 -05001002 # Work out list of bbappends that have been applied
1003 applied_appends = []
1004 for fn in recipefns:
1005 applied_appends.extend(self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001006
Andrew Geissler5a43b432020-06-13 10:46:56 -05001007 appends_without_recipes[mc] = []
1008 for _, appendfn in self.collections[mc].bbappends:
1009 if not appendfn in applied_appends:
1010 appends_without_recipes[mc].append(appendfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001011
Andrew Geissler5a43b432020-06-13 10:46:56 -05001012 msgs = []
1013 for mc in sorted(appends_without_recipes.keys()):
1014 if appends_without_recipes[mc]:
1015 msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default',
1016 '\n '.join(appends_without_recipes[mc])))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001017
Andrew Geissler5a43b432020-06-13 10:46:56 -05001018 if msgs:
1019 msg = "\n".join(msgs)
1020 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
1021 False) or "no"
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001022 if warn_only.lower() in ("1", "yes", "true"):
1023 bb.warn(msg)
1024 else:
1025 bb.fatal(msg)
1026
1027 def handlePrefProviders(self):
1028
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001029 for mc in self.multiconfigs:
1030 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001031 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001032
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001033 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001034 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001035 try:
1036 (providee, provider) = p.split(':')
1037 except:
1038 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1039 continue
1040 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1041 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1042 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001043
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001044 def findConfigFilePath(self, configfile):
1045 """
1046 Find the location on disk of configfile and if it exists and was parsed by BitBake
1047 emit the ConfigFilePathFound event with the path to the file.
1048 """
1049 path = bb.cookerdata.findConfigFile(configfile, self.data)
1050 if not path:
1051 return
1052
1053 # Generate a list of parsed configuration files by searching the files
1054 # listed in the __depends and __base_depends variables with a .conf suffix.
1055 conffiles = []
1056 dep_files = self.data.getVar('__base_depends', False) or []
1057 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1058
1059 for f in dep_files:
1060 if f[0].endswith(".conf"):
1061 conffiles.append(f[0])
1062
1063 _, conf, conffile = path.rpartition("conf/")
1064 match = os.path.join(conf, conffile)
1065 # Try and find matches for conf/conffilename.conf as we don't always
1066 # have the full path to the file.
1067 for cfg in conffiles:
1068 if cfg.endswith(match):
1069 bb.event.fire(bb.event.ConfigFilePathFound(path),
1070 self.data)
1071 break
1072
1073 def findFilesMatchingInDir(self, filepattern, directory):
1074 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001075 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001076 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1077 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1078 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001079 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001080 """
1081
1082 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001083 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001084 for path in bbpaths:
1085 dirpath = os.path.join(path, directory)
1086 if os.path.exists(dirpath):
1087 for root, dirs, files in os.walk(dirpath):
1088 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001089 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001090 matches.append(f)
1091
1092 if matches:
1093 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1094
Patrick Williams93c203f2021-10-06 16:15:23 -05001095 def testCookerCommandEvent(self, filepattern):
1096 # Dummy command used by OEQA selftest to test tinfoil without IO
1097 matches = ["A", "B"]
1098 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1099
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001100 def findProviders(self, mc=''):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001101 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001102
1103 def findBestProvider(self, pn, mc=''):
1104 if pn in self.recipecaches[mc].providers:
1105 filenames = self.recipecaches[mc].providers[pn]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001106 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001107 if eligible is not None:
1108 filename = eligible[0]
1109 else:
1110 filename = None
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001111 return None, None, None, filename
1112 elif pn in self.recipecaches[mc].pkg_pn:
Andrew Geissler95ac1b82021-03-31 14:34:31 -05001113 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1114 if required and preferred_file is None:
1115 return None, None, None, None
1116 return (latest, latest_f, preferred_ver, preferred_file)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001117 else:
1118 return None, None, None, None
1119
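    # Note on findBestProvider above: the returned tuple is (latest_version, latest_file,
    # preferred_version, preferred_file); when the name is only known as a provider, just
    # the final filename element is populated, and an unknown name (hypothetically,
    # findBestProvider("no-such-recipe")) yields (None, None, None, None).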
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001120 def findConfigFiles(self, varname):
1121 """
1122 Find config files which are appropriate values for varname.
1123 i.e. MACHINE, DISTRO
1124 """
1125 possible = []
1126 var = varname.lower()
1127
1128 data = self.data
1129 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001130 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001131 for path in bbpaths:
1132 confpath = os.path.join(path, "conf", var)
1133 if os.path.exists(confpath):
1134 for root, dirs, files in os.walk(confpath):
1135 # get all child files, these are appropriate values
1136 for f in files:
1137 val, sep, end = f.rpartition('.')
1138 if end == 'conf':
1139 possible.append(val)
1140
1141 if possible:
1142 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1143
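    # Worked example for findConfigFiles (hypothetical BBPATH entry): findConfigFiles('MACHINE')
    # walks <each BBPATH entry>/conf/machine, so a file named "qemuarm.conf" contributes the
    # value "qemuarm" (the part before the final '.'), and the collected values are reported
    # through a ConfigFilesFound event.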
1144 def findInheritsClass(self, klass):
1145 """
1146 Find all recipes which inherit the specified class
1147 """
1148 pkg_list = []
1149
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001150 for pfn in self.recipecaches[''].pkg_fn:
1151 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001152 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001153 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001154
1155 return pkg_list
1156
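    # Illustrative call (hypothetical class entry): findInheritsClass(klass) returns the recipe
    # names from the default ('') multiconfig cache whose cached 'inherits' list contains the
    # given entry, e.g. every recipe inheriting a particular .bbclass file.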
1157 def generateTargetsTree(self, klass=None, pkgs=None):
1158 """
1159 Generate a dependency tree of buildable targets
1160 Generate an event with the result
1161 """
1162 # if the caller hasn't specified a pkgs list default to universe
1163 if not pkgs:
1164 pkgs = ['universe']
1165 # if inherited_class passed ensure all recipes which inherit the
1166 # specified class are included in pkgs
1167 if klass:
1168 extra_pkgs = self.findInheritsClass(klass)
1169 pkgs = pkgs + extra_pkgs
1170
1171 # generate a dependency tree for all our packages
1172 tree = self.generatePkgDepTreeData(pkgs, 'build')
1173 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1174
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001175 def interactiveMode( self ):
1176 """Drop off into a shell"""
1177 try:
1178 from bb import shell
1179 except ImportError:
1180 parselog.exception("Interactive mode not available")
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001181 raise bb.BBHandledException()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001182 else:
1183 shell.start( self )
1184
1185
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001186 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001187 """Handle collections"""
1188 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001189 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001190 if collections:
1191 collection_priorities = {}
1192 collection_depends = {}
1193 collection_list = collections.split()
1194 min_prio = 0
1195 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001196 bb.debug(1,'Processing %s in collection list' % (c))
1197
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001198 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001199 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001200 if priority:
1201 try:
1202 prio = int(priority)
1203 except ValueError:
1204 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1205 errors = True
1206 if min_prio == 0 or prio < min_prio:
1207 min_prio = prio
1208 collection_priorities[c] = prio
1209 else:
1210 collection_priorities[c] = None
1211
1212 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001213 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001214 if deps:
1215 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001216 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001217 except bb.utils.VersionStringException as vse:
1218 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001219 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001220 if dep in collection_list:
1221 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001222 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001223 (op, depver) = opstr.split()
1224 if layerver:
1225 try:
1226 res = bb.utils.vercmp_string_op(layerver, depver, op)
1227 except bb.utils.VersionStringException as vse:
1228 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1229 if not res:
1230 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1231 errors = True
1232 else:
1233 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1234 errors = True
1235 else:
1236 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1237 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001238 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001239 else:
1240 collection_depends[c] = []
1241
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001242 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001243 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001244 if recs:
1245 try:
1246 recDict = bb.utils.explode_dep_versions2(recs)
1247 except bb.utils.VersionStringException as vse:
1248 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1249 for rec, oplist in list(recDict.items()):
1250 if rec in collection_list:
1251 if oplist:
1252 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001253 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001254 if layerver:
1255 (op, recver) = opstr.split()
1256 try:
1257 res = bb.utils.vercmp_string_op(layerver, recver, op)
1258 except bb.utils.VersionStringException as vse:
1259 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1260 if not res:
1261 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1262 continue
1263 else:
1264 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1265 continue
1266 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1267 collection_depends[c].append(rec)
1268 else:
1269 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1270
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001271 # Recursively work out collection priorities based on dependencies
1272 def calc_layer_priority(collection):
1273 if not collection_priorities[collection]:
1274 max_depprio = min_prio
1275 for dep in collection_depends[collection]:
1276 calc_layer_priority(dep)
1277 depprio = collection_priorities[dep]
1278 if depprio > max_depprio:
1279 max_depprio = depprio
1280 max_depprio += 1
1281 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1282 collection_priorities[collection] = max_depprio
1283
1284 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1285 for c in collection_list:
1286 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001287 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001288 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001289 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1290 errors = True
1291 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001292 elif regex == "":
1293 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001294 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001295 errors = False
1296 else:
1297 try:
1298 cre = re.compile(regex)
1299 except re.error:
1300 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1301 errors = True
1302 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001303 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304 if errors:
1305 # We've already printed the actual error(s)
1306 raise CollectionError("Errors during parsing layer configuration")
1307
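    # Worked example of the layer priority calculation above (hypothetical layer names):
    # with BBFILE_PRIORITY_core = "5" and no BBFILE_PRIORITY for "meta-app" where
    # LAYERDEPENDS_meta-app = "core", calc_layer_priority starts meta-app at min_prio (5),
    # keeps the highest dependency priority (core's 5) and adds one, so meta-app ends up
    # with priority 6 and its recipes take precedence over the layer it depends on.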
1308 def buildSetVars(self):
1309 """
1310 Setup any variables needed before starting a build
1311 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001312 t = time.gmtime()
1313 for mc in self.databuilder.mcdata:
1314 ds = self.databuilder.mcdata[mc]
1315 if not ds.getVar("BUILDNAME", False):
1316 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1317 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1318 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1319 ds.setVar("TIME", time.strftime('%H%M%S', t))
1320
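    # Illustrative values for the variables set above (hypothetical build start time):
    # a build started at 2021-10-06 16:15:23 UTC gets DATE = "20211006", TIME = "161523",
    # BUILDSTART = "10/06/2021 16:15:23", and BUILDNAME defaults to "${DATE}${TIME}",
    # i.e. "20211006161523" once expanded.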
1321 def reset_mtime_caches(self):
1322 """
1323 Reset mtime caches - this is particularly important when memory resident as something
1324 which is cached may well have changed since the last invocation (e.g. a
1325 file associated with a recipe might have been modified by the user).
1326 """
1327 build.reset_cache()
1328 bb.fetch._checksum_cache.mtime_cache.clear()
1329 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1330 if siggen_cache:
1331 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
Andrew Geissler5a43b432020-06-13 10:46:56 -05001333 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001334 """
1335 Find the .bb files which match the expression in 'buildfile'.
1336 """
1337 if bf.startswith("/") or bf.startswith("../"):
1338 bf = os.path.abspath(bf)
1339
Andrew Geissler5a43b432020-06-13 10:46:56 -05001340 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1341 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001342 try:
1343 os.stat(bf)
1344 bf = os.path.abspath(bf)
1345 return [bf]
1346 except OSError:
1347 regexp = re.compile(bf)
1348 matches = []
1349 for f in filelist:
1350 if regexp.search(f) and os.path.isfile(f):
1351 matches.append(f)
1352 return matches
1353
Andrew Geissler5a43b432020-06-13 10:46:56 -05001354 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001355 """
1356 Find the .bb file which matches the expression in 'buildfile'.
1357 Raise an error if multiple files
1358 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001359 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360 if len(matches) != 1:
1361 if matches:
1362 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1363 if matches:
1364 for f in matches:
1365 msg += "\n %s" % f
1366 parselog.error(msg)
1367 else:
1368 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1369 raise NoSpecificMatch
1370 return matches[0]
1371
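    # Illustrative sketch of the matching above (hypothetical recipe files): matchFile("bash")
    # succeeds only if exactly one collected .bb path matches the regular expression "bash";
    # if both bash_5.1.bb and bash_4.4.bb match, or nothing matches, NoSpecificMatch is raised.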
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001372 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001373 """
1374 Build the file matching regexp buildfile
1375 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001376 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001378 # Too many people use -b because they think it's how you normally
1379 # specify a target to be built, so show a warning
1380 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1381
1382 self.buildFileInternal(buildfile, task)
1383
1384 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1385 """
1386 Build the file matching regexp buildfile
1387 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001388
1389 # Parse the configuration here. We need to do it explicitly here since
1390 # buildFile() doesn't use the cache
1391 self.parseConfiguration()
1392
1393 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001394 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001395 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001396 if not task.startswith("do_"):
1397 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001398
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001399 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001400 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001401
1402 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001403 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001404
Andrew Geissler5a43b432020-06-13 10:46:56 -05001405 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001406
Andrew Geissler5a43b432020-06-13 10:46:56 -05001407 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408 infos = dict(infos)
1409
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001410 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001411 try:
1412 info_array = infos[fn]
1413 except KeyError:
1414 bb.fatal("%s does not exist" % fn)
1415
1416 if info_array[0].skipped:
1417 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1418
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001419 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001420
1421 # Tweak some variables
1422 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001423 self.recipecaches[mc].ignored_dependencies = set()
1424 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001425 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001426
1427 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001428 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1429 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001430 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1431 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001432
1433 # Invalidate task for target if force mode active
1434 if self.configuration.force:
1435 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001436 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001437
1438 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001439 taskdata = {}
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001440 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001441 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001442
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001443 if quietlog:
1444 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1445 bb.runqueue.logger.setLevel(logging.WARNING)
1446
1447 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1448 if fireevents:
1449 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001450
1451 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001452 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001453
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001454 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001455
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001456 def buildFileIdle(server, rq, halt):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001457
1458 msg = None
1459 interrupted = 0
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001460 if halt or self.state == state.forceshutdown:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001461 rq.finish_runqueue(True)
1462 msg = "Forced shutdown"
1463 interrupted = 2
1464 elif self.state == state.shutdown:
1465 rq.finish_runqueue(False)
1466 msg = "Stopped build"
1467 interrupted = 1
1468 failures = 0
1469 try:
1470 retval = rq.execute_runqueue()
1471 except runqueue.TaskFailure as exc:
1472 failures += len(exc.args)
1473 retval = False
1474 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001475 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001476 if quietlog:
1477 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001478 return False
1479
1480 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001481 if fireevents:
1482 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001483 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001484 # We trashed self.recipecaches above
1485 self.parsecache_valid = False
1486 self.configuration.limited_deps = False
1487 bb.parse.siggen.reset(self.data)
1488 if quietlog:
1489 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001490 return False
1491 if retval is True:
1492 return True
1493 return retval
1494
Andrew Geissler635e0e42020-08-21 15:58:33 -05001495 self.idleCallBackRegister(buildFileIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001496
1497 def buildTargets(self, targets, task):
1498 """
1499 Attempt to build the targets specified
1500 """
1501
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001502 def buildTargetsIdle(server, rq, halt):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001503 msg = None
1504 interrupted = 0
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001505 if halt or self.state == state.forceshutdown:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001506 rq.finish_runqueue(True)
1507 msg = "Forced shutdown"
1508 interrupted = 2
1509 elif self.state == state.shutdown:
1510 rq.finish_runqueue(False)
1511 msg = "Stopped build"
1512 interrupted = 1
1513 failures = 0
1514 try:
1515 retval = rq.execute_runqueue()
1516 except runqueue.TaskFailure as exc:
1517 failures += len(exc.args)
1518 retval = False
1519 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001520 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521 return False
1522
1523 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001524 try:
1525 for mc in self.multiconfigs:
1526 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1527 finally:
1528 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529 return False
1530 if retval is True:
1531 return True
1532 return retval
1533
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001534 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001535 self.buildSetVars()
1536
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001537 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001538 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001539 task = self.configuration.cmd
1540
1541 if not task.startswith("do_"):
1542 task = "do_%s" % task
1543
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001544 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1545
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001546 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001547
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001548 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001549
1550 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001551
1552 # make targets to always look as <target>:do_<task>
1553 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001554 for target in runlist:
1555 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001556 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001558
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001559 for mc in self.multiconfigs:
1560 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001561
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001562 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001563 if 'universe' in targets:
1564 rq.rqdata.warn_multi_bb = True
1565
Andrew Geissler635e0e42020-08-21 15:58:33 -05001566 self.idleCallBackRegister(buildTargetsIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567
1568
1569 def getAllKeysWithFlags(self, flaglist):
1570 dump = {}
1571 for k in self.data.keys():
1572 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001573 expand = True
1574 flags = self.data.getVarFlags(k)
1575 if flags and "func" in flags and "python" in flags:
1576 expand = False
1577 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001578 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1579 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001580 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001581 'history' : self.data.varhistory.variable(k),
1582 }
1583 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001584 if flags and d in flags:
1585 dump[k][d] = flags[d]
1586 else:
1587 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001588 except Exception as e:
1589 print(e)
1590 return dump
1591
1592
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001593 def updateCacheSync(self):
1594 if self.state == state.running:
1595 return
1596
1597 # reload files for which we got notifications
1598 for p in self.inotify_modified_files:
1599 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001600 if p in bb.parse.BBHandler.cached_statements:
1601 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001602 self.inotify_modified_files = []
1603
1604 if not self.baseconfig_valid:
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001605 logger.debug("Reloading base configuration data")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001607 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001608
1609 # This is called for all async commands when self.state != running
1610 def updateCache(self):
1611 if self.state == state.running:
1612 return
1613
1614 if self.state in (state.shutdown, state.forceshutdown, state.error):
1615 if hasattr(self.parser, 'shutdown'):
Andrew Geissler9aee5002022-03-30 16:27:02 +00001616 self.parser.shutdown(clean=False)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001617 self.parser.final_cleanup()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618 raise bb.BBHandledException()
1619
1620 if self.state != state.parsing:
1621 self.updateCacheSync()
1622
1623 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001624 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001625 self.parseConfiguration ()
1626 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001627 for mc in self.multiconfigs:
1628 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001629
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001630 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001631 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001632 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001633
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001634 for dep in self.configuration.extra_assume_provided:
1635 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001636
Andrew Geissler5a43b432020-06-13 10:46:56 -05001637 self.collections = {}
1638
1639 mcfilelist = {}
1640 total_masked = 0
1641 searchdirs = set()
1642 for mc in self.multiconfigs:
1643 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1644 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1645
1646 mcfilelist[mc] = filelist
1647 total_masked += masked
1648 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001649
1650 # Add inotify watches for directories searched for bb/bbappend files
1651 for dirent in searchdirs:
1652 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001653
Andrew Geissler5a43b432020-06-13 10:46:56 -05001654 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001655 self.parsecache_valid = True
1656
1657 self.state = state.parsing
1658
1659 if not self.parser.parse_next():
1660 collectlog.debug(1, "parsing complete")
1661 if self.parser.error:
1662 raise bb.BBHandledException()
1663 self.show_appends_with_no_recipes()
1664 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001665 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001666 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001667 self.state = state.running
1668
1669 # Send an event listing all stamps reachable after parsing
1670 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001671 for mc in self.multiconfigs:
1672 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1673 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001674 return None
1675
1676 return True
1677
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001678 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001679
1680 # Return a copy, don't modify the original
1681 pkgs_to_build = pkgs_to_build[:]
1682
Andrew Geissler595f6302022-01-24 19:11:47 +00001683 if not pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001684 raise NothingToBuild
1685
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001686 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001687 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001688 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001689 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001690 if pkg.startswith("multiconfig:"):
1691 pkgs_to_build.remove(pkg)
1692 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001693
1694 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001695 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001696 for mc in self.multiconfigs:
1697 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1698 for t in self.recipecaches[mc].world_target:
1699 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001700 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001701 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001702
1703 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001704 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001705 parselog.debug(1, "collating packages for \"universe\"")
1706 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001707 for mc in self.multiconfigs:
1708 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001709 if task:
1710 foundtask = False
1711 for provider_fn in self.recipecaches[mc].providers[t]:
1712 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1713 foundtask = True
1714 break
1715 if not foundtask:
1716 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1717 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001718 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001719 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001720 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001721
1722 return pkgs_to_build
1723
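    # Worked example for checkPackages above (hypothetical multiconfig "qemuarm"): a target
    # written as "multiconfig:qemuarm:busybox" is rewritten to "mc:qemuarm:busybox", 'world'
    # expands to each multiconfig's world target list (prefixed "mc:<mc>:" for a non-empty mc),
    # and a target listed in ASSUME_PROVIDED only triggers a warning.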
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001724 def pre_serve(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001725 return
1726
1727 def post_serve(self):
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001728 self.shutdown(force=True)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001729 prserv.serv.auto_shutdown()
Andrew Geissler9aee5002022-03-30 16:27:02 +00001730 bb.parse.siggen.exit()
Brad Bishop08902b02019-08-20 09:16:51 -04001731 if self.hashserv:
1732 self.hashserv.process.terminate()
1733 self.hashserv.process.join()
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001734 if hasattr(self, "data"):
1735 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001736
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001737 def shutdown(self, force = False):
1738 if force:
1739 self.state = state.forceshutdown
1740 else:
1741 self.state = state.shutdown
1742
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001743 if self.parser:
Andrew Geissler9aee5002022-03-30 16:27:02 +00001744 self.parser.shutdown(clean=not force)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001745 self.parser.final_cleanup()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001746
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001747 def finishcommand(self):
1748 self.state = state.initial
1749
1750 def reset(self):
1751 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001752 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001753
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001754 def clientComplete(self):
1755 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001756 self.finishcommand()
1757 self.extraconfigdata = {}
1758 self.command.reset()
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001759 if hasattr(self, "data"):
1760 self.databuilder.reset()
1761 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001762 self.parsecache_valid = False
1763 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001764
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001765
1766class CookerExit(bb.event.Event):
1767 """
1768 Notify clients of the Cooker shutdown
1769 """
1770
1771 def __init__(self):
1772 bb.event.Event.__init__(self)
1773
1774
1775class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001776 def __init__(self, priorities, mc=''):
1777 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001778 self.bbappends = []
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00001779 # Priorities is a list of tuples, with the second element as the pattern.
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001780 # We need to sort the list with the longest pattern first, and so on to
1781 # the shortest. This allows nested layers to be properly evaluated.
1782 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001783
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001784 def calc_bbfile_priority(self, filename):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001785 for _, _, regex, pri in self.bbfile_config_priorities:
1786 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001787 return pri, regex
1788 return 0, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001789
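    # Illustrative lookup (hypothetical layer pattern): with a collection registered as
    # ("meta-app", "^/home/user/meta-app/", <compiled regex>, 7), calc_bbfile_priority of
    # "/home/user/meta-app/recipes-core/foo_1.0.bb" returns (7, <that regex>); a path no
    # BBFILE_PATTERN matches falls back to (0, None).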
1790 def get_bbfiles(self):
1791 """Get list of default .bb files by reading out the current directory"""
1792 path = os.getcwd()
1793 contents = os.listdir(path)
1794 bbfiles = []
1795 for f in contents:
1796 if f.endswith(".bb"):
1797 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1798 return bbfiles
1799
1800 def find_bbfiles(self, path):
1801 """Find all the .bb and .bbappend files in a directory"""
1802 found = []
1803 for dir, dirs, files in os.walk(path):
1804 for ignored in ('SCCS', 'CVS', '.svn'):
1805 if ignored in dirs:
1806 dirs.remove(ignored)
1807 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1808
1809 return found
1810
1811 def collect_bbfiles(self, config, eventdata):
1812 """Collect all available .bb build files"""
1813 masked = 0
1814
1815 collectlog.debug(1, "collecting .bb files")
1816
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001817 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001818
1819 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001820 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001821 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001822
Andrew Geissler595f6302022-01-24 19:11:47 +00001823 if not files:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001824 files = self.get_bbfiles()
1825
Andrew Geissler595f6302022-01-24 19:11:47 +00001826 if not files:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001827 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1828 bb.event.fire(CookerExit(), eventdata)
1829
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001830 # We need to track where we look so that we can add inotify watches. There
1831 # is no nice way to do this, this is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001832 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001833 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001834 if hasattr(os, 'scandir'):
1835 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001836 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001837
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001838 def ourlistdir(d):
1839 searchdirs.append(d)
1840 return origlistdir(d)
1841
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001842 def ourscandir(d):
1843 searchdirs.append(d)
1844 return origscandir(d)
1845
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001846 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001847 if hasattr(os, 'scandir'):
1848 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001849 try:
1850 # Can't use set here as order is important
1851 newfiles = []
1852 for f in files:
1853 if os.path.isdir(f):
1854 dirfiles = self.find_bbfiles(f)
1855 for g in dirfiles:
1856 if g not in newfiles:
1857 newfiles.append(g)
1858 else:
1859 globbed = glob.glob(f)
1860 if not globbed and os.path.exists(f):
1861 globbed = [f]
1862 # glob gives files in order on disk. Sort to be deterministic.
1863 for g in sorted(globbed):
1864 if g not in newfiles:
1865 newfiles.append(g)
1866 finally:
1867 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001868 if hasattr(os, 'scandir'):
1869 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001870
1871 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001872
1873 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001874 # First validate the individual regular expressions and ignore any
1875 # that do not compile
1876 bbmasks = []
1877 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001878 # When constructing an older style single regex, it's possible for BBMASK
1879 # to end up beginning with '|', which matches and masks _everything_.
1880 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001881 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001882 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001883 try:
1884 re.compile(mask)
1885 bbmasks.append(mask)
1886 except sre_constants.error:
1887 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1888
1889 # Then validate the combined regular expressions. This should never
1890 # fail, but better safe than sorry...
1891 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001892 try:
1893 bbmask_compiled = re.compile(bbmask)
1894 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001895 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1896 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001897
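        # Illustrative BBMASK value (hypothetical): BBMASK = "meta-app/recipes-broken/ /foo\.bb$"
        # is combined into the single regex "meta-app/recipes-broken/|/foo\.bb$", and any
        # collected .bb or .bbappend path it matches is skipped and counted as masked below.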
1898 bbfiles = []
1899 bbappend = []
1900 for f in newfiles:
1901 if bbmask and bbmask_compiled.search(f):
1902 collectlog.debug(1, "skipping masked file %s", f)
1903 masked += 1
1904 continue
1905 if f.endswith('.bb'):
1906 bbfiles.append(f)
1907 elif f.endswith('.bbappend'):
1908 bbappend.append(f)
1909 else:
1910 collectlog.debug(1, "skipping %s: unknown file extension", f)
1911
1912 # Build a list of .bbappend files for each .bb file
1913 for f in bbappend:
1914 base = os.path.basename(f).replace('.bbappend', '.bb')
1915 self.bbappends.append((base, f))
1916
1917 # Find overlayed recipes
1918 # bbfiles will be in priority order which makes this easy
1919 bbfile_seen = dict()
1920 self.overlayed = defaultdict(list)
1921 for f in reversed(bbfiles):
1922 base = os.path.basename(f)
1923 if base not in bbfile_seen:
1924 bbfile_seen[base] = f
1925 else:
1926 topfile = bbfile_seen[base]
1927 self.overlayed[topfile].append(f)
1928
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001929 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001930
1931 def get_file_appends(self, fn):
1932 """
1933 Returns a list of .bbappend files to apply to fn
1934 """
1935 filelist = []
1936 f = os.path.basename(fn)
1937 for b in self.bbappends:
1938 (bbappend, filename) = b
1939 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1940 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001941 return tuple(filelist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001942
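    # Worked example of the '%' wildcard above (hypothetical file names): an append registered
    # as ("busybox_%.bb", ".../busybox_%.bbappend") applies to "busybox_1.36.0.bb" because the
    # target name's characters up to the '%' position ("busybox_") are a prefix of the pattern;
    # an exact basename match such as "busybox_1.36.0.bbappend" applies as well.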
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001943 def collection_priorities(self, pkgfns, fns, d):
1944 # Return the priorities of the entries in pkgfns
1945 # Also check that all the regexes in self.bbfile_config_priorities are used
1946 # (but to do that we need to ensure skipped recipes aren't counted, nor
1947 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001948
1949 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001950 seen = set()
1951 matched = set()
1952
1953 matched_regex = set()
1954 unmatched_regex = set()
1955 for _, _, regex, _ in self.bbfile_config_priorities:
1956 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001957
1958 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001959 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001960 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001961 priorities[p], regex = self.calc_bbfile_priority(realfn)
1962 if regex in unmatched_regex:
1963 matched_regex.add(regex)
1964 unmatched_regex.remove(regex)
1965 seen.add(realfn)
1966 if regex:
1967 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001968
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001969 if unmatched_regex:
1970 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001971 for b in self.bbappends:
1972 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001973 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001974
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001975 # Account for skipped recipes
1976 seen.update(fns)
1977
1978 seen.difference_update(matched)
1979
1980 def already_matched(fn):
1981 for regex in matched_regex:
1982 if regex.match(fn):
1983 return True
1984 return False
1985
1986 for unmatch in unmatched_regex.copy():
1987 for fn in seen:
1988 if unmatch.match(fn):
1989 # If the bbappend or file was already matched by another regex, skip it
1990 # e.g. for a layer within a layer, the outer regex could match, the inner
1991 # regex may match nothing and we should warn about that
1992 if already_matched(fn):
1993 continue
1994 unmatched_regex.remove(unmatch)
1995 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001996
1997 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001998 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001999 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05002000 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
2001 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002
2003 return priorities
2004
2005class ParsingFailure(Exception):
2006 def __init__(self, realexception, recipe):
2007 self.realexception = realexception
2008 self.recipe = recipe
2009 Exception.__init__(self, realexception, recipe)
2010
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002011class Parser(multiprocessing.Process):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002012 def __init__(self, jobs, results, quit, profile):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002013 self.jobs = jobs
2014 self.results = results
2015 self.quit = quit
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002016 multiprocessing.Process.__init__(self)
2017 self.context = bb.utils.get_context().copy()
2018 self.handlers = bb.event.get_class_handlers().copy()
2019 self.profile = profile
Andrew Geissler9aee5002022-03-30 16:27:02 +00002020 self.queue_signals = False
2021 self.signal_received = []
2022 self.signal_threadlock = threading.Lock()
2023
2024 def catch_sig(self, signum, frame):
2025 if self.queue_signals:
2026 self.signal_received.append(signum)
2027 else:
2028 self.handle_sig(signum, frame)
2029
2030 def handle_sig(self, signum, frame):
2031 if signum == signal.SIGTERM:
2032 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2033 os.kill(os.getpid(), signal.SIGTERM)
2034 elif signum == signal.SIGINT:
2035 signal.default_int_handler(signum, frame)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002036
2037 def run(self):
2038
2039 if not self.profile:
2040 self.realrun()
2041 return
2042
2043 try:
2044 import cProfile as profile
2045 except:
2046 import profile
2047 prof = profile.Profile()
2048 try:
2049 profile.Profile.runcall(prof, self.realrun)
2050 finally:
2051 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2052 prof.dump_stats(logfile)
2053
2054 def realrun(self):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002055 # Signal handling here is hard. We must not terminate any process or thread holding the write
2056 # lock for the event stream as it will not be released, ever, and things will hang.
2057 # Python handles signals in the main thread/process but they can be raised from any thread and
2058 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2059 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2060 # new thread should also do so) and we defer handling but we handle with the local thread lock
2061 # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2062 # can be in the critical section.
2063 signal.signal(signal.SIGTERM, self.catch_sig)
2064 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2065 signal.signal(signal.SIGINT, self.catch_sig)
2066 bb.utils.set_process_name(multiprocessing.current_process().name)
2067 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2068 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002069
2070 pending = []
Andrew Geissler9aee5002022-03-30 16:27:02 +00002071 try:
2072 while True:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002073 try:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002074 self.quit.get_nowait()
2075 except queue.Empty:
2076 pass
2077 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002078 break
Andrew Geissler9aee5002022-03-30 16:27:02 +00002079
2080 if pending:
2081 result = pending.pop()
2082 else:
2083 try:
2084 job = self.jobs.pop()
2085 except IndexError:
2086 break
2087 result = self.parse(*job)
2088 # Clear the siggen cache after parsing to control memory usage, it's huge
2089 bb.parse.siggen.postparsing_clean_cache()
2090 try:
2091 self.results.put(result, timeout=0.25)
2092 except queue.Full:
2093 pending.append(result)
2094 finally:
2095 self.results.close()
2096 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002097
Andrew Geissler5a43b432020-06-13 10:46:56 -05002098 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002099 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05002100 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002101 # Record the filename we're parsing into any events generated
2102 def parse_filter(self, record):
2103 record.taskpid = bb.event.worker_pid
2104 record.fn = filename
2105 return True
2106
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002107 # Reset our environment and handlers to the original settings
2108 bb.utils.set_context(self.context.copy())
2109 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002110 bb.event.LogHandler.filter = parse_filter
2111
Andrew Geissler5a43b432020-06-13 10:46:56 -05002112 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002113 except Exception as exc:
2114 tb = sys.exc_info()[2]
2115 exc.recipe = filename
2116 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002117 return True, None, exc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002118 # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
2119 # and, for example, a worker thread doesn't just exit on its own in response to
2120 # a SystemExit event.
2121 except BaseException as exc:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002122 return True, None, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002123 finally:
2124 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002125
2126class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002127 def __init__(self, cooker, mcfilelist, masked):
2128 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002129 self.cooker = cooker
2130 self.cfgdata = cooker.data
2131 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002132 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002133
2134 # Accounting statistics
2135 self.parsed = 0
2136 self.cached = 0
2137 self.error = 0
2138 self.masked = masked
2139
2140 self.skipped = 0
2141 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002142
2143 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002144 self.process_names = []
2145
Andrew Geissler5a43b432020-06-13 10:46:56 -05002146 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2147 self.fromcache = set()
2148 self.willparse = set()
2149 for mc in self.cooker.multiconfigs:
2150 for filename in self.mcfilelist[mc]:
2151 appends = self.cooker.collections[mc].get_file_appends(filename)
2152 if not self.bb_caches[mc].cacheValid(filename, appends):
2153 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2154 else:
2155 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2156
2157 self.total = len(self.fromcache) + len(self.willparse)
2158 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002159 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002160
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002161 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002162 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002163
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002164 self.start()
2165 self.haveshutdown = False
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002166 self.syncthread = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002167
2168 def start(self):
2169 self.results = self.load_cached()
2170 self.processes = []
2171 if self.toparse:
2172 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002173
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002174 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002175 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002176
2177 def chunkify(lst,n):
2178 return [lst[i::n] for i in range(n)]
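        # Worked example of chunkify (illustrative values): chunkify([1, 2, 3, 4, 5], 2) deals
        # the entries out round-robin as [[1, 3, 5], [2, 4]], giving each parser process a
        # roughly equal share of the recipes to parse.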
Andrew Geissler5a43b432020-06-13 10:46:56 -05002179 self.jobs = chunkify(list(self.willparse), self.num_processes)
Brad Bishop19323692019-04-05 15:28:33 -04002180
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002181 for i in range(0, self.num_processes):
Andrew Geissler9aee5002022-03-30 16:27:02 +00002182 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002183 parser.start()
2184 self.process_names.append(parser.name)
2185 self.processes.append(parser)
2186
2187 self.results = itertools.chain(self.results, self.parse_generator())
2188
Andrew Geissler9aee5002022-03-30 16:27:02 +00002189 def shutdown(self, clean=True):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002190 if not self.toparse:
2191 return
2192 if self.haveshutdown:
2193 return
2194 self.haveshutdown = True
2195
2196 if clean:
2197 event = bb.event.ParseCompleted(self.cached, self.parsed,
2198 self.skipped, self.masked,
2199 self.virtuals, self.error,
2200 self.total)
2201
2202 bb.event.fire(event, self.cfgdata)
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00002203 else:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002204 bb.error("Parsing halted due to errors, see error messages above")
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002205
2206 for process in self.processes:
2207 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002208
Brad Bishop08902b02019-08-20 09:16:51 -04002209 # Clean up the queue before calling process.join(), otherwise there might be
2210 # deadlocks.
2211 while True:
2212 try:
2213 self.result_queue.get(timeout=0.25)
2214 except queue.Empty:
2215 break
2216
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002217 for process in self.processes:
Andrew Geissler9aee5002022-03-30 16:27:02 +00002218 process.join(0.5)
2219
2220 for process in self.processes:
2221 if process.exitcode is None:
2222 os.kill(process.pid, signal.SIGINT)
2223
2224 for process in self.processes:
2225 process.join(0.5)
2226
2227 for process in self.processes:
2228 if process.exitcode is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002229 process.terminate()
Andrew Geissler9aee5002022-03-30 16:27:02 +00002230
2231 for process in self.processes:
2232 process.join()
2233 # Added in 3.7, cleans up zombies
2234 if hasattr(process, "close"):
2235 process.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002236
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002237 self.parser_quit.close()
2238 # Allow data left in the cancel queue to be discarded
2239 self.parser_quit.cancel_join_thread()
2240
Andrew Geissler5a43b432020-06-13 10:46:56 -05002241 def sync_caches():
2242 for c in self.bb_caches.values():
2243 c.sync()
2244
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002245 sync = threading.Thread(target=sync_caches, name="SyncThread")
2246 self.syncthread = sync
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002247 sync.start()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002248 bb.codeparser.parser_cache_savemerge()
2249 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002250 if self.cooker.configuration.profile:
2251 profiles = []
2252 for i in self.process_names:
2253 logfile = "profile-parse-%s.log" % i
2254 if os.path.exists(logfile):
2255 profiles.append(logfile)
2256
2257 pout = "profile-parse.log.processed"
2258 bb.utils.process_profilelog(profiles, pout = pout)
2259 print("Processed parsing statistics saved to %s" % (pout))
2260
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002261 def final_cleanup(self):
2262 if self.syncthread:
2263 self.syncthread.join()
2264
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002265 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002266 for mc, cache, filename, appends in self.fromcache:
2267 cached, infos = cache.load(filename, appends)
2268 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002269
2270 def parse_generator(self):
Andrew Geissler595f6302022-01-24 19:11:47 +00002271 empty = False
2272 while self.processes or not empty:
2273 for process in self.processes.copy():
2274 if not process.is_alive():
2275 process.join()
2276 self.processes.remove(process)
2277
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002278 if self.parsed >= self.toparse:
2279 break
2280
2281 try:
2282 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002283 except queue.Empty:
Andrew Geissler595f6302022-01-24 19:11:47 +00002284 empty = True
Andrew Geissler9aee5002022-03-30 16:27:02 +00002285 yield None, None, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002286 else:
Andrew Geissler595f6302022-01-24 19:11:47 +00002287 empty = False
Andrew Geissler9aee5002022-03-30 16:27:02 +00002288 yield result
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002289
Andrew Geissler595f6302022-01-24 19:11:47 +00002290 if not (self.parsed >= self.toparse):
2291 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2292
2293
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002294 def parse_next(self):
2295 result = []
2296 parsed = None
2297 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002298 parsed, mc, result = next(self.results)
Andrew Geissler9aee5002022-03-30 16:27:02 +00002299 if isinstance(result, BaseException):
2300 # Turn exceptions back into exceptions
2301 raise result
2302 if parsed is None:
2303 # Timeout, loop back through the main loop
2304 return True
2305
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002306 except StopIteration:
2307 self.shutdown()
2308 return False
2309 except bb.BBHandledException as exc:
2310 self.error += 1
Andrew Geissler7e0e3c02022-02-25 20:34:39 +00002311 logger.debug('Failed to parse recipe: %s' % exc.recipe)
Andrew Geissler9aee5002022-03-30 16:27:02 +00002312 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002313 return False
2314 except ParsingFailure as exc:
2315 self.error += 1
2316 logger.error('Unable to parse %s: %s' %
2317 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002318 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002319 return False
2320 except bb.parse.ParseError as exc:
2321 self.error += 1
2322 logger.error(str(exc))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002323 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002324 return False
2325 except bb.data_smart.ExpansionError as exc:
2326 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002327 bbdir = os.path.dirname(__file__) + os.sep
2328 etype, value, _ = sys.exc_info()
2329 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2330 logger.error('ExpansionError during parsing %s', value.recipe,
2331 exc_info=(etype, value, tb))
Andrew Geissler9aee5002022-03-30 16:27:02 +00002332 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002333 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002334 except Exception as exc:
2335 self.error += 1
2336 etype, value, tb = sys.exc_info()
2337 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002338 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002339 exc_info=(etype, value, exc.traceback))
2340 else:
2341 # Most likely, an exception occurred during raising an exception
2342 import traceback
2343 logger.error('Exception during parse: %s' % traceback.format_exc())
Andrew Geissler9aee5002022-03-30 16:27:02 +00002344 self.shutdown(clean=False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002345 return False
2346
2347 self.current += 1
2348 self.virtuals += len(result)
2349 if parsed:
2350 self.parsed += 1
2351 if self.parsed % self.progress_chunk == 0:
2352 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2353 self.cfgdata)
2354 else:
2355 self.cached += 1
2356
2357 for virtualfn, info_array in result:
2358 if info_array[0].skipped:
2359 self.skipped += 1
2360 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002361 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002362 parsed=parsed, watcher = self.cooker.add_filewatch)
2363 return True
2364
2365 def reparse(self, filename):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002366 to_reparse = set()
2367 for mc in self.cooker.multiconfigs:
2368 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2369
2370 for mc, filename, appends in to_reparse:
2371 infos = self.bb_caches[mc].parse(filename, appends)
2372 for vfn, info_array in infos:
2373 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)