#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


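# Note: EventWriter implements the event log requested via writeeventlog. Each
# event is written as one JSON line holding the event class name and a
# base64-encoded pickle of the event; events are queued in memory until
# BuildStarted (or CookerExit) creates the file.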
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

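        # Registered below as an idle callback so that pending pyinotify events
        # are drained regularly while the server is otherwise idle.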
        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

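    # Add inotify watches for the given dependency files. Directories are
    # watched rather than individual files; if a path does not exist yet, the
    # loop below walks up and watches the closest existing parent so that the
    # file's later creation is still noticed.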
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=self.data.getVar("BB_HASHSERVE_UPSTREAM") or None,
                )
                self.hashserv.serve_as_process()
                self.data.setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
                for mc in self.databuilder.mcdata:
                    self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
                        else:
                            logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                       '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

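    # Returns a (latest, latest_f, preferred_ver, preferred_file) tuple. For
    # targets only reachable through PROVIDES, the first three fields are None
    # and only the chosen provider filename is filled in.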
    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            if eligible is not None:
                filename = eligible[0]
            else:
                filename = None
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start( self )


Patrick Williamsc0f7c042017-02-23 20:41:17 -06001162 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001163 """Handle collections"""
1164 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001165 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001166 if collections:
1167 collection_priorities = {}
1168 collection_depends = {}
1169 collection_list = collections.split()
1170 min_prio = 0
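# min_prio tracks the lowest explicit BBFILE_PRIORITY seen; it is used below as the
# baseline when a layer's priority has to be calculated from its dependencies.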
1171 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001172 bb.debug(1,'Processing %s in collection list' % (c))
1173
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001174 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001175 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001176 if priority:
1177 try:
1178 prio = int(priority)
1179 except ValueError:
1180 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1181 errors = True
1182 if min_prio == 0 or prio < min_prio:
1183 min_prio = prio
1184 collection_priorities[c] = prio
1185 else:
1186 collection_priorities[c] = None
1187
1188 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001189 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001190 if deps:
1191 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001192 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001193 except bb.utils.VersionStringException as vse:
1194 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001195 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001196 if dep in collection_list:
1197 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001198 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001199 (op, depver) = opstr.split()
1200 if layerver:
1201 try:
1202 res = bb.utils.vercmp_string_op(layerver, depver, op)
1203 except bb.utils.VersionStringException as vse:
1204 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1205 if not res:
1206 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1207 errors = True
1208 else:
1209 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1210 errors = True
1211 else:
1212 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1213 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001214 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215 else:
1216 collection_depends[c] = []
1217
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001218 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001219 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001220 if recs:
1221 try:
1222 recDict = bb.utils.explode_dep_versions2(recs)
1223 except bb.utils.VersionStringException as vse:
1224 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1225 for rec, oplist in list(recDict.items()):
1226 if rec in collection_list:
1227 if oplist:
1228 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001229 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001230 if layerver:
1231 (op, recver) = opstr.split()
1232 try:
1233 res = bb.utils.vercmp_string_op(layerver, recver, op)
1234 except bb.utils.VersionStringException as vse:
1235 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1236 if not res:
1237 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1238 continue
1239 else:
1240 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1241 continue
1242 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1243 collection_depends[c].append(rec)
1244 else:
1245 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1246
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001247 # Recursively work out collection priorities based on dependencies
1248 def calc_layer_priority(collection):
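# A layer with no explicit BBFILE_PRIORITY is given a priority one higher than the
# highest priority among its dependencies (recursing through collection_depends),
# starting from min_prio.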
1249 if not collection_priorities[collection]:
1250 max_depprio = min_prio
1251 for dep in collection_depends[collection]:
1252 calc_layer_priority(dep)
1253 depprio = collection_priorities[dep]
1254 if depprio > max_depprio:
1255 max_depprio = depprio
1256 max_depprio += 1
1257 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1258 collection_priorities[collection] = max_depprio
1259
1260 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1261 for c in collection_list:
1262 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001263 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Andrew Geissler82c905d2020-04-13 13:39:40 -05001264 if regex is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001265 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1266 errors = True
1267 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001268 elif regex == "":
1269 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001270 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001271 errors = False
1272 else:
1273 try:
1274 cre = re.compile(regex)
1275 except re.error:
1276 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1277 errors = True
1278 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001279 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001280 if errors:
1281 # We've already printed the actual error(s)
1282 raise CollectionError("Errors during parsing layer configuration")
1283
1284 def buildSetVars(self):
1285 """
1286 Setup any variables needed before starting a build
1287 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001288 t = time.gmtime()
1289 for mc in self.databuilder.mcdata:
1290 ds = self.databuilder.mcdata[mc]
1291 if not ds.getVar("BUILDNAME", False):
1292 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1293 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1294 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1295 ds.setVar("TIME", time.strftime('%H%M%S', t))
1296
1297 def reset_mtime_caches(self):
1298 """
1299 Reset mtime caches - this is particularly important when memory resident, as something
1300 which is cached may well have changed since the last invocation (e.g. a
1301 file associated with a recipe might have been modified by the user).
1302 """
1303 build.reset_cache()
1304 bb.fetch._checksum_cache.mtime_cache.clear()
1305 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1306 if siggen_cache:
1307 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001308
Andrew Geissler5a43b432020-06-13 10:46:56 -05001309 def matchFiles(self, bf, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001310 """
1311 Find the .bb files which match the expression in 'buildfile'.
1312 """
1313 if bf.startswith("/") or bf.startswith("../"):
1314 bf = os.path.abspath(bf)
1315
Andrew Geissler5a43b432020-06-13 10:46:56 -05001316 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1317 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
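# If 'bf' names an existing file, return it directly; otherwise treat it as a
# regular expression and match it against the collected recipe files.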
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001318 try:
1319 os.stat(bf)
1320 bf = os.path.abspath(bf)
1321 return [bf]
1322 except OSError:
1323 regexp = re.compile(bf)
1324 matches = []
1325 for f in filelist:
1326 if regexp.search(f) and os.path.isfile(f):
1327 matches.append(f)
1328 return matches
1329
Andrew Geissler5a43b432020-06-13 10:46:56 -05001330 def matchFile(self, buildfile, mc=''):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001331 """
1332 Find the .bb file which matches the expression in 'buildfile'.
1333 Raise an error if multiple files
1334 """
Andrew Geissler5a43b432020-06-13 10:46:56 -05001335 matches = self.matchFiles(buildfile, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001336 if len(matches) != 1:
1337 if matches:
1338 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1339 if matches:
1340 for f in matches:
1341 msg += "\n %s" % f
1342 parselog.error(msg)
1343 else:
1344 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1345 raise NoSpecificMatch
1346 return matches[0]
1347
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001348 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001349 """
1350 Build the file matching regexp buildfile
1351 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001352 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001354 # Too many people use -b because they think it's how you normally
1355 # specify a target to be built, so show a warning
1356 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1357
1358 self.buildFileInternal(buildfile, task)
1359
1360 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1361 """
1362 Build the file matching regexp buildfile
1363 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001364
1365 # Parse the configuration here. We need to do it explicitly here since
1366 # buildFile() doesn't use the cache
1367 self.parseConfiguration()
1368
1369 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001370 if task is None:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001371 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001372 if not task.startswith("do_"):
1373 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001374
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001375 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001376 fn = self.matchFile(fn, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377
1378 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001379 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001380
Andrew Geissler5a43b432020-06-13 10:46:56 -05001381 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001382
Andrew Geissler5a43b432020-06-13 10:46:56 -05001383 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001384 infos = dict(infos)
1385
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001386 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001387 try:
1388 info_array = infos[fn]
1389 except KeyError:
1390 bb.fatal("%s does not exist" % fn)
1391
1392 if info_array[0].skipped:
1393 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1394
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001395 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001396
1397 # Tweak some variables
1398 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001399 self.recipecaches[mc].ignored_dependencies = set()
1400 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001401 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001402
1403 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001404 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1405 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001406 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1407 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408
1409 # Invalidate task for target if force mode active
1410 if self.configuration.force:
1411 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001412 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001413
1414 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001415 taskdata = {}
1416 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001417 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001418
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001419 if quietlog:
1420 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1421 bb.runqueue.logger.setLevel(logging.WARNING)
1422
1423 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1424 if fireevents:
1425 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001426
1427 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001428 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001429
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001430 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001431
1432 def buildFileIdle(server, rq, abort):
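# Idle callback registered below: each call pumps the runqueue; once the build
# completes or is shut down it finishes the async command and returns False to
# indicate it is done.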
1433
1434 msg = None
1435 interrupted = 0
1436 if abort or self.state == state.forceshutdown:
1437 rq.finish_runqueue(True)
1438 msg = "Forced shutdown"
1439 interrupted = 2
1440 elif self.state == state.shutdown:
1441 rq.finish_runqueue(False)
1442 msg = "Stopped build"
1443 interrupted = 1
1444 failures = 0
1445 try:
1446 retval = rq.execute_runqueue()
1447 except runqueue.TaskFailure as exc:
1448 failures += len(exc.args)
1449 retval = False
1450 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001451 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001452 if quietlog:
1453 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001454 return False
1455
1456 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001457 if fireevents:
1458 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001459 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001460 # We trashed self.recipecaches above
1461 self.parsecache_valid = False
1462 self.configuration.limited_deps = False
1463 bb.parse.siggen.reset(self.data)
1464 if quietlog:
1465 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001466 return False
1467 if retval is True:
1468 return True
1469 return retval
1470
Andrew Geissler635e0e42020-08-21 15:58:33 -05001471 self.idleCallBackRegister(buildFileIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001472
1473 def buildTargets(self, targets, task):
1474 """
1475 Attempt to build the targets specified
1476 """
1477
1478 def buildTargetsIdle(server, rq, abort):
1479 msg = None
1480 interrupted = 0
1481 if abort or self.state == state.forceshutdown:
1482 rq.finish_runqueue(True)
1483 msg = "Forced shutdown"
1484 interrupted = 2
1485 elif self.state == state.shutdown:
1486 rq.finish_runqueue(False)
1487 msg = "Stopped build"
1488 interrupted = 1
1489 failures = 0
1490 try:
1491 retval = rq.execute_runqueue()
1492 except runqueue.TaskFailure as exc:
1493 failures += len(exc.args)
1494 retval = False
1495 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001496 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001497 return False
1498
1499 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001500 try:
1501 for mc in self.multiconfigs:
1502 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1503 finally:
1504 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001505 return False
1506 if retval is True:
1507 return True
1508 return retval
1509
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001510 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001511 self.buildSetVars()
1512
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001513 # If we are told to do the None task then query the default task
Andrew Geissler82c905d2020-04-13 13:39:40 -05001514 if task is None:
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001515 task = self.configuration.cmd
1516
1517 if not task.startswith("do_"):
1518 task = "do_%s" % task
1519
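# Targets given without an explicit ':<task>' suffix have the current task appended
# before being announced in the BuildInit event.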
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001520 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1521
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001522 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001523
1524 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001525
1526 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001527
1528 # make targets to always look as <target>:do_<task>
1529 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001530 for target in runlist:
1531 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001532 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001533 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001534
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001535 for mc in self.multiconfigs:
1536 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001537
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001538 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001539 if 'universe' in targets:
1540 rq.rqdata.warn_multi_bb = True
1541
Andrew Geissler635e0e42020-08-21 15:58:33 -05001542 self.idleCallBackRegister(buildTargetsIdle, rq)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001543
1544
1545 def getAllKeysWithFlags(self, flaglist):
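# Dump all non-internal datastore variables with their value, history and the
# requested flags; python functions are left unexpanded.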
1546 dump = {}
1547 for k in self.data.keys():
1548 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001549 expand = True
1550 flags = self.data.getVarFlags(k)
1551 if flags and "func" in flags and "python" in flags:
1552 expand = False
1553 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001554 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1555 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001556 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001557 'history' : self.data.varhistory.variable(k),
1558 }
1559 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001560 if flags and d in flags:
1561 dump[k][d] = flags[d]
1562 else:
1563 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001564 except Exception as e:
1565 print(e)
1566 return dump
1567
1568
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001569 def updateCacheSync(self):
1570 if self.state == state.running:
1571 return
1572
1573 # reload files for which we got notifications
1574 for p in self.inotify_modified_files:
1575 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001576 if p in bb.parse.BBHandler.cached_statements:
1577 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001578 self.inotify_modified_files = []
1579
1580 if not self.baseconfig_valid:
Andrew Geisslerd1e89492021-02-12 15:35:20 -06001581 logger.debug("Reloading base configuration data")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001582 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001583 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001584
1585 # This is called for all async commands when self.state != running
1586 def updateCache(self):
1587 if self.state == state.running:
1588 return
1589
1590 if self.state in (state.shutdown, state.forceshutdown, state.error):
1591 if hasattr(self.parser, 'shutdown'):
1592 self.parser.shutdown(clean=False, force = True)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001593 self.parser.final_cleanup()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001594 raise bb.BBHandledException()
1595
1596 if self.state != state.parsing:
1597 self.updateCacheSync()
1598
1599 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001600 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001601 self.parseConfiguration()
1602 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001603 for mc in self.multiconfigs:
1604 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001605
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001606 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001607 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001608 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001610 for dep in self.configuration.extra_assume_provided:
1611 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612
Andrew Geissler5a43b432020-06-13 10:46:56 -05001613 self.collections = {}
1614
1615 mcfilelist = {}
1616 total_masked = 0
1617 searchdirs = set()
1618 for mc in self.multiconfigs:
1619 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1620 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1621
1622 mcfilelist[mc] = filelist
1623 total_masked += masked
1624 searchdirs |= set(search)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001625
1626 # Add inotify watches for directories searched for bb/bbappend files
1627 for dirent in searchdirs:
1628 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001629
Andrew Geissler5a43b432020-06-13 10:46:56 -05001630 self.parser = CookerParser(self, mcfilelist, total_masked)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001631 self.parsecache_valid = True
1632
1633 self.state = state.parsing
1634
1635 if not self.parser.parse_next():
1636 collectlog.debug(1, "parsing complete")
1637 if self.parser.error:
1638 raise bb.BBHandledException()
1639 self.show_appends_with_no_recipes()
1640 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001641 for mc in self.multiconfigs:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001642 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001643 self.state = state.running
1644
1645 # Send an event listing all stamps reachable after parsing
1646 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001647 for mc in self.multiconfigs:
1648 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1649 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001650 return None
1651
1652 return True
1653
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001654 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001655
1656 # Return a copy, don't modify the original
1657 pkgs_to_build = pkgs_to_build[:]
1658
1659 if len(pkgs_to_build) == 0:
1660 raise NothingToBuild
1661
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001662 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001663 for pkg in pkgs_to_build.copy():
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001664 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001665 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001666 if pkg.startswith("multiconfig:"):
1667 pkgs_to_build.remove(pkg)
1668 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001669
1670 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001671 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001672 for mc in self.multiconfigs:
1673 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1674 for t in self.recipecaches[mc].world_target:
1675 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001676 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001677 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001678
1679 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001680 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001681 parselog.debug(1, "collating packages for \"universe\"")
1682 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001683 for mc in self.multiconfigs:
1684 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001685 if task:
1686 foundtask = False
1687 for provider_fn in self.recipecaches[mc].providers[t]:
1688 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1689 foundtask = True
1690 break
1691 if not foundtask:
1692 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1693 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001694 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001695 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001696 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001697
1698 return pkgs_to_build
1699
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001700 def pre_serve(self):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001701 return
1702
1703 def post_serve(self):
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001704 self.shutdown(force=True)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001705 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001706 if self.hashserv:
1707 self.hashserv.process.terminate()
1708 self.hashserv.process.join()
Andrew Geisslerc3d88e42020-10-02 09:45:00 -05001709 if hasattr(self, "data"):
1710 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001711
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001712 def shutdown(self, force = False):
1713 if force:
1714 self.state = state.forceshutdown
1715 else:
1716 self.state = state.shutdown
1717
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001718 if self.parser:
1719 self.parser.shutdown(clean=not force, force=force)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001720 self.parser.final_cleanup()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001721
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001722 def finishcommand(self):
1723 self.state = state.initial
1724
1725 def reset(self):
1726 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001727 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001728
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001729 def clientComplete(self):
1730 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001731 self.finishcommand()
1732 self.extraconfigdata = {}
1733 self.command.reset()
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001734 if hasattr(self, "data"):
1735 self.databuilder.reset()
1736 self.data = self.databuilder.data
Andrew Geissler82c905d2020-04-13 13:39:40 -05001737 self.parsecache_valid = False
1738 self.baseconfig_valid = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001739
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001740
1741class CookerExit(bb.event.Event):
1742 """
1743 Notify clients of the Cooker shutdown
1744 """
1745
1746 def __init__(self):
1747 bb.event.Event.__init__(self)
1748
1749
1750class CookerCollectFiles(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05001751 def __init__(self, priorities, mc=''):
1752 self.mc = mc
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001753 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001754 # Priorities is a list of tuples, with the second element as the pattern.
1755 # We need to sort the list with the longest pattern first, and so on to
1756 # the shortest. This allows nested layers to be properly evaluated.
1757 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001758
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001759 def calc_bbfile_priority(self, filename):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001760 for _, _, regex, pri in self.bbfile_config_priorities:
1761 if regex.match(filename):
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001762 return pri, regex
1763 return 0, None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001764
1765 def get_bbfiles(self):
1766 """Get list of default .bb files by reading out the current directory"""
1767 path = os.getcwd()
1768 contents = os.listdir(path)
1769 bbfiles = []
1770 for f in contents:
1771 if f.endswith(".bb"):
1772 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1773 return bbfiles
1774
1775 def find_bbfiles(self, path):
1776 """Find all the .bb and .bbappend files in a directory"""
1777 found = []
1778 for dir, dirs, files in os.walk(path):
1779 for ignored in ('SCCS', 'CVS', '.svn'):
1780 if ignored in dirs:
1781 dirs.remove(ignored)
1782 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1783
1784 return found
1785
1786 def collect_bbfiles(self, config, eventdata):
1787 """Collect all available .bb build files"""
1788 masked = 0
1789
1790 collectlog.debug(1, "collecting .bb files")
1791
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001792 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001793
1794 # Sort files by priority
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001795 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
Andrew Geisslerc9f78652020-09-18 14:11:35 -05001796 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001797
1798 if not len(files):
1799 files = self.get_bbfiles()
1800
1801 if not len(files):
1802 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1803 bb.event.fire(CookerExit(), eventdata)
1804
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001805 # We need to track where we look so that we can add inotify watches. There
1806 # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001807 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001808 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001809 if hasattr(os, 'scandir'):
1810 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001811 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001812
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001813 def ourlistdir(d):
1814 searchdirs.append(d)
1815 return origlistdir(d)
1816
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001817 def ourscandir(d):
1818 searchdirs.append(d)
1819 return origscandir(d)
1820
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001821 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001822 if hasattr(os, 'scandir'):
1823 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001824 try:
1825 # Can't use set here as order is important
1826 newfiles = []
1827 for f in files:
1828 if os.path.isdir(f):
1829 dirfiles = self.find_bbfiles(f)
1830 for g in dirfiles:
1831 if g not in newfiles:
1832 newfiles.append(g)
1833 else:
1834 globbed = glob.glob(f)
1835 if not globbed and os.path.exists(f):
1836 globbed = [f]
1837 # glob gives files in order on disk. Sort to be deterministic.
1838 for g in sorted(globbed):
1839 if g not in newfiles:
1840 newfiles.append(g)
1841 finally:
1842 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001843 if hasattr(os, 'scandir'):
1844 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001845
1846 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001847
1848 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001849 # First validate the individual regular expressions and ignore any
1850 # that do not compile
1851 bbmasks = []
1852 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001853 # When constructing an older style single regex, it's possible for BBMASK
1854 # to end up beginning with '|', which matches and masks _everything_.
1855 if mask.startswith("|"):
Andrew Geissler82c905d2020-04-13 13:39:40 -05001856 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001857 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001858 try:
1859 re.compile(mask)
1860 bbmasks.append(mask)
1861 except sre_constants.error:
1862 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1863
1864 # Then validate the combined regular expressions. This should never
1865 # fail, but better safe than sorry...
1866 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001867 try:
1868 bbmask_compiled = re.compile(bbmask)
1869 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001870 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1871 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001872
1873 bbfiles = []
1874 bbappend = []
1875 for f in newfiles:
1876 if bbmask and bbmask_compiled.search(f):
1877 collectlog.debug(1, "skipping masked file %s", f)
1878 masked += 1
1879 continue
1880 if f.endswith('.bb'):
1881 bbfiles.append(f)
1882 elif f.endswith('.bbappend'):
1883 bbappend.append(f)
1884 else:
1885 collectlog.debug(1, "skipping %s: unknown file extension", f)
1886
1887 # Build a list of .bbappend files for each .bb file
1888 for f in bbappend:
1889 base = os.path.basename(f).replace('.bbappend', '.bb')
1890 self.bbappends.append((base, f))
1891
1892 # Find overlayed recipes
1893 # bbfiles will be in priority order which makes this easy
1894 bbfile_seen = dict()
1895 self.overlayed = defaultdict(list)
1896 for f in reversed(bbfiles):
1897 base = os.path.basename(f)
1898 if base not in bbfile_seen:
1899 bbfile_seen[base] = f
1900 else:
1901 topfile = bbfile_seen[base]
1902 self.overlayed[topfile].append(f)
1903
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001904 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001905
1906 def get_file_appends(self, fn):
1907 """
1908 Returns a list of .bbappend files to apply to fn
1909 """
1910 filelist = []
1911 f = os.path.basename(fn)
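# A '%' in a bbappend name acts as a wildcard: the recipe basename only needs to
# match the portion of the bbappend name before the '%'.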
1912 for b in self.bbappends:
1913 (bbappend, filename) = b
1914 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1915 filelist.append(filename)
Andrew Geissler5a43b432020-06-13 10:46:56 -05001916 return tuple(filelist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001917
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001918 def collection_priorities(self, pkgfns, fns, d):
1919 # Return the priorities of the entries in pkgfns
1920 # Also check that all the regexes in self.bbfile_config_priorities are used
1921 # (but to do that we need to ensure skipped recipes aren't counted, nor
1922 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001923
1924 priorities = {}
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001925 seen = set()
1926 matched = set()
1927
1928 matched_regex = set()
1929 unmatched_regex = set()
1930 for _, _, regex, _ in self.bbfile_config_priorities:
1931 unmatched_regex.add(regex)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001932
1933 # Calculate priorities for each file
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001934 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001935 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001936 priorities[p], regex = self.calc_bbfile_priority(realfn)
1937 if regex in unmatched_regex:
1938 matched_regex.add(regex)
1939 unmatched_regex.remove(regex)
1940 seen.add(realfn)
1941 if regex:
1942 matched.add(realfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001943
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001944 if unmatched_regex:
1945 # Account for bbappend files
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001946 for b in self.bbappends:
1947 (bbfile, append) = b
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001948 seen.add(append)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001949
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001950 # Account for skipped recipes
1951 seen.update(fns)
1952
1953 seen.difference_update(matched)
1954
1955 def already_matched(fn):
1956 for regex in matched_regex:
1957 if regex.match(fn):
1958 return True
1959 return False
1960
1961 for unmatch in unmatched_regex.copy():
1962 for fn in seen:
1963 if unmatch.match(fn):
1964 # If the bbappend or file was already matched by another regex, skip it
1965 # e.g. for a layer within a layer, the outer regex could match while the inner
1966 # regex matches nothing, in which case we should warn about it
1967 if already_matched(fn):
1968 continue
1969 unmatched_regex.remove(unmatch)
1970 break
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001971
1972 for collection, pattern, regex, _ in self.bbfile_config_priorities:
Andrew Geisslerb7d28612020-07-24 16:15:54 -05001973 if regex in unmatched_regex:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001974 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Andrew Geissler5a43b432020-06-13 10:46:56 -05001975 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1976 collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001977
1978 return priorities
1979
1980class ParsingFailure(Exception):
1981 def __init__(self, realexception, recipe):
1982 self.realexception = realexception
1983 self.recipe = recipe
1984 Exception.__init__(self, realexception, recipe)
1985
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001986class Parser(multiprocessing.Process):
1987 def __init__(self, jobs, results, quit, init, profile):
1988 self.jobs = jobs
1989 self.results = results
1990 self.quit = quit
1991 self.init = init
1992 multiprocessing.Process.__init__(self)
1993 self.context = bb.utils.get_context().copy()
1994 self.handlers = bb.event.get_class_handlers().copy()
1995 self.profile = profile
1996
1997 def run(self):
1998
1999 if not self.profile:
2000 self.realrun()
2001 return
2002
2003 try:
2004 import cProfile as profile
2005 except:
2006 import profile
2007 prof = profile.Profile()
2008 try:
2009 profile.Profile.runcall(prof, self.realrun)
2010 finally:
2011 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2012 prof.dump_stats(logfile)
2013
2014 def realrun(self):
2015 if self.init:
2016 self.init()
2017
2018 pending = []
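# 'pending' holds results which could not be posted because the results queue was
# full; they are retried before another job is pulled from self.jobs.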
2019 while True:
2020 try:
2021 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002022 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002023 pass
2024 else:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002025 self.results.close()
2026 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002027 break
2028
2029 if pending:
2030 result = pending.pop()
2031 else:
2032 try:
Brad Bishop19323692019-04-05 15:28:33 -04002033 job = self.jobs.pop()
2034 except IndexError:
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002035 self.results.close()
2036 self.results.join_thread()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002037 break
2038 result = self.parse(*job)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002039 # Clear the siggen cache after parsing to control memory usage; it's huge
2040 bb.parse.siggen.postparsing_clean_cache()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002041 try:
2042 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002043 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002044 pending.append(result)
2045
Andrew Geissler5a43b432020-06-13 10:46:56 -05002046 def parse(self, mc, cache, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002047 try:
Andrew Geissler82c905d2020-04-13 13:39:40 -05002048 origfilter = bb.event.LogHandler.filter
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002049 # Record the filename we're parsing into any events generated
2050 def parse_filter(self, record):
2051 record.taskpid = bb.event.worker_pid
2052 record.fn = filename
2053 return True
2054
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002055 # Reset our environment and handlers to the original settings
2056 bb.utils.set_context(self.context.copy())
2057 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002058 bb.event.LogHandler.filter = parse_filter
2059
Andrew Geissler5a43b432020-06-13 10:46:56 -05002060 return True, mc, cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002061 except Exception as exc:
2062 tb = sys.exc_info()[2]
2063 exc.recipe = filename
2064 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2065 return True, exc
2066 # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
2067 # and, for example, a worker thread doesn't just exit on its own in response to
2068 # a SystemExit event.
2069 except BaseException as exc:
2070 return True, ParsingFailure(exc, filename)
Andrew Geissler82c905d2020-04-13 13:39:40 -05002071 finally:
2072 bb.event.LogHandler.filter = origfilter
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002073
2074class CookerParser(object):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002075 def __init__(self, cooker, mcfilelist, masked):
2076 self.mcfilelist = mcfilelist
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002077 self.cooker = cooker
2078 self.cfgdata = cooker.data
2079 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002080 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002081
2082 # Accounting statistics
2083 self.parsed = 0
2084 self.cached = 0
2085 self.error = 0
2086 self.masked = masked
2087
2088 self.skipped = 0
2089 self.virtuals = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002090
2091 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002092 self.process_names = []
2093
Andrew Geissler5a43b432020-06-13 10:46:56 -05002094 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
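# Split the recipe files into those whose cache entries are still valid (fromcache)
# and those which need to be parsed again (willparse).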
2095 self.fromcache = set()
2096 self.willparse = set()
2097 for mc in self.cooker.multiconfigs:
2098 for filename in self.mcfilelist[mc]:
2099 appends = self.cooker.collections[mc].get_file_appends(filename)
2100 if not self.bb_caches[mc].cacheValid(filename, appends):
2101 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2102 else:
2103 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2104
2105 self.total = len(self.fromcache) + len(self.willparse)
2106 self.toparse = len(self.willparse)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002107 self.progress_chunk = int(max(self.toparse / 100, 1))
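# Fire a ParseProgress event roughly every 1% of the recipes that need parsing.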
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002108
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002109 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Andrew Geissler5a43b432020-06-13 10:46:56 -05002110 multiprocessing.cpu_count()), self.toparse)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002111
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002112 self.start()
2113 self.haveshutdown = False
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002114 self.syncthread = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002115
2116 def start(self):
2117 self.results = self.load_cached()
2118 self.processes = []
2119 if self.toparse:
2120 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2121 def init():
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002122 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2123 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2124 signal.signal(signal.SIGINT, signal.SIG_IGN)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002125 bb.utils.set_process_name(multiprocessing.current_process().name)
2126 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2127 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002128
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002129 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002130 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002131
2132 def chunkify(lst, n):
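# Split lst into n interleaved (round-robin) slices, one per parser process.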
2133 return [lst[i::n] for i in range(n)]
Andrew Geissler5a43b432020-06-13 10:46:56 -05002134 self.jobs = chunkify(list(self.willparse), self.num_processes)
Brad Bishop19323692019-04-05 15:28:33 -04002135
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002136 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002137 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002138 parser.start()
2139 self.process_names.append(parser.name)
2140 self.processes.append(parser)
2141
2142 self.results = itertools.chain(self.results, self.parse_generator())
2143
2144 def shutdown(self, clean=True, force=False):
2145 if not self.toparse:
2146 return
2147 if self.haveshutdown:
2148 return
2149 self.haveshutdown = True
2150
2151 if clean:
2152 event = bb.event.ParseCompleted(self.cached, self.parsed,
2153 self.skipped, self.masked,
2154 self.virtuals, self.error,
2155 self.total)
2156
2157 bb.event.fire(event, self.cfgdata)
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002158
2159 for process in self.processes:
2160 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002161
Brad Bishop08902b02019-08-20 09:16:51 -04002162 # Cleanup the queue before call process.join(), otherwise there might be
2163 # deadlocks.
2164 while True:
2165 try:
2166 self.result_queue.get(timeout=0.25)
2167 except queue.Empty:
2168 break
2169
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002170 for process in self.processes:
2171 if force:
2172 process.join(.1)
2173 process.terminate()
2174 else:
2175 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002176
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002177 self.parser_quit.close()
2178 # Allow data left in the cancel queue to be discarded
2179 self.parser_quit.cancel_join_thread()
2180
Andrew Geissler5a43b432020-06-13 10:46:56 -05002181 def sync_caches():
2182 for c in self.bb_caches.values():
2183 c.sync()
2184
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002185 sync = threading.Thread(target=sync_caches, name="SyncThread")
2186 self.syncthread = sync
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002187 sync.start()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002188 bb.codeparser.parser_cache_savemerge()
2189 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002190 if self.cooker.configuration.profile:
2191 profiles = []
2192 for i in self.process_names:
2193 logfile = "profile-parse-%s.log" % i
2194 if os.path.exists(logfile):
2195 profiles.append(logfile)
2196
2197 pout = "profile-parse.log.processed"
2198 bb.utils.process_profilelog(profiles, pout = pout)
2199 print("Processed parsing statistics saved to %s" % (pout))
2200
Andrew Geisslerc9f78652020-09-18 14:11:35 -05002201 def final_cleanup(self):
2202 if self.syncthread:
2203 self.syncthread.join()
2204
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002205 def load_cached(self):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002206 for mc, cache, filename, appends in self.fromcache:
2207 cached, infos = cache.load(filename, appends)
2208 yield not cached, mc, infos
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002209
2210 def parse_generator(self):
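# Drain results from the parser processes until everything scheduled for parsing
# has been returned; exceptions raised in the workers are re-raised here.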
2211 while True:
2212 if self.parsed >= self.toparse:
2213 break
2214
2215 try:
2216 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002217 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002218 pass
2219 else:
2220 value = result[1]
2221 if isinstance(value, BaseException):
2222 raise value
2223 else:
2224 yield result
2225
2226 def parse_next(self):
2227 result = []
2228 parsed = None
2229 try:
Andrew Geissler5a43b432020-06-13 10:46:56 -05002230 parsed, mc, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002231 except StopIteration:
2232 self.shutdown()
2233 return False
2234 except bb.BBHandledException as exc:
2235 self.error += 1
2236 logger.error('Failed to parse recipe: %s' % exc.recipe)
Andrew Geissler90fd73c2021-03-05 15:25:55 -06002237 self.shutdown(clean=False, force=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002238 return False
2239 except ParsingFailure as exc:
2240 self.error += 1
2241 logger.error('Unable to parse %s: %s' %
2242 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
Andrew Geissler90fd73c2021-03-05 15:25:55 -06002243 self.shutdown(clean=False, force=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002244 return False
2245 except bb.parse.ParseError as exc:
2246 self.error += 1
2247 logger.error(str(exc))
Andrew Geissler90fd73c2021-03-05 15:25:55 -06002248 self.shutdown(clean=False, force=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002249 return False
2250 except bb.data_smart.ExpansionError as exc:
2251 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002252 bbdir = os.path.dirname(__file__) + os.sep
2253 etype, value, _ = sys.exc_info()
2254 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2255 logger.error('ExpansionError during parsing %s', value.recipe,
2256 exc_info=(etype, value, tb))
Andrew Geissler90fd73c2021-03-05 15:25:55 -06002257 self.shutdown(clean=False, force=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002258 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002259 except Exception as exc:
2260 self.error += 1
2261 etype, value, tb = sys.exc_info()
2262 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002263 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002264 exc_info=(etype, value, exc.traceback))
2265 else:
2266 # Most likely, an exception occurred during raising an exception
2267 import traceback
2268 logger.error('Exception during parse: %s' % traceback.format_exc())
Andrew Geissler90fd73c2021-03-05 15:25:55 -06002269 self.shutdown(clean=False, force=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002270 return False
2271
2272 self.current += 1
2273 self.virtuals += len(result)
2274 if parsed:
2275 self.parsed += 1
2276 if self.parsed % self.progress_chunk == 0:
2277 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2278 self.cfgdata)
2279 else:
2280 self.cached += 1
2281
2282 for virtualfn, info_array in result:
2283 if info_array[0].skipped:
2284 self.skipped += 1
2285 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Andrew Geissler5a43b432020-06-13 10:46:56 -05002286 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002287 parsed=parsed, watcher = self.cooker.add_filewatch)
2288 return True
2289
2290 def reparse(self, filename):
Andrew Geissler5a43b432020-06-13 10:46:56 -05002291 to_reparse = set()
2292 for mc in self.cooker.multiconfigs:
2293 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2294
2295 for mc, filename, appends in to_reparse:
2296 infos = self.bb_caches[mc].parse(filename, appends)
2297 for vfn, info_array in infos:
2298 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)