#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
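    # For example, state.get_name(state.running) returns "running", while an
    # unknown code such as state.get_name(99) raises ValueError.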


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)

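# Typical usage (illustrative): a UI front-end requests extra cooker features by
# passing them to BBCooker(configuration, featureSet=[CookerFeatures.SEND_SANITYEVENTS]),
# which records each requested feature via CookerFeatures.setFeature().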

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)
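# The resulting event log is line-oriented JSON (illustrative): the first line
# written holds {"allvariables": {...}}, and every following line records one
# event, e.g. {"class": "bb.event.BuildStarted", "vars": "<base64-encoded pickle>"}.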

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try to add watches for files that don't exist yet but which,
                # if they appeared, would influence the parser. The parent directory
                # of such a file may not exist either, in which case we need to
                # watch the nearest parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Set up a PR server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
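            # All configuration domains (self.data, origdata and every multiconfig
            # datastore) now share the same BB_HASHSERVE address, e.g.
            # "unix://<TOPDIR>/hashserve.sock" (illustrative path).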

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)
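        # Illustrative example: with multiconfigs "" and "musl" configured, the
        # target "mc:*:bash" expands to "mc:musl:bash" above, and "bash" is
        # recorded as the implicit default target for the empty configuration,
        # appended here only if no explicit default target was named.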

        bb.debug(1, "Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k, depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True
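        # Each mcdepends entry appears to take the form
        # "mc:<from-config>:<to-config>:<recipe>[:<task>]"; index 2 names the
        # configuration that must provide the dependency and index 3 the recipe.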

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
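        # Illustrative content of task-depends.dot (names and paths are examples):
        #   "busybox.do_compile" [label="busybox do_compile\n1:1.31.1-r0\n/path/to/busybox_1.31.1.bb"]
        #   "busybox.do_compile" -> "busybox.do_configure"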
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                                         False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider
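            # Entries handled above take the form "<providee>:<provider>", for
            # example PREFERRED_PROVIDERS += "virtual/kernel:linux-yocto".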

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1, 'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3, "Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3, "Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3, "Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3, "Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio
1206
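        # Editor's sketch (hypothetical layer names): for layers without an
        # explicit BBFILE_PRIORITY, the recursion above assigns each layer a
        # priority one higher than the highest of its dependencies, so a chain
        # meta-app -> meta-middle -> core would leave core lowest, meta-middle
        # one above it and meta-app one above that.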
1207 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1208 for c in collection_list:
1209 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001210 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001211        if regex is None:
1212 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1213 errors = True
1214 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001215 elif regex == "":
1216 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001217 cre = re.compile('^NULL$')
1219 else:
1220 try:
1221 cre = re.compile(regex)
1222 except re.error:
1223 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1224 errors = True
1225 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001226 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001227 if errors:
1228 # We've already printed the actual error(s)
1229 raise CollectionError("Errors during parsing layer configuration")
1230
1231 def buildSetVars(self):
1232 """
1233 Setup any variables needed before starting a build
1234 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001235 t = time.gmtime()
1236 for mc in self.databuilder.mcdata:
1237 ds = self.databuilder.mcdata[mc]
1238 if not ds.getVar("BUILDNAME", False):
1239 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1240 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1241 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1242 ds.setVar("TIME", time.strftime('%H%M%S', t))
1243
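    # Worked example (editor's addition): with t captured at
    # 2015-09-15 14:41:29 UTC the loop above sets DATE = "20150915",
    # TIME = "144129" and BUILDSTART = "09/15/2015 14:41:29", so a BUILDNAME
    # left at its "${DATE}${TIME}" default would expand to "20150915144129".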
1244 def reset_mtime_caches(self):
1245 """
1246 Reset mtime caches - this is particularly important when memory resident as something
1247        which is cached may well have changed since the last invocation (e.g. a
1248 file associated with a recipe might have been modified by the user).
1249 """
1250 build.reset_cache()
1251 bb.fetch._checksum_cache.mtime_cache.clear()
1252 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1253 if siggen_cache:
1254 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001255
1256 def matchFiles(self, bf):
1257 """
1258 Find the .bb files which match the expression in 'buildfile'.
1259 """
1260 if bf.startswith("/") or bf.startswith("../"):
1261 bf = os.path.abspath(bf)
1262
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001263 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001264 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001265 try:
1266 os.stat(bf)
1267 bf = os.path.abspath(bf)
1268 return [bf]
1269 except OSError:
1270 regexp = re.compile(bf)
1271 matches = []
1272 for f in filelist:
1273 if regexp.search(f) and os.path.isfile(f):
1274 matches.append(f)
1275 return matches
1276
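    # Editor's sketch (hypothetical paths): if the -b argument names a file
    # that exists, e.g. "meta-example/recipes-core/foo/foo_1.0.bb", the
    # os.stat() branch above returns its absolute path as the only match; an
    # argument that does not exist on disk, e.g. "foo_.*\.bb", is treated as a
    # regular expression and searched against every collected recipe path, so
    # several matches may come back.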
1277 def matchFile(self, buildfile):
1278 """
1279 Find the .bb file which matches the expression in 'buildfile'.
1280 Raise an error if multiple files
1281        Raise an error if multiple files match.
1282 matches = self.matchFiles(buildfile)
1283 if len(matches) != 1:
1284 if matches:
1285 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1286                for f in matches:
1287                    msg += "\n %s" % f
1289 parselog.error(msg)
1290 else:
1291 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1292 raise NoSpecificMatch
1293 return matches[0]
1294
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001295 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001296 """
1297 Build the file matching regexp buildfile
1298 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001299 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001300
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001301 # Too many people use -b because they think it's how you normally
1302 # specify a target to be built, so show a warning
1303 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1304
1305 self.buildFileInternal(buildfile, task)
1306
1307 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1308 """
1309 Build the file matching regexp buildfile
1310 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001311
1312 # Parse the configuration here. We need to do it explicitly here since
1313 # buildFile() doesn't use the cache
1314 self.parseConfiguration()
1315
1316 # If we are told to do the None task then query the default task
1317        if task is None:
1318 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001319 if not task.startswith("do_"):
1320 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001321
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001322 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001323 fn = self.matchFile(fn)
1324
1325 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001326 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001327
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001328 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1329
1330 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001331 infos = dict(infos)
1332
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001333 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001334 try:
1335 info_array = infos[fn]
1336 except KeyError:
1337 bb.fatal("%s does not exist" % fn)
1338
1339 if info_array[0].skipped:
1340 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1341
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001342 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001343
1344 # Tweak some variables
1345 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001346 self.recipecaches[mc].ignored_dependencies = set()
1347 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001348 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001349
1350 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001351 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1352 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001353 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1354 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001355
1356 # Invalidate task for target if force mode active
1357 if self.configuration.force:
1358 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001359 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360
1361 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001362 taskdata = {}
1363 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001364 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001365
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001366 if quietlog:
1367 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1368 bb.runqueue.logger.setLevel(logging.WARNING)
1369
1370 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1371 if fireevents:
1372 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001373
1374 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001375 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001376
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001377 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378
1379 def buildFileIdle(server, rq, abort):
1380
1381 msg = None
1382 interrupted = 0
1383 if abort or self.state == state.forceshutdown:
1384 rq.finish_runqueue(True)
1385 msg = "Forced shutdown"
1386 interrupted = 2
1387 elif self.state == state.shutdown:
1388 rq.finish_runqueue(False)
1389 msg = "Stopped build"
1390 interrupted = 1
1391 failures = 0
1392 try:
1393 retval = rq.execute_runqueue()
1394 except runqueue.TaskFailure as exc:
1395 failures += len(exc.args)
1396 retval = False
1397 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001398 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001399 if quietlog:
1400 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001401 return False
1402
1403 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001404 if fireevents:
1405 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001406 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001407 # We trashed self.recipecaches above
1408 self.parsecache_valid = False
1409 self.configuration.limited_deps = False
1410 bb.parse.siggen.reset(self.data)
1411 if quietlog:
1412 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001413 return False
1414 if retval is True:
1415 return True
1416 return retval
1417
1418 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1419
1420 def buildTargets(self, targets, task):
1421 """
1422 Attempt to build the targets specified
1423 """
1424
1425 def buildTargetsIdle(server, rq, abort):
1426 msg = None
1427 interrupted = 0
1428 if abort or self.state == state.forceshutdown:
1429 rq.finish_runqueue(True)
1430 msg = "Forced shutdown"
1431 interrupted = 2
1432 elif self.state == state.shutdown:
1433 rq.finish_runqueue(False)
1434 msg = "Stopped build"
1435 interrupted = 1
1436 failures = 0
1437 try:
1438 retval = rq.execute_runqueue()
1439 except runqueue.TaskFailure as exc:
1440 failures += len(exc.args)
1441 retval = False
1442 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001443 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001444 return False
1445
1446 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001447 try:
1448 for mc in self.multiconfigs:
1449 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1450 finally:
1451 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001452 return False
1453 if retval is True:
1454 return True
1455 return retval
1456
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001457 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001458 self.buildSetVars()
1459
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001460 # If we are told to do the None task then query the default task
1461 if (task == None):
1462 task = self.configuration.cmd
1463
1464 if not task.startswith("do_"):
1465 task = "do_%s" % task
1466
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001467 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1468
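        # Editor's sketch: the comprehension above normalises plain targets to
        # "<target>:<task>" form, so with task "do_build" the list
        # ["core-image-minimal", "zlib:do_fetch"] would become
        # ["core-image-minimal:do_build", "zlib:do_fetch"]; entries that
        # already contain ':' pass through unchanged.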
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001469 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001470
1471 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001472
1473 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001474
1475 # make targets to always look as <target>:do_<task>
1476 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001477 for target in runlist:
1478 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001479 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001480 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001481
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001482 for mc in self.multiconfigs:
1483 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001484
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001485 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001486 if 'universe' in targets:
1487 rq.rqdata.warn_multi_bb = True
1488
1489 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1490
1491
1492 def getAllKeysWithFlags(self, flaglist):
1493 dump = {}
1494 for k in self.data.keys():
1495 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001496 expand = True
1497 flags = self.data.getVarFlags(k)
1498 if flags and "func" in flags and "python" in flags:
1499 expand = False
1500 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001501 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1502 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001503 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001504 'history' : self.data.varhistory.variable(k),
1505 }
1506 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001507 if flags and d in flags:
1508 dump[k][d] = flags[d]
1509 else:
1510 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001511 except Exception as e:
1512 print(e)
1513 return dump
1514
1515
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001516 def updateCacheSync(self):
1517 if self.state == state.running:
1518 return
1519
1520 # reload files for which we got notifications
1521 for p in self.inotify_modified_files:
1522 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001523 if p in bb.parse.BBHandler.cached_statements:
1524 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001525 self.inotify_modified_files = []
1526
1527 if not self.baseconfig_valid:
1528 logger.debug(1, "Reloading base configuration data")
1529 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001530 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001531
1532 # This is called for all async commands when self.state != running
1533 def updateCache(self):
1534 if self.state == state.running:
1535 return
1536
1537 if self.state in (state.shutdown, state.forceshutdown, state.error):
1538 if hasattr(self.parser, 'shutdown'):
1539 self.parser.shutdown(clean=False, force = True)
1540 raise bb.BBHandledException()
1541
1542 if self.state != state.parsing:
1543 self.updateCacheSync()
1544
1545 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001546 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001547            self.parseConfiguration()
1548 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001549 for mc in self.multiconfigs:
1550 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001551
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001552 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001553 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001554 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001556 for dep in self.configuration.extra_assume_provided:
1557 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001560 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1561
1562 # Add inotify watches for directories searched for bb/bbappend files
1563 for dirent in searchdirs:
1564 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001565
1566 self.parser = CookerParser(self, filelist, masked)
1567 self.parsecache_valid = True
1568
1569 self.state = state.parsing
1570
1571 if not self.parser.parse_next():
1572 collectlog.debug(1, "parsing complete")
1573 if self.parser.error:
1574 raise bb.BBHandledException()
1575 self.show_appends_with_no_recipes()
1576 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001577 for mc in self.multiconfigs:
1578 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001579 self.state = state.running
1580
1581 # Send an event listing all stamps reachable after parsing
1582 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001583 for mc in self.multiconfigs:
1584 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1585 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001586 return None
1587
1588 return True
1589
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001590 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001591
1592 # Return a copy, don't modify the original
1593 pkgs_to_build = pkgs_to_build[:]
1594
1595 if len(pkgs_to_build) == 0:
1596 raise NothingToBuild
1597
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001598 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001599        for pkg in pkgs_to_build[:]: # iterate over a copy since the list is modified below
1600 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001601 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001602 if pkg.startswith("multiconfig:"):
1603 pkgs_to_build.remove(pkg)
1604 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001605
1606 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001607 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001608 for mc in self.multiconfigs:
1609 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1610 for t in self.recipecaches[mc].world_target:
1611 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001612 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001613 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001614
1615 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001616 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001617 parselog.debug(1, "collating packages for \"universe\"")
1618 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001619 for mc in self.multiconfigs:
1620 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001621 if task:
1622 foundtask = False
1623 for provider_fn in self.recipecaches[mc].providers[t]:
1624 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1625 foundtask = True
1626 break
1627 if not foundtask:
1628 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1629 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001630 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001631 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001632 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001633
1634 return pkgs_to_build
1635
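    # Editor's sketch (hypothetical multiconfig name): with an extra
    # multiconfig "musl" enabled, a "world" request is expanded above into one
    # entry per recipe and per multiconfig, so a recipe zlib contributes both
    # "zlib" and "mc:musl:zlib"; explicit targets using the legacy
    # "multiconfig:" prefix are rewritten to the newer "mc:" form first.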
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001636 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001637 # We now are in our own process so we can call this here.
1638 # PRServ exits if its parent process exits
1639 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001640 return
1641
1642 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001643 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001644 if self.hashserv:
1645 self.hashserv.process.terminate()
1646 self.hashserv.process.join()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001647 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001648
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649 def shutdown(self, force = False):
1650 if force:
1651 self.state = state.forceshutdown
1652 else:
1653 self.state = state.shutdown
1654
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001655 if self.parser:
1656 self.parser.shutdown(clean=not force, force=force)
1657
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001658 def finishcommand(self):
1659 self.state = state.initial
1660
1661 def reset(self):
1662 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001663 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001664
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001665 def clientComplete(self):
1666 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001667 self.finishcommand()
1668 self.extraconfigdata = {}
1669 self.command.reset()
1670 self.databuilder.reset()
1671 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001672
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001673
1674class CookerExit(bb.event.Event):
1675 """
1676 Notify clients of the Cooker shutdown
1677 """
1678
1679 def __init__(self):
1680 bb.event.Event.__init__(self)
1681
1682
1683class CookerCollectFiles(object):
1684 def __init__(self, priorities):
1685 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001686        # Priorities is a list of tuples, with the second element as the pattern.
1687 # We need to sort the list with the longest pattern first, and so on to
1688 # the shortest. This allows nested layers to be properly evaluated.
1689 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001690
1691 def calc_bbfile_priority( self, filename, matched = None ):
1692 for _, _, regex, pri in self.bbfile_config_priorities:
1693 if regex.match(filename):
1694                if matched is not None:
1695                    if regex not in matched:
1696 matched.add(regex)
1697 return pri
1698 return 0
1699
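    # Editor's sketch (hypothetical layer paths): because the patterns are
    # sorted longest-first in __init__, a nested layer pattern such as
    # "^/srv/poky/meta-top/meta-sub/" is tried before its parent
    # "^/srv/poky/meta-top/", so recipes under the sub-layer take the
    # sub-layer's priority rather than the parent's on the first match above.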
1700 def get_bbfiles(self):
1701        """Get the list of default .bb files by scanning the current directory"""
1702 path = os.getcwd()
1703 contents = os.listdir(path)
1704 bbfiles = []
1705 for f in contents:
1706 if f.endswith(".bb"):
1707 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1708 return bbfiles
1709
1710 def find_bbfiles(self, path):
1711 """Find all the .bb and .bbappend files in a directory"""
1712 found = []
1713 for dir, dirs, files in os.walk(path):
1714 for ignored in ('SCCS', 'CVS', '.svn'):
1715 if ignored in dirs:
1716 dirs.remove(ignored)
1717            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1718
1719 return found
1720
1721 def collect_bbfiles(self, config, eventdata):
1722 """Collect all available .bb build files"""
1723 masked = 0
1724
1725 collectlog.debug(1, "collecting .bb files")
1726
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001727 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001728 config.setVar("BBFILES", " ".join(files))
1729
1730 # Sort files by priority
1731 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1732
1733        if not files:
1734 files = self.get_bbfiles()
1735
1736        if not files:
1737 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1738 bb.event.fire(CookerExit(), eventdata)
1739
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001740 # We need to track where we look so that we can add inotify watches. There
1741        # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001742 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001743 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001744 if hasattr(os, 'scandir'):
1745 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001746 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001747
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001748 def ourlistdir(d):
1749 searchdirs.append(d)
1750 return origlistdir(d)
1751
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001752 def ourscandir(d):
1753 searchdirs.append(d)
1754 return origscandir(d)
1755
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001756 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001757 if hasattr(os, 'scandir'):
1758 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001759 try:
1760 # Can't use set here as order is important
1761 newfiles = []
1762 for f in files:
1763 if os.path.isdir(f):
1764 dirfiles = self.find_bbfiles(f)
1765 for g in dirfiles:
1766 if g not in newfiles:
1767 newfiles.append(g)
1768 else:
1769 globbed = glob.glob(f)
1770 if not globbed and os.path.exists(f):
1771 globbed = [f]
1772 # glob gives files in order on disk. Sort to be deterministic.
1773 for g in sorted(globbed):
1774 if g not in newfiles:
1775 newfiles.append(g)
1776 finally:
1777 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001778 if hasattr(os, 'scandir'):
1779 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001780
1781 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001782
1783 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001784 # First validate the individual regular expressions and ignore any
1785 # that do not compile
1786 bbmasks = []
1787 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001788 # When constructing an older style single regex, it's possible for BBMASK
1789 # to end up beginning with '|', which matches and masks _everything_.
1790 if mask.startswith("|"):
1791                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1792 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001793 try:
1794 re.compile(mask)
1795 bbmasks.append(mask)
1796 except sre_constants.error:
1797 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1798
1799 # Then validate the combined regular expressions. This should never
1800 # fail, but better safe than sorry...
1801 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001802 try:
1803 bbmask_compiled = re.compile(bbmask)
1804 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001805 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1806 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001807
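        # Editor's sketch (hypothetical value): with
        #     BBMASK = "meta-example/recipes-broken/ \.bbappend$"
        # each word is compiled on its own above, invalid expressions are
        # dropped with a message, and the survivors are joined into a single
        # "a|b" style expression used by the loop below to skip matching paths.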
1808 bbfiles = []
1809 bbappend = []
1810 for f in newfiles:
1811 if bbmask and bbmask_compiled.search(f):
1812 collectlog.debug(1, "skipping masked file %s", f)
1813 masked += 1
1814 continue
1815 if f.endswith('.bb'):
1816 bbfiles.append(f)
1817 elif f.endswith('.bbappend'):
1818 bbappend.append(f)
1819 else:
1820 collectlog.debug(1, "skipping %s: unknown file extension", f)
1821
1822 # Build a list of .bbappend files for each .bb file
1823 for f in bbappend:
1824 base = os.path.basename(f).replace('.bbappend', '.bb')
1825 self.bbappends.append((base, f))
1826
1827 # Find overlayed recipes
1828 # bbfiles will be in priority order which makes this easy
1829 bbfile_seen = dict()
1830 self.overlayed = defaultdict(list)
1831 for f in reversed(bbfiles):
1832 base = os.path.basename(f)
1833 if base not in bbfile_seen:
1834 bbfile_seen[base] = f
1835 else:
1836 topfile = bbfile_seen[base]
1837 self.overlayed[topfile].append(f)
1838
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001839 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001840
1841 def get_file_appends(self, fn):
1842 """
1843 Returns a list of .bbappend files to apply to fn
1844 """
1845 filelist = []
1846 f = os.path.basename(fn)
1847 for b in self.bbappends:
1848 (bbappend, filename) = b
1849 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1850 filelist.append(filename)
1851 return filelist
1852
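    # Editor's sketch (hypothetical file names): a wildcard append named
    # "busybox_%.bbappend" is recorded with base "busybox_%.bb", so for a
    # recipe basename "busybox_1.31.0.bb" the startswith() test above compares
    # against "busybox_" and the append applies to every busybox_<version>.bb,
    # whereas "busybox_1.31.0.bbappend" would only apply to that exact version.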
1853 def collection_priorities(self, pkgfns, d):
1854
1855 priorities = {}
1856
1857 # Calculate priorities for each file
1858 matched = set()
1859 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001860 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001861 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1862
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001863 unmatched = set()
1864 for _, _, regex, pri in self.bbfile_config_priorities:
1865 if not regex in matched:
1866 unmatched.add(regex)
1867
Brad Bishop316dfdd2018-06-25 12:45:53 -04001868 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1869 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001870 for b in self.bbappends:
1871 (bbfile, append) = b
1872 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001873                    # If the bbappend is already matched by a regex in the "matched" set, return False
1874 for matched_regex in matched:
1875 if matched_regex.match(append):
1876 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001877 return True
1878 return False
1879
1880 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001881 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001882 unmatched.remove(unmatch)
1883
1884 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1885 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001886 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001887 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001888
1889 return priorities
1890
1891class ParsingFailure(Exception):
1892 def __init__(self, realexception, recipe):
1893 self.realexception = realexception
1894 self.recipe = recipe
1895 Exception.__init__(self, realexception, recipe)
1896
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001897class Parser(multiprocessing.Process):
1898 def __init__(self, jobs, results, quit, init, profile):
1899 self.jobs = jobs
1900 self.results = results
1901 self.quit = quit
1902 self.init = init
1903 multiprocessing.Process.__init__(self)
1904 self.context = bb.utils.get_context().copy()
1905 self.handlers = bb.event.get_class_handlers().copy()
1906 self.profile = profile
1907
1908 def run(self):
1909
1910 if not self.profile:
1911 self.realrun()
1912 return
1913
1914 try:
1915 import cProfile as profile
1916        except ImportError:
1917 import profile
1918 prof = profile.Profile()
1919 try:
1920 profile.Profile.runcall(prof, self.realrun)
1921 finally:
1922 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1923 prof.dump_stats(logfile)
1924
1925 def realrun(self):
1926 if self.init:
1927 self.init()
1928
1929 pending = []
1930 while True:
1931 try:
1932 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001933 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001934 pass
1935 else:
1936 self.results.cancel_join_thread()
1937 break
1938
1939 if pending:
1940 result = pending.pop()
1941 else:
1942 try:
Brad Bishop19323692019-04-05 15:28:33 -04001943 job = self.jobs.pop()
1944 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001945 break
1946 result = self.parse(*job)
1947
1948 try:
1949 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001950 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001951 pending.append(result)
1952
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001953 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001954 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001955 # Record the filename we're parsing into any events generated
1956 def parse_filter(self, record):
1957 record.taskpid = bb.event.worker_pid
1958 record.fn = filename
1959 return True
1960
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001961 # Reset our environment and handlers to the original settings
1962 bb.utils.set_context(self.context.copy())
1963 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001964 bb.event.LogHandler.filter = parse_filter
1965
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001966 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001967 except Exception as exc:
1968 tb = sys.exc_info()[2]
1969 exc.recipe = filename
1970 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1971 return True, exc
1972 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
1973 # and for example a worker thread doesn't just exit on its own in response to
1974 # a SystemExit event for example.
1975 except BaseException as exc:
1976 return True, ParsingFailure(exc, filename)
1977
1978class CookerParser(object):
1979 def __init__(self, cooker, filelist, masked):
1980 self.filelist = filelist
1981 self.cooker = cooker
1982 self.cfgdata = cooker.data
1983 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001984 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001985
1986 # Accounting statistics
1987 self.parsed = 0
1988 self.cached = 0
1989 self.error = 0
1990 self.masked = masked
1991
1992 self.skipped = 0
1993 self.virtuals = 0
1994 self.total = len(filelist)
1995
1996 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001997 self.process_names = []
1998
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001999 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002000 self.fromcache = []
2001 self.willparse = []
2002 for filename in self.filelist:
2003 appends = self.cooker.collection.get_file_appends(filename)
2004 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002005 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002006 else:
2007 self.fromcache.append((filename, appends))
2008 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002009 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002010
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002011 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002012 multiprocessing.cpu_count()), len(self.willparse))
2013
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002014 self.start()
2015 self.haveshutdown = False
2016
2017 def start(self):
2018 self.results = self.load_cached()
2019 self.processes = []
2020 if self.toparse:
2021 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2022 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002023 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002024 bb.utils.set_process_name(multiprocessing.current_process().name)
2025 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2026 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002027
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002028 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002029 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002030
2031 def chunkify(lst,n):
2032 return [lst[i::n] for i in range(n)]
2033 self.jobs = chunkify(self.willparse, self.num_processes)
2034
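        # Worked example (editor's addition): chunkify(list(range(7)), 3) gives
        # [[0, 3, 6], [1, 4], [2, 5]], i.e. the recipes still to be parsed are
        # dealt out round-robin so each Parser process gets a similar share.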
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002035 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002036 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002037 parser.start()
2038 self.process_names.append(parser.name)
2039 self.processes.append(parser)
2040
2041 self.results = itertools.chain(self.results, self.parse_generator())
2042
2043 def shutdown(self, clean=True, force=False):
2044 if not self.toparse:
2045 return
2046 if self.haveshutdown:
2047 return
2048 self.haveshutdown = True
2049
2050 if clean:
2051 event = bb.event.ParseCompleted(self.cached, self.parsed,
2052 self.skipped, self.masked,
2053 self.virtuals, self.error,
2054 self.total)
2055
2056 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002057 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002058 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002059 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002060 self.parser_quit.cancel_join_thread()
2061 for process in self.processes:
2062 self.parser_quit.put(None)
2063
Brad Bishop08902b02019-08-20 09:16:51 -04002064        # Clean up the queue before calling process.join(), otherwise there might be
2065 # deadlocks.
2066 while True:
2067 try:
2068 self.result_queue.get(timeout=0.25)
2069 except queue.Empty:
2070 break
2071
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002072 for process in self.processes:
2073 if force:
2074 process.join(.1)
2075 process.terminate()
2076 else:
2077 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002078
2079 sync = threading.Thread(target=self.bb_cache.sync)
2080 sync.start()
2081 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002082 bb.codeparser.parser_cache_savemerge()
2083 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002084 if self.cooker.configuration.profile:
2085 profiles = []
2086 for i in self.process_names:
2087 logfile = "profile-parse-%s.log" % i
2088 if os.path.exists(logfile):
2089 profiles.append(logfile)
2090
2091 pout = "profile-parse.log.processed"
2092 bb.utils.process_profilelog(profiles, pout = pout)
2093 print("Processed parsing statistics saved to %s" % (pout))
2094
2095 def load_cached(self):
2096 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002097 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002098 yield not cached, infos
2099
2100 def parse_generator(self):
2101 while True:
2102 if self.parsed >= self.toparse:
2103 break
2104
2105 try:
2106 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002107 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002108 pass
2109 else:
2110 value = result[1]
2111 if isinstance(value, BaseException):
2112 raise value
2113 else:
2114 yield result
2115
2116 def parse_next(self):
2117 result = []
2118 parsed = None
2119 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002120 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002121 except StopIteration:
2122 self.shutdown()
2123 return False
2124 except bb.BBHandledException as exc:
2125 self.error += 1
2126 logger.error('Failed to parse recipe: %s' % exc.recipe)
2127 self.shutdown(clean=False)
2128 return False
2129 except ParsingFailure as exc:
2130 self.error += 1
2131 logger.error('Unable to parse %s: %s' %
2132 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2133 self.shutdown(clean=False)
2134 return False
2135 except bb.parse.ParseError as exc:
2136 self.error += 1
2137 logger.error(str(exc))
2138 self.shutdown(clean=False)
2139 return False
2140 except bb.data_smart.ExpansionError as exc:
2141 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002142 bbdir = os.path.dirname(__file__) + os.sep
2143 etype, value, _ = sys.exc_info()
2144 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2145 logger.error('ExpansionError during parsing %s', value.recipe,
2146 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002147 self.shutdown(clean=False)
2148 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002149 except Exception as exc:
2150 self.error += 1
2151 etype, value, tb = sys.exc_info()
2152 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002153 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154 exc_info=(etype, value, exc.traceback))
2155 else:
2156 # Most likely, an exception occurred during raising an exception
2157 import traceback
2158 logger.error('Exception during parse: %s' % traceback.format_exc())
2159 self.shutdown(clean=False)
2160 return False
2161
2162 self.current += 1
2163 self.virtuals += len(result)
2164 if parsed:
2165 self.parsed += 1
2166 if self.parsed % self.progress_chunk == 0:
2167 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2168 self.cfgdata)
2169 else:
2170 self.cached += 1
2171
2172 for virtualfn, info_array in result:
2173 if info_array[0].skipped:
2174 self.skipped += 1
2175 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002176 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2177 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002178 parsed=parsed, watcher = self.cooker.add_filewatch)
2179 return True
2180
2181 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002182 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002183 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002184 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2185 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)