#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


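# EventWriter mirrors the build's event stream to the file named by the
# writeeventlog configuration setting. The first line written is a JSON
# object holding all variables (with their 'doc'/'func' flags); every
# subsequent line is a JSON object naming the event class plus a
# base64-encoded pickle of the event. Events arriving before
# bb.event.BuildStarted are queued and flushed once the file is initialised.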
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

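        # Two separate pyinotify watch managers are maintained: configwatcher
        # covers the configuration files behind the base datastore (changes
        # there clear baseconfig_valid), while watcher covers the files seen
        # during recipe parsing (changes there clear parsecache_valid).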
        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

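    # pyinotify callbacks. IN_Q_OVERFLOW means the kernel dropped events, so
    # the caches are conservatively invalidated; otherwise the changed path is
    # recorded in inotify_modified_files and the matching validity flag is
    # cleared so the affected data is rebuilt on the next command.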
    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

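    # deps is a list of tuples whose first element is a file path, the format
    # used by the __depends/__base_depends variables. By default the parent
    # directory of each file is watched; with dirs=True the entries themselves
    # are treated as directories to watch.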
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

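        # Each entry is a "module:ClassName" string; the class is imported
        # below and collected in caches_array so extra per-recipe information
        # can be stored in the cache alongside CoreRecipeInfo.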
        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        bb.parse.init_parser(self.data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

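        # Each entry maps a recipe name to ((pe, pv, pr), fn); the version is
        # rendered below as pe:pv-pr, and the preferred column is left blank
        # when it matches the latest version.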
        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

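        # Multiconfig dependencies are colon-separated strings; as used below,
        # field 2 names the multiconfig expected to satisfy the dependency and
        # field 3 the recipe to register as a provider there.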
        # No need to check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

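    # buildDependTree() flattens the prepared runqueue/taskdata into a plain
    # dictionary: "pn" (per-recipe filename/version/inherits), "depends" and
    # "rdepends-pn" (recipe-level build/runtime dependencies), "tdepends"
    # (task-level edges), "packages", "rdepends-pkg" and "rrecs-pkg"
    # (package-level runtime data), "providermap" and "layer-priorities".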
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
            depend_tree["pn"][pn]["filename"] = taskfn
            depend_tree["pn"][pn]["version"] = version
            depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

            # if we have extra caches, list all attributes they bring in
            extra_info = []
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                    cachefields = getattr(cache_class, 'cachefields', [])
                    extra_info = extra_info + cachefields

            # for all attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
            depend_tree["pn"][pn]["filename"] = taskfn
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            depend_tree["pn"][pn]["version"] = version
            rdepends = self.recipecaches[mc].rundeps[taskfn]
            rrecs = self.recipecaches[mc].runrecs[taskfn]
            depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

            # for all extra attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

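    # Three files are written to the current directory: 'pn-buildlist' (one
    # recipe per line), 'task-depends.dot' (the task-level graph) and
    # 'recipe-depends.dot' (a recipe-level view flattened from the task
    # graph). Old-style 'pn-depends.dot'/'package-depends.dot' outputs are
    # removed so stale data cannot be mistaken for current results.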
    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        e.g. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


Patrick Williamsc0f7c042017-02-23 20:41:17 -06001113 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001114 """Handle collections"""
1115 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001116 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001117 if collections:
1118 collection_priorities = {}
1119 collection_depends = {}
1120 collection_list = collections.split()
1121 min_prio = 0
1122 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001123 bb.debug(1,'Processing %s in collection list' % (c))
1124
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001125 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001126 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001127 if priority:
1128 try:
1129 prio = int(priority)
1130 except ValueError:
1131 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1132 errors = True
1133 if min_prio == 0 or prio < min_prio:
1134 min_prio = prio
1135 collection_priorities[c] = prio
1136 else:
1137 collection_priorities[c] = None
1138
1139 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001140 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001141 if deps:
1142 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001143 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001144 except bb.utils.VersionStringException as vse:
1145 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001146 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001147 if dep in collection_list:
1148 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001149 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001150 (op, depver) = opstr.split()
1151 if layerver:
1152 try:
1153 res = bb.utils.vercmp_string_op(layerver, depver, op)
1154 except bb.utils.VersionStringException as vse:
1155 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1156 if not res:
1157 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1158 errors = True
1159 else:
1160 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1161 errors = True
1162 else:
1163 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1164 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001165 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001166 else:
1167 collection_depends[c] = []
1168
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001169 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001170 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001171 if recs:
1172 try:
1173 recDict = bb.utils.explode_dep_versions2(recs)
1174 except bb.utils.VersionStringException as vse:
1175 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1176 for rec, oplist in list(recDict.items()):
1177 if rec in collection_list:
1178 if oplist:
1179 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001180 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001181 if layerver:
1182 (op, recver) = opstr.split()
1183 try:
1184 res = bb.utils.vercmp_string_op(layerver, recver, op)
1185 except bb.utils.VersionStringException as vse:
1186 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1187 if not res:
1188 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1189 continue
1190 else:
1191 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1192 continue
1193 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1194 collection_depends[c].append(rec)
1195 else:
1196 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1197
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001198 # Recursively work out collection priorities based on dependencies
1199 def calc_layer_priority(collection):
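            # A layer's priority is one more than the highest priority among its
            # dependencies (min_prio + 1 for a layer with no dependencies), unless a
            # priority has already been recorded for it. For example, a layer whose
            # dependencies have priorities 5 and 7 is assigned priority 8.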
1200 if not collection_priorities[collection]:
1201 max_depprio = min_prio
1202 for dep in collection_depends[collection]:
1203 calc_layer_priority(dep)
1204 depprio = collection_priorities[dep]
1205 if depprio > max_depprio:
1206 max_depprio = depprio
1207 max_depprio += 1
1208 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1209 collection_priorities[collection] = max_depprio
1210
1211 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1212 for c in collection_list:
1213 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001214 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215             if regex is None:
1216 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1217 errors = True
1218 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001219 elif regex == "":
1220 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001221 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001222 errors = False
1223 else:
1224 try:
1225 cre = re.compile(regex)
1226 except re.error:
1227 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1228 errors = True
1229 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001230 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001231 if errors:
1232 # We've already printed the actual error(s)
1233 raise CollectionError("Errors during parsing layer configuration")
1234
1235 def buildSetVars(self):
1236 """
1237 Setup any variables needed before starting a build
1238 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001239 t = time.gmtime()
1240 for mc in self.databuilder.mcdata:
1241 ds = self.databuilder.mcdata[mc]
1242 if not ds.getVar("BUILDNAME", False):
1243 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1244 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1245 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1246 ds.setVar("TIME", time.strftime('%H%M%S', t))
1247
1248 def reset_mtime_caches(self):
1249 """
1250         Reset mtime caches - this is particularly important when memory resident as
1251         something which is cached may well have changed since the last invocation
1252         (e.g. a file associated with a recipe might have been modified by the user).
1253 """
1254 build.reset_cache()
1255 bb.fetch._checksum_cache.mtime_cache.clear()
1256 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1257 if siggen_cache:
1258 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001259
1260 def matchFiles(self, bf):
1261 """
1262 Find the .bb files which match the expression in 'buildfile'.
1263 """
1264 if bf.startswith("/") or bf.startswith("../"):
1265 bf = os.path.abspath(bf)
1266
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001267 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001268 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001269 try:
1270 os.stat(bf)
1271 bf = os.path.abspath(bf)
1272 return [bf]
1273 except OSError:
1274 regexp = re.compile(bf)
1275 matches = []
1276 for f in filelist:
1277 if regexp.search(f) and os.path.isfile(f):
1278 matches.append(f)
1279 return matches
1280
1281 def matchFile(self, buildfile):
1282 """
1283 Find the .bb file which matches the expression in 'buildfile'.
1284 Raise an error if multiple files
1285 """
1286 matches = self.matchFiles(buildfile)
1287 if len(matches) != 1:
1288 if matches:
1289 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1290 if matches:
1291 for f in matches:
1292 msg += "\n %s" % f
1293 parselog.error(msg)
1294 else:
1295 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1296 raise NoSpecificMatch
1297 return matches[0]
1298
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001299 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001300 """
1301 Build the file matching regexp buildfile
1302 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001303 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001305 # Too many people use -b because they think it's how you normally
1306 # specify a target to be built, so show a warning
1307 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1308
1309 self.buildFileInternal(buildfile, task)
1310
1311 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1312 """
1313         Build the file matching regexp buildfile, optionally without firing events (fireevents) or emitting runqueue log output (quietlog)
1314 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001315
1316 # Parse the configuration here. We need to do it explicitly here since
1317 # buildFile() doesn't use the cache
1318 self.parseConfiguration()
1319
1320 # If we are told to do the None task then query the default task
1321         if task is None:
1322 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001323 if not task.startswith("do_"):
1324 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001325
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001326 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001327 fn = self.matchFile(fn)
1328
1329 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001330 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001331
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001332 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1333
1334 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001335 infos = dict(infos)
1336
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001337 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001338 try:
1339 info_array = infos[fn]
1340 except KeyError:
1341 bb.fatal("%s does not exist" % fn)
1342
1343 if info_array[0].skipped:
1344 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1345
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001346 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001347
1348 # Tweak some variables
1349 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001350 self.recipecaches[mc].ignored_dependencies = set()
1351 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001352 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353
1354 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001355 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1356 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001357 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1358 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001359
1360 # Invalidate task for target if force mode active
1361 if self.configuration.force:
1362 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001363 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001364
1365 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001366 taskdata = {}
1367 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001368 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001369
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001370 if quietlog:
1371 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1372 bb.runqueue.logger.setLevel(logging.WARNING)
1373
1374 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1375 if fireevents:
1376 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377
1378 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001379 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001380
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001381 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001382
1383 def buildFileIdle(server, rq, abort):
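            """
            Idle callback registered with the server for a single-recipe (-b) build:
            steps the runqueue, handling shutdown requests, task failures and the
            final BuildCompleted event before finishing the async command.
            """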
1384
1385 msg = None
1386 interrupted = 0
1387 if abort or self.state == state.forceshutdown:
1388 rq.finish_runqueue(True)
1389 msg = "Forced shutdown"
1390 interrupted = 2
1391 elif self.state == state.shutdown:
1392 rq.finish_runqueue(False)
1393 msg = "Stopped build"
1394 interrupted = 1
1395 failures = 0
1396 try:
1397 retval = rq.execute_runqueue()
1398 except runqueue.TaskFailure as exc:
1399 failures += len(exc.args)
1400 retval = False
1401 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001402 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001403 if quietlog:
1404 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001405 return False
1406
1407 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001408 if fireevents:
1409 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001410 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001411 # We trashed self.recipecaches above
1412 self.parsecache_valid = False
1413 self.configuration.limited_deps = False
1414 bb.parse.siggen.reset(self.data)
1415 if quietlog:
1416 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001417 return False
1418 if retval is True:
1419 return True
1420 return retval
1421
1422 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1423
1424 def buildTargets(self, targets, task):
1425 """
1426 Attempt to build the targets specified
1427 """
1428
1429 def buildTargetsIdle(server, rq, abort):
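            """
            Idle callback registered with the server for a normal target build:
            steps the runqueue until it completes, is interrupted or fails, then
            fires BuildCompleted for each multiconfig and finishes the async command.
            """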
1430 msg = None
1431 interrupted = 0
1432 if abort or self.state == state.forceshutdown:
1433 rq.finish_runqueue(True)
1434 msg = "Forced shutdown"
1435 interrupted = 2
1436 elif self.state == state.shutdown:
1437 rq.finish_runqueue(False)
1438 msg = "Stopped build"
1439 interrupted = 1
1440 failures = 0
1441 try:
1442 retval = rq.execute_runqueue()
1443 except runqueue.TaskFailure as exc:
1444 failures += len(exc.args)
1445 retval = False
1446 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001447 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001448 return False
1449
1450 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001451 try:
1452 for mc in self.multiconfigs:
1453 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1454 finally:
1455 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001456 return False
1457 if retval is True:
1458 return True
1459 return retval
1460
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001461 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001462 self.buildSetVars()
1463
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001464 # If we are told to do the None task then query the default task
1465         if task is None:
1466 task = self.configuration.cmd
1467
1468 if not task.startswith("do_"):
1469 task = "do_%s" % task
1470
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001471 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1472
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001473 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001474
1475 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001476
1477 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001478
1479 # make targets to always look as <target>:do_<task>
1480 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001481 for target in runlist:
1482 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001483 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001484 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001485
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001486 for mc in self.multiconfigs:
1487 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001488
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001489 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001490 if 'universe' in targets:
1491 rq.rqdata.warn_multi_bb = True
1492
1493 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1494
1495
1496 def getAllKeysWithFlags(self, flaglist):
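        """
        Dump all datastore variables (other than internal '__'-prefixed keys) as a
        dictionary of value, variable history and any flags named in flaglist,
        e.g. flaglist=["doc"]. Python functions are returned unexpanded.
        """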
1497 dump = {}
1498 for k in self.data.keys():
1499 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001500 expand = True
1501 flags = self.data.getVarFlags(k)
1502 if flags and "func" in flags and "python" in flags:
1503 expand = False
1504 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001505 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1506 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001507 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001508 'history' : self.data.varhistory.variable(k),
1509 }
1510 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001511 if flags and d in flags:
1512 dump[k][d] = flags[d]
1513 else:
1514 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001515 except Exception as e:
1516 print(e)
1517 return dump
1518
1519
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001520 def updateCacheSync(self):
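        """
        Re-parse any files for which inotify change notifications were received and,
        if the base configuration is no longer valid, reload it. Unlike updateCache()
        this does not kick off recipe parsing.
        """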
1521 if self.state == state.running:
1522 return
1523
1524 # reload files for which we got notifications
1525 for p in self.inotify_modified_files:
1526 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001527 if p in bb.parse.BBHandler.cached_statements:
1528 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529 self.inotify_modified_files = []
1530
1531 if not self.baseconfig_valid:
1532 logger.debug(1, "Reloading base configuration data")
1533 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001534 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001535
1536 # This is called for all async commands when self.state != running
1537 def updateCache(self):
1538 if self.state == state.running:
1539 return
1540
1541 if self.state in (state.shutdown, state.forceshutdown, state.error):
1542 if hasattr(self.parser, 'shutdown'):
1543 self.parser.shutdown(clean=False, force = True)
1544 raise bb.BBHandledException()
1545
1546 if self.state != state.parsing:
1547 self.updateCacheSync()
1548
1549 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001550 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001551 self.parseConfiguration ()
1552 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001553 for mc in self.multiconfigs:
1554 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001556 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001557 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001558 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001559
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001560 for dep in self.configuration.extra_assume_provided:
1561 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001562
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001563 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001564 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1565
1566 # Add inotify watches for directories searched for bb/bbappend files
1567 for dirent in searchdirs:
1568 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001569
1570 self.parser = CookerParser(self, filelist, masked)
1571 self.parsecache_valid = True
1572
1573 self.state = state.parsing
1574
1575 if not self.parser.parse_next():
1576 collectlog.debug(1, "parsing complete")
1577 if self.parser.error:
1578 raise bb.BBHandledException()
1579 self.show_appends_with_no_recipes()
1580 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001581 for mc in self.multiconfigs:
1582 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001583 self.state = state.running
1584
1585 # Send an event listing all stamps reachable after parsing
1586 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001587 for mc in self.multiconfigs:
1588 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1589 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001590 return None
1591
1592 return True
1593
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001594 def checkPackages(self, pkgs_to_build, task=None):
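        """
        Sanitise the list of requested targets: warn about targets listed in
        ASSUME_PROVIDED, rewrite the deprecated 'multiconfig:' prefix to 'mc:', and
        expand the 'world' and 'universe' pseudo-targets into concrete recipe targets
        for each configured multiconfig. Returns a new list; the input is not modified.
        """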
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001595
1596 # Return a copy, don't modify the original
1597 pkgs_to_build = pkgs_to_build[:]
1598
1599 if len(pkgs_to_build) == 0:
1600 raise NothingToBuild
1601
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001602 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001603         for pkg in pkgs_to_build[:]:
1604 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001605 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001606 if pkg.startswith("multiconfig:"):
1607 pkgs_to_build.remove(pkg)
1608 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609
1610 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001611 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001612 for mc in self.multiconfigs:
1613 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1614 for t in self.recipecaches[mc].world_target:
1615 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001616 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001617 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618
1619 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001620 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001621 parselog.debug(1, "collating packages for \"universe\"")
1622 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001623 for mc in self.multiconfigs:
1624 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001625 if task:
1626 foundtask = False
1627 for provider_fn in self.recipecaches[mc].providers[t]:
1628 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1629 foundtask = True
1630 break
1631 if not foundtask:
1632 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1633 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001634 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001635 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001636 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001637
1638 return pkgs_to_build
1639
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001640 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001641 # We now are in our own process so we can call this here.
1642 # PRServ exits if its parent process exits
1643 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644 return
1645
1646 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001647 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001648 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649
1650
1651 def shutdown(self, force = False):
1652 if force:
1653 self.state = state.forceshutdown
1654 else:
1655 self.state = state.shutdown
1656
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001657 if self.parser:
1658 self.parser.shutdown(clean=not force, force=force)
1659
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001660 def finishcommand(self):
1661 self.state = state.initial
1662
1663 def reset(self):
1664 self.initConfigurationData()
1665
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001666 def clientComplete(self):
1667 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001668 self.finishcommand()
1669 self.extraconfigdata = {}
1670 self.command.reset()
1671 self.databuilder.reset()
1672 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001673
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001674
1675class CookerExit(bb.event.Event):
1676 """
1677 Notify clients of the Cooker shutdown
1678 """
1679
1680 def __init__(self):
1681 bb.event.Event.__init__(self)
1682
1683
1684class CookerCollectFiles(object):
1685 def __init__(self, priorities):
1686 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001687         # Priorities is a list of tuples, with the second element as the pattern.
1688 # We need to sort the list with the longest pattern first, and so on to
1689 # the shortest. This allows nested layers to be properly evaluated.
1690 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001691
1692 def calc_bbfile_priority( self, filename, matched = None ):
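        # Return the priority of the first configured BBFILE_PATTERN regex that
        # matches filename (patterns are ordered so that nested layers are tried
        # before their parents), recording the regex in 'matched' when a set is
        # supplied. Files matching no collection get priority 0.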
1693 for _, _, regex, pri in self.bbfile_config_priorities:
1694 if regex.match(filename):
1695                 if matched is not None:
1696                     if regex not in matched:
1697 matched.add(regex)
1698 return pri
1699 return 0
1700
1701 def get_bbfiles(self):
1702 """Get list of default .bb files by reading out the current directory"""
1703 path = os.getcwd()
1704 contents = os.listdir(path)
1705 bbfiles = []
1706 for f in contents:
1707 if f.endswith(".bb"):
1708 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1709 return bbfiles
1710
1711 def find_bbfiles(self, path):
1712 """Find all the .bb and .bbappend files in a directory"""
1713 found = []
1714 for dir, dirs, files in os.walk(path):
1715 for ignored in ('SCCS', 'CVS', '.svn'):
1716 if ignored in dirs:
1717 dirs.remove(ignored)
1718             found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1719
1720 return found
1721
1722 def collect_bbfiles(self, config, eventdata):
1723 """Collect all available .bb build files"""
1724 masked = 0
1725
1726 collectlog.debug(1, "collecting .bb files")
1727
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001728 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001729 config.setVar("BBFILES", " ".join(files))
1730
1731 # Sort files by priority
1732 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1733
1734 if not len(files):
1735 files = self.get_bbfiles()
1736
1737 if not len(files):
1738 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1739 bb.event.fire(CookerExit(), eventdata)
1740
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001741 # We need to track where we look so that we can add inotify watches. There
1742         # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001743 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001744 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001745 if hasattr(os, 'scandir'):
1746 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001747 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001748
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001749 def ourlistdir(d):
1750 searchdirs.append(d)
1751 return origlistdir(d)
1752
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001753 def ourscandir(d):
1754 searchdirs.append(d)
1755 return origscandir(d)
1756
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001757 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001758 if hasattr(os, 'scandir'):
1759 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001760 try:
1761 # Can't use set here as order is important
1762 newfiles = []
1763 for f in files:
1764 if os.path.isdir(f):
1765 dirfiles = self.find_bbfiles(f)
1766 for g in dirfiles:
1767 if g not in newfiles:
1768 newfiles.append(g)
1769 else:
1770 globbed = glob.glob(f)
1771 if not globbed and os.path.exists(f):
1772 globbed = [f]
1773 # glob gives files in order on disk. Sort to be deterministic.
1774 for g in sorted(globbed):
1775 if g not in newfiles:
1776 newfiles.append(g)
1777 finally:
1778 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001779 if hasattr(os, 'scandir'):
1780 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001781
1782 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001783
1784 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001785 # First validate the individual regular expressions and ignore any
1786 # that do not compile
1787 bbmasks = []
1788 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001789 # When constructing an older style single regex, it's possible for BBMASK
1790 # to end up beginning with '|', which matches and masks _everything_.
1791 if mask.startswith("|"):
1792                     collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1793 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001794 try:
1795 re.compile(mask)
1796 bbmasks.append(mask)
1797 except sre_constants.error:
1798 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1799
1800 # Then validate the combined regular expressions. This should never
1801 # fail, but better safe than sorry...
1802 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001803 try:
1804 bbmask_compiled = re.compile(bbmask)
1805 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001806 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1807 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001808
1809 bbfiles = []
1810 bbappend = []
1811 for f in newfiles:
1812 if bbmask and bbmask_compiled.search(f):
1813 collectlog.debug(1, "skipping masked file %s", f)
1814 masked += 1
1815 continue
1816 if f.endswith('.bb'):
1817 bbfiles.append(f)
1818 elif f.endswith('.bbappend'):
1819 bbappend.append(f)
1820 else:
1821 collectlog.debug(1, "skipping %s: unknown file extension", f)
1822
1823 # Build a list of .bbappend files for each .bb file
1824 for f in bbappend:
1825 base = os.path.basename(f).replace('.bbappend', '.bb')
1826 self.bbappends.append((base, f))
1827
1828 # Find overlayed recipes
1829 # bbfiles will be in priority order which makes this easy
1830 bbfile_seen = dict()
1831 self.overlayed = defaultdict(list)
1832 for f in reversed(bbfiles):
1833 base = os.path.basename(f)
1834 if base not in bbfile_seen:
1835 bbfile_seen[base] = f
1836 else:
1837 topfile = bbfile_seen[base]
1838 self.overlayed[topfile].append(f)
1839
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001840 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001841
1842 def get_file_appends(self, fn):
1843 """
1844 Returns a list of .bbappend files to apply to fn
1845 """
1846 filelist = []
1847 f = os.path.basename(fn)
1848 for b in self.bbappends:
1849 (bbappend, filename) = b
1850 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1851 filelist.append(filename)
1852 return filelist
1853
1854 def collection_priorities(self, pkgfns, d):
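        # Work out the layer priority for every recipe file via calc_bbfile_priority(),
        # then warn about any BBFILE_PATTERN that matched nothing (unless it only
        # matched bbappend files or BBFILE_PATTERN_IGNORE_EMPTY is set for that layer).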
1855
1856 priorities = {}
1857
1858 # Calculate priorities for each file
1859 matched = set()
1860 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001861 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001862 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1863
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001864 unmatched = set()
1865 for _, _, regex, pri in self.bbfile_config_priorities:
1866             if regex not in matched:
1867 unmatched.add(regex)
1868
Brad Bishop316dfdd2018-06-25 12:45:53 -04001869 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1870 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001871 for b in self.bbappends:
1872 (bbfile, append) = b
1873 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001874                     # If the bbappend is already matched by a regex in the "matched" set, return False
1875 for matched_regex in matched:
1876 if matched_regex.match(append):
1877 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001878 return True
1879 return False
1880
1881 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001882 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001883 unmatched.remove(unmatch)
1884
1885 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1886 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001887 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001888 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001889
1890 return priorities
1891
1892class ParsingFailure(Exception):
1893 def __init__(self, realexception, recipe):
1894 self.realexception = realexception
1895 self.recipe = recipe
1896 Exception.__init__(self, realexception, recipe)
1897
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001898class Parser(multiprocessing.Process):
1899 def __init__(self, jobs, results, quit, init, profile):
1900 self.jobs = jobs
1901 self.results = results
1902 self.quit = quit
1903 self.init = init
1904 multiprocessing.Process.__init__(self)
1905 self.context = bb.utils.get_context().copy()
1906 self.handlers = bb.event.get_class_handlers().copy()
1907 self.profile = profile
1908
1909 def run(self):
1910
1911 if not self.profile:
1912 self.realrun()
1913 return
1914
1915 try:
1916 import cProfile as profile
1917 except:
1918 import profile
1919 prof = profile.Profile()
1920 try:
1921 profile.Profile.runcall(prof, self.realrun)
1922 finally:
1923 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1924 prof.dump_stats(logfile)
1925
1926 def realrun(self):
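        # Worker main loop: run the optional init hook, then pop recipes from this
        # process' job list, parse them and push the results, retrying the put while
        # the results queue is full, until a quit request is received.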
1927 if self.init:
1928 self.init()
1929
1930 pending = []
1931 while True:
1932 try:
1933 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001934 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001935 pass
1936 else:
1937 self.results.cancel_join_thread()
1938 break
1939
1940 if pending:
1941 result = pending.pop()
1942 else:
1943 try:
Brad Bishop19323692019-04-05 15:28:33 -04001944 job = self.jobs.pop()
1945 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001946 break
1947 result = self.parse(*job)
1948
1949 try:
1950 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001951 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001952 pending.append(result)
1953
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001954 def parse(self, filename, appends):
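        """
        Parse a single recipe (plus its bbappends) in this worker process. Returns
        (True, infos) on success, or (True, exception) so the parent process can
        report or re-raise the failure.
        """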
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001955 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001956 # Record the filename we're parsing into any events generated
1957 def parse_filter(self, record):
1958 record.taskpid = bb.event.worker_pid
1959 record.fn = filename
1960 return True
1961
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001962 # Reset our environment and handlers to the original settings
1963 bb.utils.set_context(self.context.copy())
1964 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001965 bb.event.LogHandler.filter = parse_filter
1966
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001967 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001968 except Exception as exc:
1969 tb = sys.exc_info()[2]
1970 exc.recipe = filename
1971 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1972 return True, exc
1973 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
1974 # and for example a worker thread doesn't just exit on its own in response to
1975         # a SystemExit event.
1976 except BaseException as exc:
1977 return True, ParsingFailure(exc, filename)
1978
1979class CookerParser(object):
1980 def __init__(self, cooker, filelist, masked):
1981 self.filelist = filelist
1982 self.cooker = cooker
1983 self.cfgdata = cooker.data
1984 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001985 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001986
1987 # Accounting statistics
1988 self.parsed = 0
1989 self.cached = 0
1990 self.error = 0
1991 self.masked = masked
1992
1993 self.skipped = 0
1994 self.virtuals = 0
1995 self.total = len(filelist)
1996
1997 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001998 self.process_names = []
1999
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002000 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002001 self.fromcache = []
2002 self.willparse = []
2003 for filename in self.filelist:
2004 appends = self.cooker.collection.get_file_appends(filename)
2005 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002006 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002007 else:
2008 self.fromcache.append((filename, appends))
2009 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002010 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002011
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002012 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002013 multiprocessing.cpu_count()), len(self.willparse))
2014
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002015 self.start()
2016 self.haveshutdown = False
2017
2018 def start(self):
2019 self.results = self.load_cached()
2020 self.processes = []
2021 if self.toparse:
2022 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2023 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002024 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002025 bb.utils.set_process_name(multiprocessing.current_process().name)
2026 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2027 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002028
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002029 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002030 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002031
2032 def chunkify(lst,n):
2033 return [lst[i::n] for i in range(n)]
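        # chunkify() deals the recipes out round-robin so each parser process gets a
        # similarly sized slice, e.g. chunkify([1, 2, 3, 4, 5], 2) -> [[1, 3, 5], [2, 4]].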
2034 self.jobs = chunkify(self.willparse, self.num_processes)
2035
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002036 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002037 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002038 parser.start()
2039 self.process_names.append(parser.name)
2040 self.processes.append(parser)
2041
2042 self.results = itertools.chain(self.results, self.parse_generator())
2043
2044 def shutdown(self, clean=True, force=False):
2045 if not self.toparse:
2046 return
2047 if self.haveshutdown:
2048 return
2049 self.haveshutdown = True
2050
2051 if clean:
2052 event = bb.event.ParseCompleted(self.cached, self.parsed,
2053 self.skipped, self.masked,
2054 self.virtuals, self.error,
2055 self.total)
2056
2057 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002058 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002059 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002060 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002061 self.parser_quit.cancel_join_thread()
2062 for process in self.processes:
2063 self.parser_quit.put(None)
2064
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002065 for process in self.processes:
2066 if force:
2067 process.join(.1)
2068 process.terminate()
2069 else:
2070 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002071
2072 sync = threading.Thread(target=self.bb_cache.sync)
2073 sync.start()
2074 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002075 bb.codeparser.parser_cache_savemerge()
2076 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002077 if self.cooker.configuration.profile:
2078 profiles = []
2079 for i in self.process_names:
2080 logfile = "profile-parse-%s.log" % i
2081 if os.path.exists(logfile):
2082 profiles.append(logfile)
2083
2084 pout = "profile-parse.log.processed"
2085 bb.utils.process_profilelog(profiles, pout = pout)
2086 print("Processed parsing statistics saved to %s" % (pout))
2087
2088 def load_cached(self):
2089 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002090 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002091 yield not cached, infos
2092
2093 def parse_generator(self):
2094 while True:
2095 if self.parsed >= self.toparse:
2096 break
2097
2098 try:
2099 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002100 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002101 pass
2102 else:
2103 value = result[1]
2104 if isinstance(value, BaseException):
2105 raise value
2106 else:
2107 yield result
2108
2109 def parse_next(self):
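        """
        Consume the next entry from the results iterator, update the parse statistics
        and feed the recipe info into the cooker's recipe caches. Returns False once
        parsing has finished or an error has forced a shutdown, True otherwise.
        """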
2110 result = []
2111 parsed = None
2112 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002113 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002114 except StopIteration:
2115 self.shutdown()
2116 return False
2117 except bb.BBHandledException as exc:
2118 self.error += 1
2119 logger.error('Failed to parse recipe: %s' % exc.recipe)
2120 self.shutdown(clean=False)
2121 return False
2122 except ParsingFailure as exc:
2123 self.error += 1
2124 logger.error('Unable to parse %s: %s' %
2125 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2126 self.shutdown(clean=False)
2127 return False
2128 except bb.parse.ParseError as exc:
2129 self.error += 1
2130 logger.error(str(exc))
2131 self.shutdown(clean=False)
2132 return False
2133 except bb.data_smart.ExpansionError as exc:
2134 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002135 bbdir = os.path.dirname(__file__) + os.sep
2136 etype, value, _ = sys.exc_info()
2137 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2138 logger.error('ExpansionError during parsing %s', value.recipe,
2139 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002140 self.shutdown(clean=False)
2141 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002142 except Exception as exc:
2143 self.error += 1
2144 etype, value, tb = sys.exc_info()
2145 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002146 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002147 exc_info=(etype, value, exc.traceback))
2148 else:
2149 # Most likely, an exception occurred during raising an exception
2150 import traceback
2151 logger.error('Exception during parse: %s' % traceback.format_exc())
2152 self.shutdown(clean=False)
2153 return False
2154
2155 self.current += 1
2156 self.virtuals += len(result)
2157 if parsed:
2158 self.parsed += 1
2159 if self.parsed % self.progress_chunk == 0:
2160 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2161 self.cfgdata)
2162 else:
2163 self.cached += 1
2164
2165 for virtualfn, info_array in result:
2166 if info_array[0].skipped:
2167 self.skipped += 1
2168 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002169 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2170 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002171 parsed=parsed, watcher = self.cooker.add_filewatch)
2172 return True
2173
2174 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002175 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002176 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002177 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2178 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)