#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)

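# Illustrative note (editorial, not part of the original module): get_name()
# maps a status code back to its symbolic name, e.g. state.get_name(state.running)
# returns "running", while an unknown code raises ValueError.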

class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

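# Illustrative note: EventWriter serialises each event as one JSON line of the
# form {"class": "<module>.<ClassName>", "vars": "<base64-encoded pickle>"},
# after an initial {"allvariables": {...}} header line is written once the
# first BuildStarted or CookerExit event arrives; the header key names shown
# here are taken from the code above, the payload shape is a sketch.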
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.daemon = True
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

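        # Illustrative note: with BB_HASHSERVE = "auto" the block above starts a
        # local hash equivalence server and rewrites BB_HASHSERVE in every
        # datastore to the socket address, e.g. "unix://<TOPDIR>/hashserve.sock"
        # (the concrete path is derived from TOPDIR at runtime; shown here only
        # as a sketch).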
        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace strings such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

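        # Illustrative sketch of the structure filled in below (recipe and task
        # names are hypothetical, values abbreviated):
        #   depend_tree["pn"]["bash"] = {"filename": ..., "version": ..., "inherits": [...]}
        #   depend_tree["tdepends"]["bash.do_compile"] = ["bash.do_configure", ...]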
        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

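    # Illustrative note: the task-depends.dot file written above contains one
    # node and edge per task, e.g. for a hypothetical recipe:
    #   "bash.do_compile" [label="bash do_compile\n<version>\n<recipe file>"]
    #   "bash.do_compile" -> "bash.do_configure"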
    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

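    # Illustrative note: setting BB_DANGLINGAPPENDS_WARNONLY = "1" (or "yes" /
    # "true") in the configuration downgrades the fatal error above about
    # bbappends without a matching recipe to a warning.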
966 def handlePrefProviders(self):
967
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600968 for mc in self.multiconfigs:
969 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600970 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500971
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600972 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500973 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600974 try:
975 (providee, provider) = p.split(':')
976 except:
977 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
978 continue
979 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
980 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
981 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500982
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500983 def findConfigFilePath(self, configfile):
984 """
985 Find the location on disk of configfile and if it exists and was parsed by BitBake
986 emit the ConfigFilePathFound event with the path to the file.
987 """
988 path = bb.cookerdata.findConfigFile(configfile, self.data)
989 if not path:
990 return
991
992 # Generate a list of parsed configuration files by searching the files
993 # listed in the __depends and __base_depends variables with a .conf suffix.
994 conffiles = []
995 dep_files = self.data.getVar('__base_depends', False) or []
996 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
997
998 for f in dep_files:
999 if f[0].endswith(".conf"):
1000 conffiles.append(f[0])
1001
1002 _, conf, conffile = path.rpartition("conf/")
1003 match = os.path.join(conf, conffile)
1004 # Try and find matches for conf/conffilename.conf as we don't always
1005 # have the full path to the file.
1006 for cfg in conffiles:
1007 if cfg.endswith(match):
1008 bb.event.fire(bb.event.ConfigFilePathFound(path),
1009 self.data)
1010 break
1011
1012 def findFilesMatchingInDir(self, filepattern, directory):
1013 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001014 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001015 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1016 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1017 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001018 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001019 """
1020
1021 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001022 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001023 for path in bbpaths:
1024 dirpath = os.path.join(path, directory)
1025 if os.path.exists(dirpath):
1026 for root, dirs, files in os.walk(dirpath):
1027 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001028 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001029 matches.append(f)
1030
1031 if matches:
1032 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1033
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001034 def findProviders(self, mc=''):
1035 return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1036
1037 def findBestProvider(self, pn, mc=''):
1038 if pn in self.recipecaches[mc].providers:
1039 filenames = self.recipecaches[mc].providers[pn]
1040 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
1041 filename = eligible[0]
1042 return None, None, None, filename
1043 elif pn in self.recipecaches[mc].pkg_pn:
1044 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1045 else:
1046 return None, None, None, None
1047
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001048 def findConfigFiles(self, varname):
1049 """
1050 Find config files which are appropriate values for varname.
1051 i.e. MACHINE, DISTRO
1052 """
1053 possible = []
1054 var = varname.lower()
1055
1056 data = self.data
1057 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001058 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001059 for path in bbpaths:
1060 confpath = os.path.join(path, "conf", var)
1061 if os.path.exists(confpath):
1062 for root, dirs, files in os.walk(confpath):
1063 # get all child files, these are appropriate values
1064 for f in files:
1065 val, sep, end = f.rpartition('.')
1066 if end == 'conf':
1067 possible.append(val)
1068
1069 if possible:
1070 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1071
1072 def findInheritsClass(self, klass):
1073 """
1074 Find all recipes which inherit the specified class
1075 """
1076 pkg_list = []
1077
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001078 for pfn in self.recipecaches[''].pkg_fn:
1079 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001080 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001081 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001082
1083 return pkg_list
1084
1085 def generateTargetsTree(self, klass=None, pkgs=None):
1086 """
1087 Generate a dependency tree of buildable targets
1088 Generate an event with the result
1089 """
1090 # if the caller hasn't specified a pkgs list default to universe
1091 if not pkgs:
1092 pkgs = ['universe']
1093 # if inherited_class passed ensure all recipes which inherit the
1094 # specified class are included in pkgs
1095 if klass:
1096 extra_pkgs = self.findInheritsClass(klass)
1097 pkgs = pkgs + extra_pkgs
1098
1099 # generate a dependency tree for all our packages
1100 tree = self.generatePkgDepTreeData(pkgs, 'build')
1101 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1102
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001103 def interactiveMode( self ):
1104 """Drop off into a shell"""
1105 try:
1106 from bb import shell
1107 except ImportError:
1108 parselog.exception("Interactive mode not available")
1109 sys.exit(1)
1110 else:
1111 shell.start( self )
1112
1113
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001114 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001115 """Handle collections"""
1116 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001117 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001118 if collections:
1119 collection_priorities = {}
1120 collection_depends = {}
1121 collection_list = collections.split()
1122 min_prio = 0
1123 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001124 bb.debug(1,'Processing %s in collection list' % (c))
1125
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001126 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001127 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001128 if priority:
1129 try:
1130 prio = int(priority)
1131 except ValueError:
1132 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1133 errors = True
1134 if min_prio == 0 or prio < min_prio:
1135 min_prio = prio
1136 collection_priorities[c] = prio
1137 else:
1138 collection_priorities[c] = None
1139
1140 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001141 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001142 if deps:
1143 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001144 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001145 except bb.utils.VersionStringException as vse:
1146 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001147 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001148 if dep in collection_list:
1149 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001150 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001151 (op, depver) = opstr.split()
1152 if layerver:
1153 try:
1154 res = bb.utils.vercmp_string_op(layerver, depver, op)
1155 except bb.utils.VersionStringException as vse:
1156 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1157 if not res:
1158 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1159 errors = True
1160 else:
1161 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1162 errors = True
1163 else:
1164 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1165 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001166 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001167 else:
1168 collection_depends[c] = []
1169
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001170 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001171 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001172 if recs:
1173 try:
1174 recDict = bb.utils.explode_dep_versions2(recs)
1175 except bb.utils.VersionStringException as vse:
1176 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1177 for rec, oplist in list(recDict.items()):
1178 if rec in collection_list:
1179 if oplist:
1180 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001181 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001182 if layerver:
1183 (op, recver) = opstr.split()
1184 try:
1185 res = bb.utils.vercmp_string_op(layerver, recver, op)
1186 except bb.utils.VersionStringException as vse:
1187 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1188 if not res:
1189 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1190 continue
1191 else:
1192 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1193 continue
1194 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1195 collection_depends[c].append(rec)
1196 else:
1197 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1198
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001199 # Recursively work out collection priorities based on dependencies
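        # calc_layer_priority() below assigns a priority to any layer that did not set
        # BBFILE_PRIORITY explicitly: it takes the highest priority among the layer's
        # (recursive) dependencies and adds one, with min_prio as the floor. For example,
        # if a layer depends on layers with priorities 5 and 6, it is assigned 7.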
1200 def calc_layer_priority(collection):
1201 if not collection_priorities[collection]:
1202 max_depprio = min_prio
1203 for dep in collection_depends[collection]:
1204 calc_layer_priority(dep)
1205 depprio = collection_priorities[dep]
1206 if depprio > max_depprio:
1207 max_depprio = depprio
1208 max_depprio += 1
1209 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1210 collection_priorities[collection] = max_depprio
1211
1212 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1213 for c in collection_list:
1214 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001215 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001216 if regex is None:
1217 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1218 errors = True
1219 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001220 elif regex == "":
1221 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001222 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001223 errors = False
1224 else:
1225 try:
1226 cre = re.compile(regex)
1227 except re.error:
1228 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1229 errors = True
1230 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001231 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001232 if errors:
1233 # We've already printed the actual error(s)
1234 raise CollectionError("Errors during parsing layer configuration")
1235
1236 def buildSetVars(self):
1237 """
1238 Setup any variables needed before starting a build
1239 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001240 t = time.gmtime()
1241 for mc in self.databuilder.mcdata:
1242 ds = self.databuilder.mcdata[mc]
1243 if not ds.getVar("BUILDNAME", False):
1244 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1245 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1246 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1247 ds.setVar("TIME", time.strftime('%H%M%S', t))
1248
1249 def reset_mtime_caches(self):
1250 """
1251 Reset mtime caches - this is particularly important when memory resident as something
1252 which is cached may well have changed since the last invocation (e.g. a
1253 file associated with a recipe might have been modified by the user).
1254 """
1255 build.reset_cache()
1256 bb.fetch._checksum_cache.mtime_cache.clear()
1257 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1258 if siggen_cache:
1259 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001260
1261 def matchFiles(self, bf):
1262 """
1263 Find the .bb files which match the expression in 'buildfile'.
1264 """
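        # If 'bf' names an existing file it is returned directly; otherwise 'bf' is
        # treated as a regular expression and matched against every recipe collected
        # from BBFILES, returning all matching files.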
1265 if bf.startswith("/") or bf.startswith("../"):
1266 bf = os.path.abspath(bf)
1267
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001268 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001269 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001270 try:
1271 os.stat(bf)
1272 bf = os.path.abspath(bf)
1273 return [bf]
1274 except OSError:
1275 regexp = re.compile(bf)
1276 matches = []
1277 for f in filelist:
1278 if regexp.search(f) and os.path.isfile(f):
1279 matches.append(f)
1280 return matches
1281
1282 def matchFile(self, buildfile):
1283 """
1284 Find the .bb file which matches the expression in 'buildfile'.
1285 Raise an error if multiple files
1286 """
1287 matches = self.matchFiles(buildfile)
1288 if len(matches) != 1:
1289 if matches:
1290 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1292                 for f in matches:
1293                     msg += "\n    %s" % f
1294 parselog.error(msg)
1295 else:
1296 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1297 raise NoSpecificMatch
1298 return matches[0]
1299
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001300 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001301 """
1302 Build the file matching regexp buildfile
1303 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001304 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001305
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001306 # Too many people use -b because they think it's how you normally
1307 # specify a target to be built, so show a warning
1308 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1309
1310 self.buildFileInternal(buildfile, task)
1311
1312 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1313 """
1314 Build the file matching regexp buildfile
1315 """
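        # Rough flow: re-parse the configuration, normalise the task name to a "do_"
        # prefix, parse just this recipe (plus its bbappends) into the recipe cache,
        # strip its external build/runtime dependencies so only tasks from this file
        # are scheduled, then drive a single-recipe RunQueue from the server's idle
        # callback.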
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001316
1317 # Parse the configuration here. We need to do it explicitly here since
1318 # buildFile() doesn't use the cache
1319 self.parseConfiguration()
1320
1321 # If we are told to do the None task then query the default task
1322 if task is None:
1323 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001324 if not task.startswith("do_"):
1325 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001326
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001327 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001328 fn = self.matchFile(fn)
1329
1330 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001331 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001333 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1334
1335 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001336 infos = dict(infos)
1337
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001338 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001339 try:
1340 info_array = infos[fn]
1341 except KeyError:
1342 bb.fatal("%s does not exist" % fn)
1343
1344 if info_array[0].skipped:
1345 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1346
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001347 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001348
1349 # Tweak some variables
1350 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001351 self.recipecaches[mc].ignored_dependencies = set()
1352 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001353 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001354
1355 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001356 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1357 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001358 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1359 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360
1361 # Invalidate task for target if force mode active
1362 if self.configuration.force:
1363 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001364 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001365
1366 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001367 taskdata = {}
1368 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001369 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001370
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001371 if quietlog:
1372 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1373 bb.runqueue.logger.setLevel(logging.WARNING)
1374
1375 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1376 if fireevents:
1377 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378
1379 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001380 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001382 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383
1384 def buildFileIdle(server, rq, abort):
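            # Polled from the server's idle loop: keeps returning the runqueue's
            # retval while work remains, and returns False (after firing
            # BuildCompleted and finishing the async command) once the build has
            # completed or been stopped.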
1385
1386 msg = None
1387 interrupted = 0
1388 if abort or self.state == state.forceshutdown:
1389 rq.finish_runqueue(True)
1390 msg = "Forced shutdown"
1391 interrupted = 2
1392 elif self.state == state.shutdown:
1393 rq.finish_runqueue(False)
1394 msg = "Stopped build"
1395 interrupted = 1
1396 failures = 0
1397 try:
1398 retval = rq.execute_runqueue()
1399 except runqueue.TaskFailure as exc:
1400 failures += len(exc.args)
1401 retval = False
1402 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001403 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001404 if quietlog:
1405 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001406 return False
1407
1408 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001409 if fireevents:
1410 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001411 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001412 # We trashed self.recipecaches above
1413 self.parsecache_valid = False
1414 self.configuration.limited_deps = False
1415 bb.parse.siggen.reset(self.data)
1416 if quietlog:
1417 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001418 return False
1419 if retval is True:
1420 return True
1421 return retval
1422
1423 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1424
1425 def buildTargets(self, targets, task):
1426 """
1427 Attempt to build the targets specified
1428 """
1429
1430 def buildTargetsIdle(server, rq, abort):
1431 msg = None
1432 interrupted = 0
1433 if abort or self.state == state.forceshutdown:
1434 rq.finish_runqueue(True)
1435 msg = "Forced shutdown"
1436 interrupted = 2
1437 elif self.state == state.shutdown:
1438 rq.finish_runqueue(False)
1439 msg = "Stopped build"
1440 interrupted = 1
1441 failures = 0
1442 try:
1443 retval = rq.execute_runqueue()
1444 except runqueue.TaskFailure as exc:
1445 failures += len(exc.args)
1446 retval = False
1447 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001448 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001449 return False
1450
1451 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001452 try:
1453 for mc in self.multiconfigs:
1454 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1455 finally:
1456 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001457 return False
1458 if retval is True:
1459 return True
1460 return retval
1461
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001462 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001463 self.buildSetVars()
1464
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001465 # If we are told to do the None task then query the default task
1466 if task is None:
1467 task = self.configuration.cmd
1468
1469 if not task.startswith("do_"):
1470 task = "do_%s" % task
1471
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001472 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
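        # Each target is annotated with the task unless it already carries one,
        # e.g. "busybox" with task "do_build" becomes "busybox:do_build".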
1473
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001474 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001475
1476 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001477
1478 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001479
1480 # make targets to always look as <target>:do_<task>
1481 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001482 for target in runlist:
1483 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001484 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001485 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001486
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001487 for mc in self.multiconfigs:
1488 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001489
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001490 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001491 if 'universe' in targets:
1492 rq.rqdata.warn_multi_bb = True
1493
1494 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1495
1496
1497 def getAllKeysWithFlags(self, flaglist):
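        # Returns a dict keyed by variable name, each entry holding the variable's
        # value ('v', expanded except for python functions), its history, and any of
        # the requested flags, e.g. {'FOO': {'v': 'bar', 'history': [...], 'doc': None}}.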
1498 dump = {}
1499 for k in self.data.keys():
1500 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001501 expand = True
1502 flags = self.data.getVarFlags(k)
1503 if flags and "func" in flags and "python" in flags:
1504 expand = False
1505 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001506 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1507 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001508 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001509 'history' : self.data.varhistory.variable(k),
1510 }
1511 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001512 if flags and d in flags:
1513 dump[k][d] = flags[d]
1514 else:
1515 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001516 except Exception as e:
1517 print(e)
1518 return dump
1519
1520
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521 def updateCacheSync(self):
1522 if self.state == state.running:
1523 return
1524
1525 # reload files for which we got notifications
1526 for p in self.inotify_modified_files:
1527 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001528 if p in bb.parse.BBHandler.cached_statements:
1529 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001530 self.inotify_modified_files = []
1531
1532 if not self.baseconfig_valid:
1533 logger.debug(1, "Reloading base configuration data")
1534 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001535 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001536
1537 # This is called for all async commands when self.state != running
1538 def updateCache(self):
1539 if self.state == state.running:
1540 return
1541
1542 if self.state in (state.shutdown, state.forceshutdown, state.error):
1543 if hasattr(self.parser, 'shutdown'):
1544 self.parser.shutdown(clean=False, force = True)
1545 raise bb.BBHandledException()
1546
1547 if self.state != state.parsing:
1548 self.updateCacheSync()
1549
1550 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001551 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001552 self.parseConfiguration ()
1553 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001554 for mc in self.multiconfigs:
1555 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001556
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001558 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001560
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001561 for dep in self.configuration.extra_assume_provided:
1562 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001563
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001564 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001565 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1566
1567 # Add inotify watches for directories searched for bb/bbappend files
1568 for dirent in searchdirs:
1569 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001570
1571 self.parser = CookerParser(self, filelist, masked)
1572 self.parsecache_valid = True
1573
1574 self.state = state.parsing
1575
1576 if not self.parser.parse_next():
1577 collectlog.debug(1, "parsing complete")
1578 if self.parser.error:
1579 raise bb.BBHandledException()
1580 self.show_appends_with_no_recipes()
1581 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001582 for mc in self.multiconfigs:
1583 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001584 self.state = state.running
1585
1586 # Send an event listing all stamps reachable after parsing
1587 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001588 for mc in self.multiconfigs:
1589 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1590 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001591 return None
1592
1593 return True
1594
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001595 def checkPackages(self, pkgs_to_build, task=None):
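        # Normalise the requested target list: warn about targets that are in
        # ASSUME_PROVIDED, rewrite the deprecated "multiconfig:" prefix to "mc:",
        # and expand the 'world' and 'universe' pseudo-targets for each multiconfig.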
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001596
1597 # Return a copy, don't modify the original
1598 pkgs_to_build = pkgs_to_build[:]
1599
1600 if len(pkgs_to_build) == 0:
1601 raise NothingToBuild
1602
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001603 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604 for pkg in pkgs_to_build:
1605 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001606 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001607 if pkg.startswith("multiconfig:"):
1608 pkgs_to_build.remove(pkg)
1609 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001610
1611 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001613 for mc in self.multiconfigs:
1614 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1615 for t in self.recipecaches[mc].world_target:
1616 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001617 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001618 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001619
1620 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001621 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001622 parselog.debug(1, "collating packages for \"universe\"")
1623 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001624 for mc in self.multiconfigs:
1625 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001626 if task:
1627 foundtask = False
1628 for provider_fn in self.recipecaches[mc].providers[t]:
1629 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1630 foundtask = True
1631 break
1632 if not foundtask:
1633 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1634 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001635 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001636 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001637 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001638
1639 return pkgs_to_build
1640
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001641 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001642 # We now are in our own process so we can call this here.
1643 # PRServ exits if its parent process exits
1644 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001645 return
1646
1647 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001648 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001649 if self.hashserv:
1650 self.hashserv.process.terminate()
1651 self.hashserv.process.join()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001652 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001653
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654 def shutdown(self, force = False):
1655 if force:
1656 self.state = state.forceshutdown
1657 else:
1658 self.state = state.shutdown
1659
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001660 if self.parser:
1661 self.parser.shutdown(clean=not force, force=force)
1662
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001663 def finishcommand(self):
1664 self.state = state.initial
1665
1666 def reset(self):
1667 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001668 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001669
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001670 def clientComplete(self):
1671 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001672 self.finishcommand()
1673 self.extraconfigdata = {}
1674 self.command.reset()
1675 self.databuilder.reset()
1676 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001677
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001678
1679class CookerExit(bb.event.Event):
1680 """
1681 Notify clients of the Cooker shutdown
1682 """
1683
1684 def __init__(self):
1685 bb.event.Event.__init__(self)
1686
1687
1688class CookerCollectFiles(object):
1689 def __init__(self, priorities):
1690 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001691 # Priorities is a list of tuples, with the second element as the pattern.
1692 # The reverse lexicographic sort puts a nested layer's longer, more specific
1693 # pattern ahead of its parent's shorter prefix, so nested layers are evaluated first.
1694 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001695
1696 def calc_bbfile_priority( self, filename, matched = None ):
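        # Returns the priority of the first collection whose BBFILE_PATTERN matches
        # the filename (patterns are pre-sorted most-specific first), or 0 if none
        # match. Regexes that matched at least one file are recorded in 'matched'
        # so that unused patterns can be warned about later.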
1697 for _, _, regex, pri in self.bbfile_config_priorities:
1698 if regex.match(filename):
1699 if matched is not None:
1700 if regex not in matched:
1701 matched.add(regex)
1702 return pri
1703 return 0
1704
1705 def get_bbfiles(self):
1706 """Get list of default .bb files by reading out the current directory"""
1707 path = os.getcwd()
1708 contents = os.listdir(path)
1709 bbfiles = []
1710 for f in contents:
1711 if f.endswith(".bb"):
1712 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1713 return bbfiles
1714
1715 def find_bbfiles(self, path):
1716 """Find all the .bb and .bbappend files in a directory"""
1717 found = []
1718 for dir, dirs, files in os.walk(path):
1719 for ignored in ('SCCS', 'CVS', '.svn'):
1720 if ignored in dirs:
1721 dirs.remove(ignored)
1722 found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1723
1724 return found
1725
1726 def collect_bbfiles(self, config, eventdata):
1727 """Collect all available .bb build files"""
1728 masked = 0
1729
1730 collectlog.debug(1, "collecting .bb files")
1731
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001732 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001733 config.setVar("BBFILES", " ".join(files))
1734
1735 # Sort files by priority
1736 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1737
1738 if not len(files):
1739 files = self.get_bbfiles()
1740
1741 if not len(files):
1742 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1743 bb.event.fire(CookerExit(), eventdata)
1744
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001745 # We need to track where we look so that we can add inotify watches. There
1746 # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001747 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001748 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001749 if hasattr(os, 'scandir'):
1750 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001751 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001752
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001753 def ourlistdir(d):
1754 searchdirs.append(d)
1755 return origlistdir(d)
1756
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001757 def ourscandir(d):
1758 searchdirs.append(d)
1759 return origscandir(d)
1760
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001761 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001762 if hasattr(os, 'scandir'):
1763 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001764 try:
1765 # Can't use set here as order is important
1766 newfiles = []
1767 for f in files:
1768 if os.path.isdir(f):
1769 dirfiles = self.find_bbfiles(f)
1770 for g in dirfiles:
1771 if g not in newfiles:
1772 newfiles.append(g)
1773 else:
1774 globbed = glob.glob(f)
1775 if not globbed and os.path.exists(f):
1776 globbed = [f]
1777 # glob gives files in order on disk. Sort to be deterministic.
1778 for g in sorted(globbed):
1779 if g not in newfiles:
1780 newfiles.append(g)
1781 finally:
1782 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001783 if hasattr(os, 'scandir'):
1784 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001785
1786 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001787
1788 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001789 # First validate the individual regular expressions and ignore any
1790 # that do not compile
1791 bbmasks = []
1792 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001793 # When constructing an older style single regex, it's possible for BBMASK
1794 # to end up beginning with '|', which matches and masks _everything_.
1795 if mask.startswith("|"):
1796 collectlog.warning("BBMASK contains a regular expression beginning with '|', fixing: %s" % mask)
1797 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001798 try:
1799 re.compile(mask)
1800 bbmasks.append(mask)
1801 except sre_constants.error:
1802 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1803
1804 # Then validate the combined regular expressions. This should never
1805 # fail, but better safe than sorry...
1806 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001807 try:
1808 bbmask_compiled = re.compile(bbmask)
1809 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001810 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1811 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001812
1813 bbfiles = []
1814 bbappend = []
1815 for f in newfiles:
1816 if bbmask and bbmask_compiled.search(f):
1817 collectlog.debug(1, "skipping masked file %s", f)
1818 masked += 1
1819 continue
1820 if f.endswith('.bb'):
1821 bbfiles.append(f)
1822 elif f.endswith('.bbappend'):
1823 bbappend.append(f)
1824 else:
1825 collectlog.debug(1, "skipping %s: unknown file extension", f)
1826
1827 # Build a list of .bbappend files for each .bb file
1828 for f in bbappend:
1829 base = os.path.basename(f).replace('.bbappend', '.bb')
1830 self.bbappends.append((base, f))
1831
1832 # Find overlayed recipes
1833 # bbfiles will be in priority order which makes this easy
1834 bbfile_seen = dict()
1835 self.overlayed = defaultdict(list)
1836 for f in reversed(bbfiles):
1837 base = os.path.basename(f)
1838 if base not in bbfile_seen:
1839 bbfile_seen[base] = f
1840 else:
1841 topfile = bbfile_seen[base]
1842 self.overlayed[topfile].append(f)
1843
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001844 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001845
1846 def get_file_appends(self, fn):
1847 """
1848 Returns a list of .bbappend files to apply to fn
1849 """
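        # A '%' in a bbappend name acts as a wildcard for the rest of the recipe
        # name: e.g. busybox_%.bbappend is applied to busybox_1.31.0.bb because the
        # portion before the '%' matches.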
1850 filelist = []
1851 f = os.path.basename(fn)
1852 for b in self.bbappends:
1853 (bbappend, filename) = b
1854 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1855 filelist.append(filename)
1856 return filelist
1857
1858 def collection_priorities(self, pkgfns, d):
1859
1860 priorities = {}
1861
1862 # Calculate priorities for each file
1863 matched = set()
1864 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001865 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001866 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1867
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001868 unmatched = set()
1869 for _, _, regex, pri in self.bbfile_config_priorities:
1870 if regex not in matched:
1871 unmatched.add(regex)
1872
Brad Bishop316dfdd2018-06-25 12:45:53 -04001873 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1874 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001875 for b in self.bbappends:
1876 (bbfile, append) = b
1877 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001878 # If the bbappend is already matched by a regex in the "matched" set, return False
1879 for matched_regex in matched:
1880 if matched_regex.match(append):
1881 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001882 return True
1883 return False
1884
1885 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001886 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001887 unmatched.remove(unmatch)
1888
1889 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1890 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001891 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001892 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001893
1894 return priorities
1895
1896class ParsingFailure(Exception):
1897 def __init__(self, realexception, recipe):
1898 self.realexception = realexception
1899 self.recipe = recipe
1900 Exception.__init__(self, realexception, recipe)
1901
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001902class Parser(multiprocessing.Process):
1903 def __init__(self, jobs, results, quit, init, profile):
1904 self.jobs = jobs
1905 self.results = results
1906 self.quit = quit
1907 self.init = init
1908 multiprocessing.Process.__init__(self)
1909 self.context = bb.utils.get_context().copy()
1910 self.handlers = bb.event.get_class_handlers().copy()
1911 self.profile = profile
1912
1913 def run(self):
1914
1915 if not self.profile:
1916 self.realrun()
1917 return
1918
1919 try:
1920 import cProfile as profile
1921 except:
1922 import profile
1923 prof = profile.Profile()
1924 try:
1925 profile.Profile.runcall(prof, self.realrun)
1926 finally:
1927 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1928 prof.dump_stats(logfile)
1929
1930 def realrun(self):
1931 if self.init:
1932 self.init()
1933
1934 pending = []
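        # Main worker loop: exit when the quit queue has an entry, otherwise take
        # jobs from the pre-chunked job list and push results; results that cannot
        # be queued within 0.25s are kept in 'pending' and retried first.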
1935 while True:
1936 try:
1937 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001938 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001939 pass
1940 else:
1941 self.results.cancel_join_thread()
1942 break
1943
1944 if pending:
1945 result = pending.pop()
1946 else:
1947 try:
Brad Bishop19323692019-04-05 15:28:33 -04001948 job = self.jobs.pop()
1949 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001950 break
1951 result = self.parse(*job)
1952
1953 try:
1954 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001955 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001956 pending.append(result)
1957
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001958 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001959 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001960 # Record the filename we're parsing into any events generated
1961 def parse_filter(self, record):
1962 record.taskpid = bb.event.worker_pid
1963 record.fn = filename
1964 return True
1965
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001966 # Reset our environment and handlers to the original settings
1967 bb.utils.set_context(self.context.copy())
1968 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001969 bb.event.LogHandler.filter = parse_filter
1970
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001971 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001972 except Exception as exc:
1973 tb = sys.exc_info()[2]
1974 exc.recipe = filename
1975 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1976 return True, exc
1977 # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
1978 # and, for example, a worker thread doesn't just exit on its own in response
1979 # to a SystemExit event.
1980 except BaseException as exc:
1981 return True, ParsingFailure(exc, filename)
1982
1983class CookerParser(object):
1984 def __init__(self, cooker, filelist, masked):
1985 self.filelist = filelist
1986 self.cooker = cooker
1987 self.cfgdata = cooker.data
1988 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001989 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001990
1991 # Accounting statistics
1992 self.parsed = 0
1993 self.cached = 0
1994 self.error = 0
1995 self.masked = masked
1996
1997 self.skipped = 0
1998 self.virtuals = 0
1999 self.total = len(filelist)
2000
2001 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002 self.process_names = []
2003
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002004 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
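        # Split the recipes into those whose cached parse is still valid (served
        # from the cache via load_cached) and those that must be re-parsed by the
        # worker processes.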
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002005 self.fromcache = []
2006 self.willparse = []
2007 for filename in self.filelist:
2008 appends = self.cooker.collection.get_file_appends(filename)
2009 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002010 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002011 else:
2012 self.fromcache.append((filename, appends))
2013 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002014 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002015
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002016 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002017 multiprocessing.cpu_count()), len(self.willparse))
2018
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002019 self.start()
2020 self.haveshutdown = False
2021
2022 def start(self):
2023 self.results = self.load_cached()
2024 self.processes = []
2025 if self.toparse:
2026 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2027 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002028 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002029 bb.utils.set_process_name(multiprocessing.current_process().name)
2030 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2031 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002032
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002033 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002034 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002035
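        # Distribute the files to parse round-robin across the worker processes,
        # e.g. chunkify([a, b, c, d, e], 2) -> [[a, c, e], [b, d]].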
2036 def chunkify(lst,n):
2037 return [lst[i::n] for i in range(n)]
2038 self.jobs = chunkify(self.willparse, self.num_processes)
2039
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002040 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002041 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002042 parser.start()
2043 self.process_names.append(parser.name)
2044 self.processes.append(parser)
2045
2046 self.results = itertools.chain(self.results, self.parse_generator())
2047
2048 def shutdown(self, clean=True, force=False):
2049 if not self.toparse:
2050 return
2051 if self.haveshutdown:
2052 return
2053 self.haveshutdown = True
2054
2055 if clean:
2056 event = bb.event.ParseCompleted(self.cached, self.parsed,
2057 self.skipped, self.masked,
2058 self.virtuals, self.error,
2059 self.total)
2060
2061 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002062 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002063 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002064 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002065 self.parser_quit.cancel_join_thread()
2066 for process in self.processes:
2067 self.parser_quit.put(None)
2068
Brad Bishop08902b02019-08-20 09:16:51 -04002069 # Clean up the queue before calling process.join(), otherwise there might be
2070 # deadlocks.
2071 while True:
2072 try:
2073 self.result_queue.get(timeout=0.25)
2074 except queue.Empty:
2075 break
2076
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002077 for process in self.processes:
2078 if force:
2079 process.join(.1)
2080 process.terminate()
2081 else:
2082 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002083
2084 sync = threading.Thread(target=self.bb_cache.sync)
2085 sync.start()
2086 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002087 bb.codeparser.parser_cache_savemerge()
2088 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002089 if self.cooker.configuration.profile:
2090 profiles = []
2091 for i in self.process_names:
2092 logfile = "profile-parse-%s.log" % i
2093 if os.path.exists(logfile):
2094 profiles.append(logfile)
2095
2096 pout = "profile-parse.log.processed"
2097 bb.utils.process_profilelog(profiles, pout = pout)
2098 print("Processed parsing statistics saved to %s" % (pout))
2099
2100 def load_cached(self):
2101 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002102 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002103 yield not cached, infos
2104
2105 def parse_generator(self):
2106 while True:
2107 if self.parsed >= self.toparse:
2108 break
2109
2110 try:
2111 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002112 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002113 pass
2114 else:
2115 value = result[1]
2116 if isinstance(value, BaseException):
2117 raise value
2118 else:
2119 yield result
2120
2121 def parse_next(self):
2122 result = []
2123 parsed = None
2124 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002125 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002126 except StopIteration:
2127 self.shutdown()
2128 return False
2129 except bb.BBHandledException as exc:
2130 self.error += 1
2131 logger.error('Failed to parse recipe: %s' % exc.recipe)
2132 self.shutdown(clean=False)
2133 return False
2134 except ParsingFailure as exc:
2135 self.error += 1
2136 logger.error('Unable to parse %s: %s' %
2137 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2138 self.shutdown(clean=False)
2139 return False
2140 except bb.parse.ParseError as exc:
2141 self.error += 1
2142 logger.error(str(exc))
2143 self.shutdown(clean=False)
2144 return False
2145 except bb.data_smart.ExpansionError as exc:
2146 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002147 bbdir = os.path.dirname(__file__) + os.sep
2148 etype, value, _ = sys.exc_info()
2149 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2150 logger.error('ExpansionError during parsing %s', value.recipe,
2151 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002152 self.shutdown(clean=False)
2153 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154 except Exception as exc:
2155 self.error += 1
2156 etype, value, tb = sys.exc_info()
2157 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002158 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002159 exc_info=(etype, value, exc.traceback))
2160 else:
2161 # Most likely, an exception occurred during raising an exception
2162 import traceback
2163 logger.error('Exception during parse: %s' % traceback.format_exc())
2164 self.shutdown(clean=False)
2165 return False
2166
2167 self.current += 1
2168 self.virtuals += len(result)
2169 if parsed:
2170 self.parsed += 1
2171 if self.parsed % self.progress_chunk == 0:
2172 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2173 self.cfgdata)
2174 else:
2175 self.cached += 1
2176
2177 for virtualfn, info_array in result:
2178 if info_array[0].skipped:
2179 self.skipped += 1
2180 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002181 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2182 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002183 parsed=parsed, watcher = self.cooker.add_filewatch)
2184 return True
2185
2186 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002187 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002188 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002189 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2190 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)