blob: 5840aa75e0cabef52f968c871a5ae4b6144ddbc7 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell
4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
5# Copyright (C) 2005 Holger Hans Peter Freyther
6# Copyright (C) 2005 ROAD GmbH
7# Copyright (C) 2006 - 2007 Richard Purdie
8#
Brad Bishopc342db32019-05-15 21:57:59 -04009# SPDX-License-Identifier: GPL-2.0-only
Patrick Williamsc124f4f2015-09-15 14:41:29 -050010#
Patrick Williamsc0f7c042017-02-23 20:41:17 -060011
Patrick Williamsc124f4f2015-09-15 14:41:29 -050012import sys, os, glob, os.path, re, time
13import atexit
14import itertools
15import logging
16import multiprocessing
17import sre_constants
18import threading
Patrick Williamsc0f7c042017-02-23 20:41:17 -060019from io import StringIO, UnsupportedOperation
Patrick Williamsc124f4f2015-09-15 14:41:29 -050020from contextlib import closing
21from functools import wraps
Patrick Williamsc0f7c042017-02-23 20:41:17 -060022from collections import defaultdict, namedtuple
Patrick Williamsc124f4f2015-09-15 14:41:29 -050023import bb, bb.exceptions, bb.command
24from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
Patrick Williamsc0f7c042017-02-23 20:41:17 -060025import queue
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import signal
27import subprocess
28import errno
29import prserv.serv
30import pyinotify
Patrick Williamsc0f7c042017-02-23 20:41:17 -060031import json
32import pickle
33import codecs
Brad Bishop08902b02019-08-20 09:16:51 -040034import hashserv
Patrick Williamsc124f4f2015-09-15 14:41:29 -050035
# Module-level loggers: "BitBake" is the root cooker logger; the others are
# per-subsystem children used for collection, build, parsing and provider messages.
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
41
class NoSpecificMatch(bb.BBHandledException):
    """
    Raised when a recipe file lookup finds either no match or more than one
    match for the requested file.
    """
46
class NothingToBuild(Exception):
    """
    Raised when a build is requested but no buildable targets remain.
    """
51
class CollectionError(bb.BBHandledException):
    """
    Raised when the layer (BBFILE_COLLECTIONS) configuration is incorrect.
    """
56
class state:
    """Integer enumeration of the cooker server's lifecycle states."""
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for the numeric state *code*.

        Raises ValueError if *code* does not correspond to any state.
        """
        for attr in dir(cls):
            value = getattr(cls, attr)
            if type(value) == type(cls.initial) and value == code:
                return attr
        raise ValueError("Invalid status code: %s" % code)
67
Patrick Williamsc124f4f2015-09-15 14:41:29 -050068
class SkippedPackage:
    """Lightweight record describing why a recipe was skipped during parsing.

    Built either from a full recipe *info* object (which wins if both are
    given) or from a bare *reason* string.
    """
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the interesting fields out of the recipe info object.
            self.pn, self.skipreason = info.pn, info.skipreason
            self.provides, self.rprovides = info.provides, info.rprovides
        elif reason:
            self.skipreason = reason
83
84
class CookerFeatures(object):
    """Set of optional cooker features requested by the UI.

    Unknown feature requests are silently ignored by setFeature().
    """
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        # Iteration order is unordered (backed by a set).
        return self._features.__iter__()

    def __next__(self):
        # BUG FIX: the original called next(self._features) directly, which
        # always raised TypeError because a set is not an iterator. Wrap it
        # in iter() so a single element can be retrieved.
        return next(iter(self._features))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500105
106
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600107class EventWriter:
108 def __init__(self, cooker, eventfile):
109 self.file_inited = None
110 self.cooker = cooker
111 self.eventfile = eventfile
112 self.event_queue = []
113
114 def write_event(self, event):
115 with open(self.eventfile, "a") as f:
116 try:
117 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
118 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
119 "vars": str_event}))
120 except Exception as err:
121 import traceback
122 print(err, traceback.format_exc())
123
124 def send(self, event):
125 if self.file_inited:
126 # we have the file, just write the event
127 self.write_event(event)
128 else:
129 # init on bb.event.BuildStarted
130 name = "%s.%s" % (event.__module__, event.__class__.__name__)
131 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
132 with open(self.eventfile, "w") as f:
133 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
134
135 self.file_inited = True
136
137 # write pending events
138 for evt in self.event_queue:
139 self.write_event(evt)
140
141 # also write the current event
142 self.write_event(event)
143 else:
144 # queue all events until the file is inited
145 self.event_queue.append(event)
146
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500147#============================================================================#
148# BBCooker
149#============================================================================#
150class BBCooker:
151 """
152 Manages one bitbake build run
153 """
154
    def __init__(self, configuration, featureSet=None):
        """Set up the cooker: feature set, inotify watchers, base config,
        optional event log writer, idle callbacks and signal handlers.

        configuration: cooker configuration object (provides writeeventlog,
        server_register_idlecallback, etc.)
        featureSet: optional iterable of CookerFeatures constants.
        """
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        # Timestamped debug output tracks startup cost of each phase.
        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        # Watcher for base configuration files (conf/*.conf etc.)
        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        # bbseen/bbwatchedfiles are extra bookkeeping attached to the watch
        # managers: directories already watched and files of interest.
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        # Watcher for recipe/parse inputs (invalidates the parse cache).
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        # Hash equivalence server handle/port, started lazily in handlePRServ()
        self.hashserv = None
        self.hashservport = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        # Poll pending inotify events from the server idle loop (every 1.0s).
        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout has no fileno (e.g. replaced by a StringIO) - nothing to do
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()
243
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500244 def process_inotify_updates(self):
245 for n in [self.confignotifier, self.notifier]:
246 if n.check_events(timeout=0):
247 # read notified events and enqeue them
248 n.read_events()
249 n.process_events()
250
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500251 def config_notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500252 if event.maskname == "IN_Q_OVERFLOW":
253 bb.warn("inotify event queue overflowed, invalidating caches.")
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500254 self.parsecache_valid = False
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500255 self.baseconfig_valid = False
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500256 bb.parse.clear_cache()
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500257 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500258 if not event.pathname in self.configwatcher.bbwatchedfiles:
259 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500260 if not event.pathname in self.inotify_modified_files:
261 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500262 self.baseconfig_valid = False
263
264 def notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500265 if event.maskname == "IN_Q_OVERFLOW":
266 bb.warn("inotify event queue overflowed, invalidating caches.")
267 self.parsecache_valid = False
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500268 bb.parse.clear_cache()
269 return
270 if event.pathname.endswith("bitbake-cookerdaemon.log") \
271 or event.pathname.endswith("bitbake.lock"):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500272 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500273 if not event.pathname in self.inotify_modified_files:
274 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500275 self.parsecache_valid = False
276
    def add_filewatch(self, deps, watcher=None, dirs=False):
        """Add inotify watches for a list of dependencies.

        deps: iterable of tuples whose first element is a file path
              (e.g. entries of __base_depends / __depends).
        watcher: WatchManager to use; defaults to self.watcher.
        dirs: if True, watch the paths themselves; otherwise watch each
              path's containing directory.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        # Remember the original (nonexistent) target so that
                        # its creation under the watched parent is noticed.
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory doesn't exist: walk up one level and retry.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        # Most likely the kernel inotify watch limit was hit.
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
313
314 def sigterm_exception(self, signum, stackframe):
315 if signum == signal.SIGTERM:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500316 bb.warn("Cooker received SIGTERM, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500317 elif signum == signal.SIGHUP:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500318 bb.warn("Cooker received SIGHUP, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500319 self.state = state.forceshutdown
320
321 def setFeatures(self, features):
322 # we only accept a new feature set if we're in state initial, so we can reset without problems
323 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
324 raise Exception("Illegal state for feature set change")
325 original_featureset = list(self.featureset)
326 for feature in features:
327 self.featureset.setFeature(feature)
328 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
329 if (original_featureset != list(self.featureset)) and self.state != state.error:
330 self.reset()
331
    def initConfigurationData(self):
        """(Re)parse the base configuration and rebuild the datastore.

        Resets the cooker state to initial, selects the cache classes to use,
        runs the CookerDataBuilder over the base configuration, and installs
        file watches on everything the base configuration depended on.
        """

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Each entry is "module:ClassName"; import and collect the class.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Watch every file the base configuration depended on so we can
        # invalidate it when one of them changes.
        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False
387
388 def handlePRServ(self):
389 # Setup a PR Server based on the new configuration
390 try:
391 self.prhost = prserv.serv.auto_start(self.data)
392 except prserv.serv.PRServiceConfigError as e:
393 bb.fatal("Unable to start PR Server, exitting")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500394
Brad Bishop08902b02019-08-20 09:16:51 -0400395 if self.data.getVar("BB_HASHSERVE") == "localhost:0":
396 if not self.hashserv:
397 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
398 self.hashserv = hashserv.create_server(('localhost', 0), dbfile, '')
399 self.hashservport = "localhost:" + str(self.hashserv.server_port)
400 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
401 self.hashserv.process.daemon = True
402 self.hashserv.process.start()
403 self.data.setVar("BB_HASHSERVE", self.hashservport)
404 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservport)
405 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservport)
406 for mc in self.databuilder.mcdata:
407 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservport)
408
409 bb.parse.init_parser(self.data)
410
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500411 def enableDataTracking(self):
412 self.configuration.tracking = True
413 if hasattr(self, "data"):
414 self.data.enableTracking()
415
416 def disableDataTracking(self):
417 self.configuration.tracking = False
418 if hasattr(self, "data"):
419 self.data.disableTracking()
420
    def parseConfiguration(self):
        """Apply post-base-configuration settings and reset the recipe caches.

        Handles BB_VERBOSE_LOGS, BB_NICE_LEVEL and BBFILE_COLLECTIONS, and
        creates one empty CacheData per multiconfig. Marks the parse cache
        invalid.
        """
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            # os.nice() is relative, so compute the delta to the target level.
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Throw away any previous recipe caches and create fresh ones,
        # one per multiconfig.
        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False
444
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500445 def updateConfigOpts(self, options, environment, cmdline):
446 self.ui_cmdline = cmdline
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500447 clean = True
448 for o in options:
449 if o in ['prefile', 'postfile']:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500450 # Only these options may require a reparse
451 try:
452 if getattr(self.configuration, o) == options[o]:
453 # Value is the same, no need to mark dirty
454 continue
455 except AttributeError:
456 pass
457 logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
458 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500459 clean = False
460 setattr(self.configuration, o, options[o])
461 for k in bb.utils.approved_variables():
462 if k in environment and k not in self.configuration.env:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500463 logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500464 self.configuration.env[k] = environment[k]
465 clean = False
466 if k in self.configuration.env and k not in environment:
467 logger.debug(1, "Updating environment variable %s (deleted)" % (k))
468 del self.configuration.env[k]
469 clean = False
470 if k not in self.configuration.env and k not in environment:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500471 continue
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500472 if environment[k] != self.configuration.env[k]:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500473 logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500474 self.configuration.env[k] = environment[k]
475 clean = False
476 if not clean:
477 logger.debug(1, "Base environment change, triggering reparse")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500478 self.reset()
479
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """
        # server/data/abort are supplied by the idle-callback signature but
        # are not needed here; the command object holds all required state.
        return self.command.runAsyncCommand()
488
    def showVersions(self):
        """Print a table of latest vs preferred versions for every known recipe.

        Only the default ('') multiconfig is listed. The preferred column is
        left blank when it matches the latest version.
        """
        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            # Version tuples are (epoch, version, revision); render as e:v-r.
            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
507
508 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
509 """
510 Show the outer or per-recipe environment
511 """
512 fn = None
513 envdata = None
Brad Bishop15ae2502019-06-18 21:44:24 -0400514 mc = ''
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500515 if not pkgs_to_build:
516 pkgs_to_build = []
517
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500518 orig_tracking = self.configuration.tracking
519 if not orig_tracking:
520 self.enableDataTracking()
521 self.reset()
522
Brad Bishop15ae2502019-06-18 21:44:24 -0400523 def mc_base(p):
524 if p.startswith('mc:'):
525 s = p.split(':')
526 if len(s) == 2:
527 return s[1]
528 return None
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500529
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500530 if buildfile:
531 # Parse the configuration here. We need to do it explicitly here since
532 # this showEnvironment() code path doesn't use the cache
533 self.parseConfiguration()
534
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600535 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500536 fn = self.matchFile(fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600537 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500538 elif len(pkgs_to_build) == 1:
Brad Bishop15ae2502019-06-18 21:44:24 -0400539 mc = mc_base(pkgs_to_build[0])
540 if not mc:
541 ignore = self.data.getVar("ASSUME_PROVIDED") or ""
542 if pkgs_to_build[0] in set(ignore.split()):
543 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500544
Brad Bishop15ae2502019-06-18 21:44:24 -0400545 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500546
Brad Bishop15ae2502019-06-18 21:44:24 -0400547 mc = runlist[0][0]
548 fn = runlist[0][3]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500549
550 if fn:
551 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600552 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
553 envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500554 except Exception as e:
555 parselog.exception("Unable to read %s", fn)
556 raise
Brad Bishop15ae2502019-06-18 21:44:24 -0400557 else:
558 if not mc in self.databuilder.mcdata:
559 bb.fatal('Not multiconfig named "%s" found' % mc)
560 envdata = self.databuilder.mcdata[mc]
561 data.expandKeys(envdata)
562 parse.ast.runAnonFuncs(envdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500563
564 # Display history
565 with closing(StringIO()) as env:
566 self.data.inchistory.emit(env)
567 logger.plain(env.getvalue())
568
569 # emit variables and shell functions
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500570 with closing(StringIO()) as env:
571 data.emit_env(env, envdata, True)
572 logger.plain(env.getvalue())
573
574 # emit the metadata which isnt valid shell
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500575 for e in sorted(envdata.keys()):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600576 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500577 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500578
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500579 if not orig_tracking:
580 self.disableDataTracking()
581 self.reset()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500582
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build

        pkgs_to_build: list of targets, optionally "mc:<config>:<target>" or
            with an ":do_<task>" suffix.
        task: task name (None means the configured default); normalised to a
            "do_" prefix.
        abort: whether unbuildable targets abort taskdata resolution.
        allowincomplete: passed through to TaskData.

        Returns (taskdata, runlist) where taskdata maps multiconfig name ->
        TaskData and runlist entries are [mc, target, taskname, filename].
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    # only one wildcard target is supported per invocation
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        # One TaskData and one expanded datastore copy per multiconfig.
        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                # strip the "mc:<config>:" prefix off the target name
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                # per-target task override, e.g. "bash:do_fetch"
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency; iterate
            # until no new cross-config dependencies are discovered.
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        # mcdepends look like "mc:<from>:<to>:<target>:<task>"
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500693
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build

        Convenience wrapper around buildTaskData() for dependency-graph
        generation; returns (runlist, taskdata) - note the reversed order
        compared to buildTaskData().
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata
704
705 ######## WARNING : this function requires cache_extra to be enabled ########
706
707 def generateTaskDepTreeData(self, pkgs_to_build, task):
708 """
709 Create a dependency graph of pkgs_to_build including reverse dependency
710 information.
711 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500712 if not task.startswith("do_"):
713 task = "do_%s" % task
714
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500715 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600716 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500717 rq.rqdata.prepare()
718 return self.buildDependTree(rq, taskdata)
719
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600720 @staticmethod
721 def add_mc_prefix(mc, pn):
722 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -0400723 return "mc:%s:%s" % (mc, pn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600724 return pn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500725
726 def buildDependTree(self, rq, taskdata):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600727 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500728 depend_tree = {}
729 depend_tree["depends"] = {}
730 depend_tree["tdepends"] = {}
731 depend_tree["pn"] = {}
732 depend_tree["rdepends-pn"] = {}
733 depend_tree["packages"] = {}
734 depend_tree["rdepends-pkg"] = {}
735 depend_tree["rrecs-pkg"] = {}
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500736 depend_tree['providermap'] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600737 depend_tree["layer-priorities"] = self.bbfile_config_priorities
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500738
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600739 for mc in taskdata:
740 for name, fn in list(taskdata[mc].get_providermap().items()):
741 pn = self.recipecaches[mc].pkg_fn[fn]
742 pn = self.add_mc_prefix(mc, pn)
743 if name != pn:
744 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
745 depend_tree['providermap'][name] = (pn, version)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500746
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600747 for tid in rq.rqdata.runtaskentries:
748 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
749 pn = self.recipecaches[mc].pkg_fn[taskfn]
750 pn = self.add_mc_prefix(mc, pn)
751 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500752 if pn not in depend_tree["pn"]:
753 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600754 depend_tree["pn"][pn]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500755 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600756 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500757
758 # if we have extra caches, list all attributes they bring in
759 extra_info = []
760 for cache_class in self.caches_array:
761 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
762 cachefields = getattr(cache_class, 'cachefields', [])
763 extra_info = extra_info + cachefields
764
765 # for all attributes stored, add them to the dependency tree
766 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600767 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500768
769
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500770 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
771 if not dotname in depend_tree["tdepends"]:
772 depend_tree["tdepends"][dotname] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600773 for dep in rq.rqdata.runtaskentries[tid].depends:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800774 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
775 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600776 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
777 if taskfn not in seen_fns:
778 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500779 packages = []
780
781 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600782 for dep in taskdata[mc].depids[taskfn]:
783 depend_tree["depends"][pn].append(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500784
785 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600786 for rdep in taskdata[mc].rdepids[taskfn]:
787 depend_tree["rdepends-pn"][pn].append(rdep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500788
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600789 rdepends = self.recipecaches[mc].rundeps[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500790 for package in rdepends:
791 depend_tree["rdepends-pkg"][package] = []
792 for rdepend in rdepends[package]:
793 depend_tree["rdepends-pkg"][package].append(rdepend)
794 packages.append(package)
795
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600796 rrecs = self.recipecaches[mc].runrecs[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500797 for package in rrecs:
798 depend_tree["rrecs-pkg"][package] = []
799 for rdepend in rrecs[package]:
800 depend_tree["rrecs-pkg"][package].append(rdepend)
801 if not package in packages:
802 packages.append(package)
803
804 for package in packages:
805 if package not in depend_tree["packages"]:
806 depend_tree["packages"][package] = {}
807 depend_tree["packages"][package]["pn"] = pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600808 depend_tree["packages"][package]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500809 depend_tree["packages"][package]["version"] = version
810
811 return depend_tree
812
813 ######## WARNING : this function requires cache_extra to be enabled ########
814 def generatePkgDepTreeData(self, pkgs_to_build, task):
815 """
816 Create a dependency tree of pkgs_to_build, returning the data.
817 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500818 if not task.startswith("do_"):
819 task = "do_%s" % task
820
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500821 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500822
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600823 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500824 depend_tree = {}
825 depend_tree["depends"] = {}
826 depend_tree["pn"] = {}
827 depend_tree["rdepends-pn"] = {}
828 depend_tree["rdepends-pkg"] = {}
829 depend_tree["rrecs-pkg"] = {}
830
831 # if we have extra caches, list all attributes they bring in
832 extra_info = []
833 for cache_class in self.caches_array:
834 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
835 cachefields = getattr(cache_class, 'cachefields', [])
836 extra_info = extra_info + cachefields
837
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600838 tids = []
839 for mc in taskdata:
840 for tid in taskdata[mc].taskentries:
841 tids.append(tid)
842
843 for tid in tids:
844 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
845
846 pn = self.recipecaches[mc].pkg_fn[taskfn]
847 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500848
849 if pn not in depend_tree["pn"]:
850 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600851 depend_tree["pn"][pn]["filename"] = taskfn
852 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500853 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600854 rdepends = self.recipecaches[mc].rundeps[taskfn]
855 rrecs = self.recipecaches[mc].runrecs[taskfn]
856 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500857
858 # for all extra attributes stored, add them to the dependency tree
859 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600860 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500861
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600862 if taskfn not in seen_fns:
863 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500864
865 depend_tree["depends"][pn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500866 for dep in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500867 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600868 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
869 fn_provider = taskdata[mc].build_targets[dep][0]
870 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500871 else:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500872 pn_provider = dep
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600873 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500874 depend_tree["depends"][pn].append(pn_provider)
875
876 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600877 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500878 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600879 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
880 fn_rprovider = taskdata[mc].run_targets[rdep][0]
881 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500882 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600883 pn_rprovider = rdep
884 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500885 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
886
887 depend_tree["rdepends-pkg"].update(rdepends)
888 depend_tree["rrecs-pkg"].update(rrecs)
889
890 return depend_tree
891
892 def generateDepTreeEvent(self, pkgs_to_build, task):
893 """
894 Create a task dependency graph of pkgs_to_build.
895 Generate an event with the result
896 """
897 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
898 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
899
900 def generateDotGraphFiles(self, pkgs_to_build, task):
901 """
902 Create a task dependency graph of pkgs_to_build.
903 Save the result to a set of .dot files.
904 """
905
906 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
907
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500908 with open('pn-buildlist', 'w') as f:
909 for pn in depgraph["pn"]:
910 f.write(pn + "\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500911 logger.info("PN build list saved to 'pn-buildlist'")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500912
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500913 # Remove old format output files to ensure no confusion with stale data
914 try:
915 os.unlink('pn-depends.dot')
916 except FileNotFoundError:
917 pass
918 try:
919 os.unlink('package-depends.dot')
920 except FileNotFoundError:
921 pass
Brad Bishop79641f22019-09-10 07:20:22 -0400922 try:
923 os.unlink('recipe-depends.dot')
924 except FileNotFoundError:
925 pass
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500926
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500927 with open('task-depends.dot', 'w') as f:
928 f.write("digraph depends {\n")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400929 for task in sorted(depgraph["tdepends"]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500930 (pn, taskname) = task.rsplit(".", 1)
931 fn = depgraph["pn"][pn]["filename"]
932 version = depgraph["pn"][pn]["version"]
933 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400934 for dep in sorted(depgraph["tdepends"][task]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500935 f.write('"%s" -> "%s"\n' % (task, dep))
936 f.write("}\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500937 logger.info("Task dependencies saved to 'task-depends.dot'")
938
939 def show_appends_with_no_recipes(self):
940 # Determine which bbappends haven't been applied
941
942 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600943 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500944 recipefns.extend(self.skiplist.keys())
945
946 # Work out list of bbappends that have been applied
947 applied_appends = []
948 for fn in recipefns:
949 applied_appends.extend(self.collection.get_file_appends(fn))
950
951 appends_without_recipes = []
952 for _, appendfn in self.collection.bbappends:
953 if not appendfn in applied_appends:
954 appends_without_recipes.append(appendfn)
955
956 if appends_without_recipes:
957 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
958 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
959 False) or "no"
960 if warn_only.lower() in ("1", "yes", "true"):
961 bb.warn(msg)
962 else:
963 bb.fatal(msg)
964
965 def handlePrefProviders(self):
966
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600967 for mc in self.multiconfigs:
968 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600969 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500970
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600971 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500972 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600973 try:
974 (providee, provider) = p.split(':')
975 except:
976 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
977 continue
978 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
979 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
980 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500981
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500982 def findConfigFilePath(self, configfile):
983 """
984 Find the location on disk of configfile and if it exists and was parsed by BitBake
985 emit the ConfigFilePathFound event with the path to the file.
986 """
987 path = bb.cookerdata.findConfigFile(configfile, self.data)
988 if not path:
989 return
990
991 # Generate a list of parsed configuration files by searching the files
992 # listed in the __depends and __base_depends variables with a .conf suffix.
993 conffiles = []
994 dep_files = self.data.getVar('__base_depends', False) or []
995 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
996
997 for f in dep_files:
998 if f[0].endswith(".conf"):
999 conffiles.append(f[0])
1000
1001 _, conf, conffile = path.rpartition("conf/")
1002 match = os.path.join(conf, conffile)
1003 # Try and find matches for conf/conffilename.conf as we don't always
1004 # have the full path to the file.
1005 for cfg in conffiles:
1006 if cfg.endswith(match):
1007 bb.event.fire(bb.event.ConfigFilePathFound(path),
1008 self.data)
1009 break
1010
1011 def findFilesMatchingInDir(self, filepattern, directory):
1012 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001013 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001014 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1015 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1016 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001017 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001018 """
1019
1020 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001021 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001022 for path in bbpaths:
1023 dirpath = os.path.join(path, directory)
1024 if os.path.exists(dirpath):
1025 for root, dirs, files in os.walk(dirpath):
1026 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001027 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001028 matches.append(f)
1029
1030 if matches:
1031 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1032
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001033 def findProviders(self, mc=''):
1034 return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1035
1036 def findBestProvider(self, pn, mc=''):
1037 if pn in self.recipecaches[mc].providers:
1038 filenames = self.recipecaches[mc].providers[pn]
1039 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
1040 filename = eligible[0]
1041 return None, None, None, filename
1042 elif pn in self.recipecaches[mc].pkg_pn:
1043 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1044 else:
1045 return None, None, None, None
1046
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001047 def findConfigFiles(self, varname):
1048 """
1049 Find config files which are appropriate values for varname.
1050 i.e. MACHINE, DISTRO
1051 """
1052 possible = []
1053 var = varname.lower()
1054
1055 data = self.data
1056 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001057 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001058 for path in bbpaths:
1059 confpath = os.path.join(path, "conf", var)
1060 if os.path.exists(confpath):
1061 for root, dirs, files in os.walk(confpath):
1062 # get all child files, these are appropriate values
1063 for f in files:
1064 val, sep, end = f.rpartition('.')
1065 if end == 'conf':
1066 possible.append(val)
1067
1068 if possible:
1069 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1070
1071 def findInheritsClass(self, klass):
1072 """
1073 Find all recipes which inherit the specified class
1074 """
1075 pkg_list = []
1076
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001077 for pfn in self.recipecaches[''].pkg_fn:
1078 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001079 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001080 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001081
1082 return pkg_list
1083
1084 def generateTargetsTree(self, klass=None, pkgs=None):
1085 """
1086 Generate a dependency tree of buildable targets
1087 Generate an event with the result
1088 """
1089 # if the caller hasn't specified a pkgs list default to universe
1090 if not pkgs:
1091 pkgs = ['universe']
1092 # if inherited_class passed ensure all recipes which inherit the
1093 # specified class are included in pkgs
1094 if klass:
1095 extra_pkgs = self.findInheritsClass(klass)
1096 pkgs = pkgs + extra_pkgs
1097
1098 # generate a dependency tree for all our packages
1099 tree = self.generatePkgDepTreeData(pkgs, 'build')
1100 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1101
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001102 def interactiveMode( self ):
1103 """Drop off into a shell"""
1104 try:
1105 from bb import shell
1106 except ImportError:
1107 parselog.exception("Interactive mode not available")
1108 sys.exit(1)
1109 else:
1110 shell.start( self )
1111
1112
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001113 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001114 """Handle collections"""
1115 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001116 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001117 if collections:
1118 collection_priorities = {}
1119 collection_depends = {}
1120 collection_list = collections.split()
1121 min_prio = 0
1122 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001123 bb.debug(1,'Processing %s in collection list' % (c))
1124
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001125 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001126 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001127 if priority:
1128 try:
1129 prio = int(priority)
1130 except ValueError:
1131 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1132 errors = True
1133 if min_prio == 0 or prio < min_prio:
1134 min_prio = prio
1135 collection_priorities[c] = prio
1136 else:
1137 collection_priorities[c] = None
1138
1139 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001140 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001141 if deps:
1142 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001143 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001144 except bb.utils.VersionStringException as vse:
1145 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001146 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001147 if dep in collection_list:
1148 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001149 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001150 (op, depver) = opstr.split()
1151 if layerver:
1152 try:
1153 res = bb.utils.vercmp_string_op(layerver, depver, op)
1154 except bb.utils.VersionStringException as vse:
1155 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1156 if not res:
1157 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1158 errors = True
1159 else:
1160 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1161 errors = True
1162 else:
1163 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1164 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001165 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001166 else:
1167 collection_depends[c] = []
1168
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001169 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001170 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001171 if recs:
1172 try:
1173 recDict = bb.utils.explode_dep_versions2(recs)
1174 except bb.utils.VersionStringException as vse:
1175 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1176 for rec, oplist in list(recDict.items()):
1177 if rec in collection_list:
1178 if oplist:
1179 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001180 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001181 if layerver:
1182 (op, recver) = opstr.split()
1183 try:
1184 res = bb.utils.vercmp_string_op(layerver, recver, op)
1185 except bb.utils.VersionStringException as vse:
1186 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1187 if not res:
1188 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1189 continue
1190 else:
1191 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1192 continue
1193 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1194 collection_depends[c].append(rec)
1195 else:
1196 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1197
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001198 # Recursively work out collection priorities based on dependencies
1199 def calc_layer_priority(collection):
1200 if not collection_priorities[collection]:
1201 max_depprio = min_prio
1202 for dep in collection_depends[collection]:
1203 calc_layer_priority(dep)
1204 depprio = collection_priorities[dep]
1205 if depprio > max_depprio:
1206 max_depprio = depprio
1207 max_depprio += 1
1208 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1209 collection_priorities[collection] = max_depprio
1210
1211 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1212 for c in collection_list:
1213 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001214 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215 if regex == None:
1216 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1217 errors = True
1218 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001219 elif regex == "":
1220 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001221 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001222 errors = False
1223 else:
1224 try:
1225 cre = re.compile(regex)
1226 except re.error:
1227 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1228 errors = True
1229 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001230 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001231 if errors:
1232 # We've already printed the actual error(s)
1233 raise CollectionError("Errors during parsing layer configuration")
1234
1235 def buildSetVars(self):
1236 """
1237 Setup any variables needed before starting a build
1238 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001239 t = time.gmtime()
1240 for mc in self.databuilder.mcdata:
1241 ds = self.databuilder.mcdata[mc]
1242 if not ds.getVar("BUILDNAME", False):
1243 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1244 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1245 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1246 ds.setVar("TIME", time.strftime('%H%M%S', t))
1247
1248 def reset_mtime_caches(self):
1249 """
1250 Reset mtime caches - this is particularly important when memory resident as something
1251 which is cached is not unlikely to have changed since the last invocation (e.g. a
1252 file associated with a recipe might have been modified by the user).
1253 """
1254 build.reset_cache()
1255 bb.fetch._checksum_cache.mtime_cache.clear()
1256 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1257 if siggen_cache:
1258 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001259
1260 def matchFiles(self, bf):
1261 """
1262 Find the .bb files which match the expression in 'buildfile'.
1263 """
1264 if bf.startswith("/") or bf.startswith("../"):
1265 bf = os.path.abspath(bf)
1266
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001267 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001268 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001269 try:
1270 os.stat(bf)
1271 bf = os.path.abspath(bf)
1272 return [bf]
1273 except OSError:
1274 regexp = re.compile(bf)
1275 matches = []
1276 for f in filelist:
1277 if regexp.search(f) and os.path.isfile(f):
1278 matches.append(f)
1279 return matches
1280
1281 def matchFile(self, buildfile):
1282 """
1283 Find the .bb file which matches the expression in 'buildfile'.
1284 Raise an error if multiple files
1285 """
1286 matches = self.matchFiles(buildfile)
1287 if len(matches) != 1:
1288 if matches:
1289 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1290 if matches:
1291 for f in matches:
1292 msg += "\n %s" % f
1293 parselog.error(msg)
1294 else:
1295 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1296 raise NoSpecificMatch
1297 return matches[0]
1298
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001299 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001300 """
1301 Build the file matching regexp buildfile
1302 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001303 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001305 # Too many people use -b because they think it's how you normally
1306 # specify a target to be built, so show a warning
1307 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1308
1309 self.buildFileInternal(buildfile, task)
1310
1311 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1312 """
1313 Build the file matching regexp buildfile
1314 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001315
1316 # Parse the configuration here. We need to do it explicitly here since
1317 # buildFile() doesn't use the cache
1318 self.parseConfiguration()
1319
1320 # If we are told to do the None task then query the default task
1321 if (task == None):
1322 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001323 if not task.startswith("do_"):
1324 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001325
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001326 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001327 fn = self.matchFile(fn)
1328
1329 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001330 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001331
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001332 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1333
1334 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001335 infos = dict(infos)
1336
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001337 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001338 try:
1339 info_array = infos[fn]
1340 except KeyError:
1341 bb.fatal("%s does not exist" % fn)
1342
1343 if info_array[0].skipped:
1344 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1345
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001346 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001347
1348 # Tweak some variables
1349 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001350 self.recipecaches[mc].ignored_dependencies = set()
1351 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001352 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353
1354 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001355 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1356 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001357 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1358 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001359
1360 # Invalidate task for target if force mode active
1361 if self.configuration.force:
1362 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001363 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001364
1365 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001366 taskdata = {}
1367 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001368 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001369
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001370 if quietlog:
1371 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1372 bb.runqueue.logger.setLevel(logging.WARNING)
1373
1374 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1375 if fireevents:
1376 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001377
1378 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001379 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001380
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001381 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001382
1383 def buildFileIdle(server, rq, abort):
1384
1385 msg = None
1386 interrupted = 0
1387 if abort or self.state == state.forceshutdown:
1388 rq.finish_runqueue(True)
1389 msg = "Forced shutdown"
1390 interrupted = 2
1391 elif self.state == state.shutdown:
1392 rq.finish_runqueue(False)
1393 msg = "Stopped build"
1394 interrupted = 1
1395 failures = 0
1396 try:
1397 retval = rq.execute_runqueue()
1398 except runqueue.TaskFailure as exc:
1399 failures += len(exc.args)
1400 retval = False
1401 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001402 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001403 if quietlog:
1404 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001405 return False
1406
1407 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001408 if fireevents:
1409 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001410 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001411 # We trashed self.recipecaches above
1412 self.parsecache_valid = False
1413 self.configuration.limited_deps = False
1414 bb.parse.siggen.reset(self.data)
1415 if quietlog:
1416 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001417 return False
1418 if retval is True:
1419 return True
1420 return retval
1421
1422 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1423
def buildTargets(self, targets, task):
    """
    Attempt to build the targets specified.

    targets -- list of target names (optionally "name:do_task" or
               multiconfig-prefixed entries)
    task    -- task to run for each target; None selects the configured
               default command
    The build itself runs asynchronously via the server idle callback.
    """

    def buildTargetsIdle(server, rq, abort):
        # Idle-loop callback: pumps the runqueue each time the server is
        # idle.  Returns False when the build is finished (success or
        # failure), True or a float retval to be called again later.
        msg = None
        interrupted = 0
        if abort or self.state == state.forceshutdown:
            rq.finish_runqueue(True)
            msg = "Forced shutdown"
            interrupted = 2
        elif self.state == state.shutdown:
            rq.finish_runqueue(False)
            msg = "Stopped build"
            interrupted = 1
        failures = 0
        try:
            retval = rq.execute_runqueue()
        except runqueue.TaskFailure as exc:
            failures += len(exc.args)
            retval = False
        except SystemExit as exc:
            self.command.finishAsyncCommand(str(exc))
            return False

        if not retval:
            # Build complete: notify every multiconfig, and always finish
            # the async command even if firing an event raises.
            try:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
            finally:
                self.command.finishAsyncCommand(msg)
            return False
        if retval is True:
            return True
        return retval

    self.reset_mtime_caches()
    self.buildSetVars()

    # If we are told to do the None task then query the default task
    if (task == None):
        task = self.configuration.cmd

    if not task.startswith("do_"):
        task = "do_%s" % task

    # Targets without an explicit ":<task>" suffix get the current task appended.
    packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

    bb.event.fire(bb.event.BuildInit(packages), self.data)

    taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

    buildname = self.data.getVar("BUILDNAME", False)

    # make targets to always look as <target>:do_<task>
    ntargets = []
    for target in runlist:
        if target[0]:
            # Non-default multiconfig targets carry an "mc:<config>:" prefix.
            ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
        ntargets.append("%s:%s" % (target[1], target[2]))

    for mc in self.multiconfigs:
        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

    rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
    if 'universe' in targets:
        rq.rqdata.warn_multi_bb = True

    self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1494
1495
def getAllKeysWithFlags(self, flaglist):
    """Dump every datastore variable as {name: {'v', 'history', <flag>...}}.

    Variables whose name begins with "__" and nested datastores are
    omitted.  Each requested flag in flaglist is recorded (None when the
    variable does not carry it).
    """
    dump = {}
    for key in self.data.keys():
        try:
            flags = self.data.getVarFlags(key)
            # Python task functions are kept unexpanded: their bodies are code.
            expand = not (flags and "func" in flags and "python" in flags)
            value = self.data.getVar(key, expand)
            if key.startswith("__") or isinstance(value, bb.data_smart.DataSmart):
                continue
            dump[key] = {
                'v': str(value),
                'history': self.data.varhistory.variable(key),
            }
            for flag in flaglist:
                dump[key][flag] = flags[flag] if flags and flag in flags else None
        except Exception as err:
            print(err)
    return dump
1518
1519
def updateCacheSync(self):
    """Refresh parser caches for files reported as modified by inotify."""
    if self.state == state.running:
        return

    # Re-read every file we were notified about and drop any stale
    # cached statements for it.
    for modified in self.inotify_modified_files:
        bb.parse.update_cache(modified)
        cached = bb.parse.BBHandler.cached_statements
        if modified in cached:
            del cached[modified]
    self.inotify_modified_files = []

    # The base configuration itself may be stale; rebuild it if flagged.
    if not self.baseconfig_valid:
        logger.debug(1, "Reloading base configuration data")
        self.initConfigurationData()
        self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001535
# This is called for all async commands when self.state != running
def updateCache(self):
    """Drive metadata (re)parsing.

    If the parse cache is stale, reload configuration, collect recipe
    files and start a CookerParser; then pump the parser one step per
    call.  Returns True while parsing is still in progress and None once
    parsing is complete (state moves to running).  Raises
    bb.BBHandledException on shutdown or parse errors.
    """
    if self.state == state.running:
        return

    if self.state in (state.shutdown, state.forceshutdown, state.error):
        # Shutting down: stop any in-flight parser and bail out.
        if hasattr(self.parser, 'shutdown'):
            self.parser.shutdown(clean=False, force = True)
        raise bb.BBHandledException()

    if self.state != state.parsing:
        self.updateCacheSync()

    if self.state != state.parsing and not self.parsecache_valid:
        # Parse cache is stale: rebuild configuration and set up a fresh
        # parser over the newly collected recipe files.
        bb.parse.siggen.reset(self.data)
        self.parseConfiguration ()
        if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
            for mc in self.multiconfigs:
                bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

        for mc in self.multiconfigs:
            # ASSUME_PROVIDED entries (plus command-line extras) are never
            # treated as missing dependencies.
            ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
            self.recipecaches[mc].ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecaches[mc].ignored_dependencies.add(dep)

        self.collection = CookerCollectFiles(self.bbfile_config_priorities)
        (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)

        # Add inotify watches for directories searched for bb/bbappend files
        for dirent in searchdirs:
            self.add_filewatch([[dirent]], dirs=True)

        self.parser = CookerParser(self, filelist, masked)
        self.parsecache_valid = True

    self.state = state.parsing

    if not self.parser.parse_next():
        # Parsing finished: compute priorities and mark the cooker running.
        collectlog.debug(1, "parsing complete")
        if self.parser.error:
            raise bb.BBHandledException()
        self.show_appends_with_no_recipes()
        self.handlePrefProviders()
        for mc in self.multiconfigs:
            self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
        self.state = state.running

        # Send an event listing all stamps reachable after parsing
        # which the metadata may use to clean up stale data
        for mc in self.multiconfigs:
            event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
            bb.event.fire(event, self.databuilder.mcdata[mc])
        return None

    return True
1593
def checkPackages(self, pkgs_to_build, task=None):
    """Validate and normalise the requested build targets.

    Returns a new list in which the deprecated "multiconfig:" prefix is
    rewritten to "mc:" and the pseudo-targets 'world' and 'universe' are
    expanded into concrete targets from the recipe caches.

    Raises NothingToBuild when no targets were supplied.
    """

    # Return a copy, don't modify the original
    pkgs_to_build = pkgs_to_build[:]

    if len(pkgs_to_build) == 0:
        raise NothingToBuild

    ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
    # Iterate over a snapshot: the list is mutated while rewriting the
    # deprecated "multiconfig:" prefix, and mutating the list being
    # iterated would make the loop skip the entry after each rewrite.
    for pkg in list(pkgs_to_build):
        if pkg in ignore:
            parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
        if pkg.startswith("multiconfig:"):
            pkgs_to_build.remove(pkg)
            # Only rewrite the leading prefix, not any later occurrence.
            pkgs_to_build.append(pkg.replace("multiconfig:", "mc:", 1))

    if 'world' in pkgs_to_build:
        pkgs_to_build.remove('world')
        for mc in self.multiconfigs:
            bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
            for t in self.recipecaches[mc].world_target:
                if mc:
                    t = "mc:" + mc + ":" + t
                pkgs_to_build.append(t)

    if 'universe' in pkgs_to_build:
        parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
        parselog.debug(1, "collating packages for \"universe\"")
        pkgs_to_build.remove('universe')
        for mc in self.multiconfigs:
            for t in self.recipecaches[mc].universe_target:
                if task:
                    # Skip recipes that don't implement the requested task.
                    foundtask = False
                    for provider_fn in self.recipecaches[mc].providers[t]:
                        if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                            foundtask = True
                            break
                    if not foundtask:
                        bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                        continue
                if mc:
                    t = "mc:" + mc + ":" + t
                pkgs_to_build.append(t)

    return pkgs_to_build
1639
def pre_serve(self):
    """Hook run once the server is in its own process.

    The PR server must be started from here: it exits when its parent
    process exits, so it has to belong to the long-lived server process.
    """
    self.handlePRServ()
1645
def post_serve(self):
    """Tear down helper services once the server stops serving."""
    prserv.serv.auto_shutdown()
    hs = self.hashserv
    if hs:
        # Stop the hash equivalence server and reap its process.
        hs.process.terminate()
        hs.process.join()
    bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001652
def shutdown(self, force = False):
    """Request cooker shutdown; force asks for an immediate stop."""
    self.state = state.forceshutdown if force else state.shutdown

    if self.parser:
        # Also stop any in-flight parsing (unclean when forced).
        self.parser.shutdown(clean=not force, force=force)
1661
def finishcommand(self):
    # An async command has completed; return the cooker to its idle state.
    self.state = state.initial
1664
def reset(self):
    # Rebuild the base configuration data from scratch, then restart the
    # PR service against the fresh datastore.
    self.initConfigurationData()
    self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001668
def clientComplete(self):
    """Called when the client is done using the server"""
    self.finishcommand()
    self.extraconfigdata = {}
    self.command.reset()
    self.databuilder.reset()
    # databuilder.reset() replaces the datastore; rebind our reference.
    self.data = self.databuilder.data
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001677
class CookerExit(bb.event.Event):
    """Event fired to notify clients that the cooker is shutting down."""

    def __init__(self):
        super().__init__()
1686
class CookerCollectFiles(object):
    """Collects the .bb and .bbappend files to parse and assigns each a
    layer priority based on the configured BBFILE_PATTERN regexes."""

    def __init__(self, priorities):
        # (base recipe name, bbappend path) pairs, filled by collect_bbfiles().
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest. This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority(self, filename, matched=None):
        """Return the priority of the first layer pattern matching filename.

        When a set is passed as matched, every regex that ever matches is
        recorded in it (used later to warn about patterns matching nothing).
        Returns 0 when no pattern matches.
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() requires a str or tuple of str - the previous
            # list argument raised TypeError whenever this path was hit.
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files.

        Returns (bbfiles, masked, searchdirs): the recipe files in priority
        order, the count of files skipped via BBMASK, and every directory
        visited (so the caller can add inotify watches).
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar("BBFILES") or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem))

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            # Always restore the real os functions, even on error.
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    # Logger.warn is a deprecated alias of Logger.warning.
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except re.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except re.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # A '%' in the bbappend name acts as a wildcard on the prefix
            # before it (e.g. recipe_%.bbappend matches recipe_1.2.bb).
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        filelist.sort()
        return filelist

    def collection_priorities(self, pkgfns, d, ):
        """Map each virtual recipe filename to its layer priority and warn
        about BBFILE_PATTERN entries that matched no recipe at all."""

        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        def find_bbappend_match(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    # If the bbappend is matched by already "matched set", return False
                    for matched_regex in matched:
                        if matched_regex.match(append):
                            return False
                    return True
            return False

        for unmatch in unmatched.copy():
            if find_bbappend_match(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1895
class ParsingFailure(Exception):
    """Raised when a worker hit an exception while parsing a recipe.

    Carries the underlying exception and the recipe filename so the main
    process can report which file failed.
    """

    def __init__(self, realexception, recipe):
        super().__init__(realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1901
class Parser(multiprocessing.Process):
    """Recipe-parsing worker process.

    Pops (filename, appends) jobs from its job list, parses each via the
    class-level bb_cache (assigned by CookerParser.start()'s init hook)
    and puts the results on a shared result queue.
    """
    def __init__(self, jobs, results, quit, init, profile):
        # jobs: list of (filename, appends) tuples owned by this worker.
        # results: multiprocessing.Queue that parsed results are put on.
        # quit: multiprocessing.Queue; any message on it signals shutdown.
        # init: optional callable run once inside the child process.
        # profile: when true, run under cProfile and dump per-worker stats.
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the parent's context and event handlers so each parse
        # can restore pristine copies of them.
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # Child-process entry point; optionally wrapped in a profiler.
        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        # Main worker loop: parse jobs until the job list is exhausted or
        # a quit message arrives.
        if self.init:
            self.init()

        pending = []
        while True:
            # Any message on the quit queue means stop now.
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            # Retry a result that previously failed to enqueue before
            # picking up new work.
            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.pop()
                except IndexError:
                    # No jobs left: exit the worker loop.
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                # Result queue congested; keep the result and retry on the
                # next iteration.
                pending.append(result)

    def parse(self, filename, appends):
        """Parse one recipe; returns (True, infos) or (True, exception)."""
        try:
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, self.bb_cache.parse(filename, appends)
        except Exception as exc:
            # Attach recipe and traceback details so the parent process can
            # report the failure usefully.
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
1982
1983class CookerParser(object):
def __init__(self, cooker, filelist, masked):
    # filelist: every recipe file to consider.
    # masked: count of files excluded via BBMASK (reported in statistics).
    self.filelist = filelist
    self.cooker = cooker
    self.cfgdata = cooker.data
    self.cfghash = cooker.data_hash
    self.cfgbuilder = cooker.databuilder

    # Accounting statistics
    self.parsed = 0
    self.cached = 0
    self.error = 0
    self.masked = masked

    self.skipped = 0
    self.virtuals = 0
    self.total = len(filelist)

    self.current = 0
    self.process_names = []

    self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
    # Split recipes into those servable straight from the cache and those
    # needing a full reparse by the worker pool.
    self.fromcache = []
    self.willparse = []
    for filename in self.filelist:
        appends = self.cooker.collection.get_file_appends(filename)
        if not self.bb_cache.cacheValid(filename, appends):
            self.willparse.append((filename, appends))
        else:
            self.fromcache.append((filename, appends))
    self.toparse = self.total - len(self.fromcache)
    # Emit progress roughly once per percent of files to parse.
    self.progress_chunk = int(max(self.toparse / 100, 1))

    self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                             multiprocessing.cpu_count()), len(self.willparse))

    self.start()
    self.haveshutdown = False
2021
def start(self):
    """Start the parse: serve cached results first, then spin up the
    worker pool for everything needing a real parse."""
    self.results = self.load_cached()
    self.processes = []
    if self.toparse:
        bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
        def init():
            # Runs once inside each worker: point it at the shared cache
            # and register finalizers to save parser/fetcher state on exit.
            Parser.bb_cache = self.bb_cache
            bb.utils.set_process_name(multiprocessing.current_process().name)
            multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
            multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

        self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
        self.result_queue = multiprocessing.Queue()

        # Deal the files out round-robin, one chunk per worker process.
        def chunkify(lst,n):
            return [lst[i::n] for i in range(n)]
        self.jobs = chunkify(self.willparse, self.num_processes)

        for i in range(0, self.num_processes):
            parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
            parser.start()
            self.process_names.append(parser.name)
            self.processes.append(parser)

        # Chain worker results after the cached ones.
        self.results = itertools.chain(self.results, self.parse_generator())
2047
def shutdown(self, clean=True, force=False):
    """Stop the worker pool.

    clean -- fire the ParseCompleted statistics event before stopping.
    force -- give workers only a short grace period, then terminate them.
    """
    if not self.toparse:
        return
    if self.haveshutdown:
        return
    self.haveshutdown = True

    if clean:
        event = bb.event.ParseCompleted(self.cached, self.parsed,
                                        self.skipped, self.masked,
                                        self.virtuals, self.error,
                                        self.total)

        bb.event.fire(event, self.cfgdata)
        for process in self.processes:
            self.parser_quit.put(None)
    else:
        # Unclean shutdown: don't block on flushing the quit queue.
        self.parser_quit.cancel_join_thread()
        for process in self.processes:
            self.parser_quit.put(None)

    # Cleanup the queue before call process.join(), otherwise there might be
    # deadlocks.
    while True:
        try:
            self.result_queue.get(timeout=0.25)
        except queue.Empty:
            break

    for process in self.processes:
        if force:
            process.join(.1)
            process.terminate()
        else:
            process.join()

    # Save the recipe cache in a background thread, joined at interpreter
    # exit via a multiprocessing finalizer.
    sync = threading.Thread(target=self.bb_cache.sync)
    sync.start()
    multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
    bb.codeparser.parser_cache_savemerge()
    bb.fetch.fetcher_parse_done()
    if self.cooker.configuration.profile:
        # Merge the per-worker profile dumps into one processed report.
        profiles = []
        for i in self.process_names:
            logfile = "profile-parse-%s.log" % i
            if os.path.exists(logfile):
                profiles.append(logfile)

        pout = "profile-parse.log.processed"
        bb.utils.process_profilelog(profiles, pout = pout)
        print("Processed parsing statistics saved to %s" % (pout))
2099
2100 def load_cached(self):
2101 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002102 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002103 yield not cached, infos
2104
2105 def parse_generator(self):
2106 while True:
2107 if self.parsed >= self.toparse:
2108 break
2109
2110 try:
2111 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002112 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002113 pass
2114 else:
2115 value = result[1]
2116 if isinstance(value, BaseException):
2117 raise value
2118 else:
2119 yield result
2120
    def parse_next(self):
        """
        Consume one result from the parse pipeline and fold it into the
        cooker's state.

        Returns True while parsing should continue, False once the pipeline
        is exhausted (clean shutdown) or an error occurred (unclean shutdown,
        with self.error incremented). Each exception type gets its own
        handler so the error message can include as much recipe context as
        that exception carries.
        """
        result = []
        parsed = None
        try:
            parsed, result = next(self.results)
        except StopIteration:
            # All cached and freshly-parsed results consumed: normal end.
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # Error was already reported in full by lower layers; just count
            # it and name the recipe.
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            # Trim leading bitbake-internal frames so the traceback starts at
            # the user's recipe/class code.
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            # Catch-all for anything the specific handlers above missed.
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        # Success path: update progress counters. `parsed` is truthy when the
        # recipe was freshly parsed, falsy when it came from the cache.
        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            # Fire progress events only every progress_chunk recipes to keep
            # event traffic down.
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Register every virtual provider from this recipe with the
        # per-multiconfig recipe cache (and record skipped ones).
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2185
2186 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002187 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002188 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002189 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2190 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)