#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

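# Enumeration of the cooker's lifecycle states. get_name() maps a numeric
# state code back to its symbolic name for logging and error reporting.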
class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


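# Writes UI events to an event log file. Each event is pickled, base64
# encoded and stored as a JSON record; events arriving before
# bb.event.BuildStarted are queued and flushed once the file is initialised.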
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservport = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

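    # pyinotify callbacks: a change to a watched configuration file invalidates
    # the base configuration, any other watched file invalidates the parse
    # cache, and a queue overflow invalidates both since events may have been lost.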
    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

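    # Register inotify watches for a list of (file, ...) dependency entries.
    # For files that do not exist yet, the closest existing parent directory
    # is watched instead so that creating the file later is still noticed.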
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

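        # A BB_HASHSERVE value of "localhost:0" requests a locally spawned hash
        # equivalence server: one is started on a free port in a separate
        # process and the resolved "localhost:<port>" address is written back
        # into every datastore so tasks can reach it.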
        if self.data.getVar("BB_HASHSERVE") == "localhost:0":
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashserv = hashserv.create_server(('localhost', 0), dbfile, '')
                self.hashservport = "localhost:" + str(self.hashserv.server_port)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.daemon = True
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservport)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservport)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservport)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservport)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

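    # Fold in option and environment changes sent by the UI. Changes to
    # 'prefile'/'postfile' or to any approved environment variable mark the
    # configuration dirty and trigger a reset (and hence a reparse).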
    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Expand a target such as "mc:*:bash" into
        # "mc:A:bash mc:B:bash bash", one entry per configured multiconfig
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

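    # Prefix a recipe name with its multiconfig, e.g. ("foo", "bash") becomes
    # "mc:foo:bash"; recipes in the default configuration are left unchanged.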
    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

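    # Build the dependency tree dictionary consumed by the UIs and the .dot
    # generators: per-recipe data under "pn"/"depends"/"rdepends-pn",
    # per-task edges under "tdepends", per-package runtime data under
    # "packages"/"rdepends-pkg"/"rrecs-pkg", plus provider and layer maps.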
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

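    # Resolve the best provider for 'pn': if pn is a known provider name only
    # the chosen filename (last element of the 4-tuple) is filled in,
    # otherwise the lookup is delegated to bb.providers.findBestProvider().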
    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


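    # Parse BBFILE_COLLECTIONS: validate each layer's LAYERDEPENDS/
    # LAYERRECOMMENDS constraints, compute layer priorities (explicit
    # BBFILE_PRIORITY values or derived from the dependency graph) and record
    # the per-layer BBFILE_PATTERN regexes in bbfile_config_priorities.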
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001130 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001131 """Handle collections"""
1132 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001133 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001134 if collections:
1135 collection_priorities = {}
1136 collection_depends = {}
1137 collection_list = collections.split()
1138 min_prio = 0
1139 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001140 bb.debug(1,'Processing %s in collection list' % (c))
1141
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001142 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001143 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001144 if priority:
1145 try:
1146 prio = int(priority)
1147 except ValueError:
1148 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1149 errors = True
1150 if min_prio == 0 or prio < min_prio:
1151 min_prio = prio
1152 collection_priorities[c] = prio
1153 else:
1154 collection_priorities[c] = None
1155
1156 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001157 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001158 if deps:
1159 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001160 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001161 except bb.utils.VersionStringException as vse:
1162 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001163 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001164 if dep in collection_list:
1165 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001166 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001167 (op, depver) = opstr.split()
1168 if layerver:
1169 try:
1170 res = bb.utils.vercmp_string_op(layerver, depver, op)
1171 except bb.utils.VersionStringException as vse:
1172 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1173 if not res:
1174 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1175 errors = True
1176 else:
1177 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1178 errors = True
1179 else:
1180 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1181 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001182 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001183 else:
1184 collection_depends[c] = []
1185
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001186 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001187 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001188 if recs:
1189 try:
1190 recDict = bb.utils.explode_dep_versions2(recs)
1191 except bb.utils.VersionStringException as vse:
1192 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1193 for rec, oplist in list(recDict.items()):
1194 if rec in collection_list:
1195 if oplist:
1196 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001197 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001198 if layerver:
1199 (op, recver) = opstr.split()
1200 try:
1201 res = bb.utils.vercmp_string_op(layerver, recver, op)
1202 except bb.utils.VersionStringException as vse:
1203 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1204 if not res:
1205 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1206 continue
1207 else:
1208 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1209 continue
1210 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1211 collection_depends[c].append(rec)
1212 else:
1213 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1214
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001215 # Recursively work out collection priorities based on dependencies
1216 def calc_layer_priority(collection):
1217 if not collection_priorities[collection]:
1218 max_depprio = min_prio
1219 for dep in collection_depends[collection]:
1220 calc_layer_priority(dep)
1221 depprio = collection_priorities[dep]
1222 if depprio > max_depprio:
1223 max_depprio = depprio
1224 max_depprio += 1
1225 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1226 collection_priorities[collection] = max_depprio
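        # Illustrative sketch of the calculation above, using hypothetical layer names:
        # with BBFILE_PRIORITY_core = "5" set explicitly and no explicit priority for the
        # others, a layer "bsp" with LAYERDEPENDS on "core" gets 5 + 1 = 6, a layer "app"
        # depending on "bsp" gets 6 + 1 = 7, and an unprioritized layer with no
        # dependencies falls back to min_prio + 1.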
1227
1228 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1229 for c in collection_list:
1230 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001231 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001232             if regex is None:
1233 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1234 errors = True
1235 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001236 elif regex == "":
1237 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001238 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001239 errors = False
1240 else:
1241 try:
1242 cre = re.compile(regex)
1243 except re.error:
1244 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1245 errors = True
1246 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001247 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001248 if errors:
1249 # We've already printed the actual error(s)
1250             raise CollectionError("Errors while parsing layer configuration")
1251
1252 def buildSetVars(self):
1253 """
1254         Set up any variables needed before starting a build
1255 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001256 t = time.gmtime()
1257 for mc in self.databuilder.mcdata:
1258 ds = self.databuilder.mcdata[mc]
1259 if not ds.getVar("BUILDNAME", False):
1260 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1261 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1262 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1263 ds.setVar("TIME", time.strftime('%H%M%S', t))
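        # For example (illustrative timestamp only), a build started at 2019-08-20
        # 09:16:51 UTC gets DATE = "20190820", TIME = "091651" and
        # BUILDSTART = "08/20/2019 09:16:51"; the default BUILDNAME above then
        # expands to "20190820091651".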
1264
1265 def reset_mtime_caches(self):
1266 """
1267         Reset mtime caches - this is particularly important when memory resident, since something
1268         which is cached may well have changed since the last invocation (e.g. a
1269         file associated with a recipe might have been modified by the user).
1270 """
1271 build.reset_cache()
1272 bb.fetch._checksum_cache.mtime_cache.clear()
1273 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1274 if siggen_cache:
1275 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001276
1277 def matchFiles(self, bf):
1278 """
1279 Find the .bb files which match the expression in 'buildfile'.
1280 """
1281 if bf.startswith("/") or bf.startswith("../"):
1282 bf = os.path.abspath(bf)
1283
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001284 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001285 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001286 try:
1287 os.stat(bf)
1288 bf = os.path.abspath(bf)
1289 return [bf]
1290 except OSError:
1291 regexp = re.compile(bf)
1292 matches = []
1293 for f in filelist:
1294 if regexp.search(f) and os.path.isfile(f):
1295 matches.append(f)
1296 return matches
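    # Usage sketch (hypothetical paths): if the argument names an existing file it is
    # returned directly, e.g. matchFiles("recipes-core/busybox/busybox_1.31.0.bb") gives
    # that file's absolute path; otherwise the argument is treated as a regular expression
    # searched against every collected recipe path, so matchFiles("busybox_.*\.bb") may
    # return several matches.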
1297
1298 def matchFile(self, buildfile):
1299 """
1300 Find the .bb file which matches the expression in 'buildfile'.
1301         Raise an error if multiple or no files match.
1302 """
1303 matches = self.matchFiles(buildfile)
1304 if len(matches) != 1:
1305 if matches:
1306 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1308                 for f in matches:
1309                     msg += "\n    %s" % f
1310 parselog.error(msg)
1311 else:
1312 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1313 raise NoSpecificMatch
1314 return matches[0]
1315
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001316 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001317 """
1318 Build the file matching regexp buildfile
1319 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001320 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001321
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001322 # Too many people use -b because they think it's how you normally
1323 # specify a target to be built, so show a warning
1324 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1325
1326 self.buildFileInternal(buildfile, task)
1327
1328 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1329 """
1330 Build the file matching regexp buildfile
1331 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
1333 # Parse the configuration here. We need to do it explicitly here since
1334 # buildFile() doesn't use the cache
1335 self.parseConfiguration()
1336
1337 # If we are told to do the None task then query the default task
1338         if task is None:
1339 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001340 if not task.startswith("do_"):
1341 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001342
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001343 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001344 fn = self.matchFile(fn)
1345
1346 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001347 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001348
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001349 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1350
1351 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001352 infos = dict(infos)
1353
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001354 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001355 try:
1356 info_array = infos[fn]
1357 except KeyError:
1358 bb.fatal("%s does not exist" % fn)
1359
1360 if info_array[0].skipped:
1361 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1362
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001363 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001364
1365 # Tweak some variables
1366 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001367 self.recipecaches[mc].ignored_dependencies = set()
1368 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001369 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001370
1371 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001372 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1373 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001374 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1375 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001376
1377 # Invalidate task for target if force mode active
1378 if self.configuration.force:
1379 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001380 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
1382 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001383 taskdata = {}
1384 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001385 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001386
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001387 if quietlog:
1388 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1389 bb.runqueue.logger.setLevel(logging.WARNING)
1390
1391 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1392 if fireevents:
1393 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001394
1395 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001396 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001397
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001398 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001399
1400 def buildFileIdle(server, rq, abort):
1401
1402 msg = None
1403 interrupted = 0
1404 if abort or self.state == state.forceshutdown:
1405 rq.finish_runqueue(True)
1406 msg = "Forced shutdown"
1407 interrupted = 2
1408 elif self.state == state.shutdown:
1409 rq.finish_runqueue(False)
1410 msg = "Stopped build"
1411 interrupted = 1
1412 failures = 0
1413 try:
1414 retval = rq.execute_runqueue()
1415 except runqueue.TaskFailure as exc:
1416 failures += len(exc.args)
1417 retval = False
1418 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001419 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001420 if quietlog:
1421 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001422 return False
1423
1424 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001425 if fireevents:
1426 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001427 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001428 # We trashed self.recipecaches above
1429 self.parsecache_valid = False
1430 self.configuration.limited_deps = False
1431 bb.parse.siggen.reset(self.data)
1432 if quietlog:
1433 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001434 return False
1435 if retval is True:
1436 return True
1437 return retval
1438
1439 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1440
1441 def buildTargets(self, targets, task):
1442 """
1443 Attempt to build the targets specified
1444 """
1445
1446 def buildTargetsIdle(server, rq, abort):
1447 msg = None
1448 interrupted = 0
1449 if abort or self.state == state.forceshutdown:
1450 rq.finish_runqueue(True)
1451 msg = "Forced shutdown"
1452 interrupted = 2
1453 elif self.state == state.shutdown:
1454 rq.finish_runqueue(False)
1455 msg = "Stopped build"
1456 interrupted = 1
1457 failures = 0
1458 try:
1459 retval = rq.execute_runqueue()
1460 except runqueue.TaskFailure as exc:
1461 failures += len(exc.args)
1462 retval = False
1463 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001464 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001465 return False
1466
1467 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001468 try:
1469 for mc in self.multiconfigs:
1470 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1471 finally:
1472 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001473 return False
1474 if retval is True:
1475 return True
1476 return retval
1477
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001478 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001479 self.buildSetVars()
1480
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001481 # If we are told to do the None task then query the default task
1482         if task is None:
1483 task = self.configuration.cmd
1484
1485 if not task.startswith("do_"):
1486 task = "do_%s" % task
1487
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001488 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
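        # e.g. (hypothetical targets) ["zlib", "mc:qemuarm:zlib:do_fetch"] with the default
        # task "do_build" becomes ["zlib:do_build", "mc:qemuarm:zlib:do_fetch"]; entries
        # already containing ':' are passed through unchanged.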
1489
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001490 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001491
1492 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001493
1494 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001495
1496         # make targets always look like <target>:do_<task>
1497 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001498 for target in runlist:
1499 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001500 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001501 ntargets.append("%s:%s" % (target[1], target[2]))
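        # e.g. a hypothetical runlist entry ["qemuarm", "zlib", "do_build", fn] yields both
        # "mc:qemuarm:zlib:do_build" and "zlib:do_build", while entries with an empty
        # multiconfig name only produce the plain "<target>:<task>" form.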
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001502
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001503 for mc in self.multiconfigs:
1504 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001505
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001506 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001507 if 'universe' in targets:
1508 rq.rqdata.warn_multi_bb = True
1509
1510 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1511
1512
1513 def getAllKeysWithFlags(self, flaglist):
1514 dump = {}
1515 for k in self.data.keys():
1516 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001517 expand = True
1518 flags = self.data.getVarFlags(k)
1519 if flags and "func" in flags and "python" in flags:
1520 expand = False
1521 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001522 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1523 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001524 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001525 'history' : self.data.varhistory.variable(k),
1526 }
1527 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001528 if flags and d in flags:
1529 dump[k][d] = flags[d]
1530 else:
1531 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532 except Exception as e:
1533 print(e)
1534 return dump
1535
1536
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001537 def updateCacheSync(self):
1538 if self.state == state.running:
1539 return
1540
1541 # reload files for which we got notifications
1542 for p in self.inotify_modified_files:
1543 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001544 if p in bb.parse.BBHandler.cached_statements:
1545 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001546 self.inotify_modified_files = []
1547
1548 if not self.baseconfig_valid:
1549 logger.debug(1, "Reloading base configuration data")
1550 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001551 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001552
1553 # This is called for all async commands when self.state != running
1554 def updateCache(self):
1555 if self.state == state.running:
1556 return
1557
1558 if self.state in (state.shutdown, state.forceshutdown, state.error):
1559 if hasattr(self.parser, 'shutdown'):
1560 self.parser.shutdown(clean=False, force = True)
1561 raise bb.BBHandledException()
1562
1563 if self.state != state.parsing:
1564 self.updateCacheSync()
1565
1566 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001567 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 self.parseConfiguration ()
1569 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001570 for mc in self.multiconfigs:
1571 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001572
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001573 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001574 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001575 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001576
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001577 for dep in self.configuration.extra_assume_provided:
1578 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001579
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001580 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001581 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1582
1583 # Add inotify watches for directories searched for bb/bbappend files
1584 for dirent in searchdirs:
1585 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001586
1587 self.parser = CookerParser(self, filelist, masked)
1588 self.parsecache_valid = True
1589
1590 self.state = state.parsing
1591
1592 if not self.parser.parse_next():
1593 collectlog.debug(1, "parsing complete")
1594 if self.parser.error:
1595 raise bb.BBHandledException()
1596 self.show_appends_with_no_recipes()
1597 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001598 for mc in self.multiconfigs:
1599 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001600 self.state = state.running
1601
1602 # Send an event listing all stamps reachable after parsing
1603 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001604 for mc in self.multiconfigs:
1605 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1606 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001607 return None
1608
1609 return True
1610
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001611 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612
1613 # Return a copy, don't modify the original
1614 pkgs_to_build = pkgs_to_build[:]
1615
1616 if len(pkgs_to_build) == 0:
1617 raise NothingToBuild
1618
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001619 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001620         for pkg in pkgs_to_build[:]: # iterate over a copy; entries may be removed and re-added below
1621 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001622 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001623 if pkg.startswith("multiconfig:"):
1624 pkgs_to_build.remove(pkg)
1625 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001626
1627 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001628 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001629 for mc in self.multiconfigs:
1630 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1631 for t in self.recipecaches[mc].world_target:
1632 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001633 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001634 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635
1636 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001637 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001638 parselog.debug(1, "collating packages for \"universe\"")
1639 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001640 for mc in self.multiconfigs:
1641 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001642 if task:
1643 foundtask = False
1644 for provider_fn in self.recipecaches[mc].providers[t]:
1645 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1646 foundtask = True
1647 break
1648 if not foundtask:
1649 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1650 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001651 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001652 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001653 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654
1655 return pkgs_to_build
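    # Expansion sketch (hypothetical names): with a multiconfig "qemuarm" enabled, "world"
    # is replaced by one entry per world-buildable recipe, prefixed "mc:qemuarm:" for that
    # multiconfig (no prefix for the default configuration); ordinary targets are returned
    # unchanged apart from the legacy "multiconfig:" prefix being rewritten to "mc:".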
1656
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001657 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001658         # We are now in our own process so we can call this here.
1659 # PRServ exits if its parent process exits
1660 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001661 return
1662
1663 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001664 prserv.serv.auto_shutdown()
Brad Bishop08902b02019-08-20 09:16:51 -04001665 if self.hashserv:
1666 self.hashserv.process.terminate()
1667 self.hashserv.process.join()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001668 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001669
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001670 def shutdown(self, force = False):
1671 if force:
1672 self.state = state.forceshutdown
1673 else:
1674 self.state = state.shutdown
1675
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001676 if self.parser:
1677 self.parser.shutdown(clean=not force, force=force)
1678
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001679 def finishcommand(self):
1680 self.state = state.initial
1681
1682 def reset(self):
1683 self.initConfigurationData()
Brad Bishop08902b02019-08-20 09:16:51 -04001684 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001685
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001686 def clientComplete(self):
1687 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001688 self.finishcommand()
1689 self.extraconfigdata = {}
1690 self.command.reset()
1691 self.databuilder.reset()
1692 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001693
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001694
1695class CookerExit(bb.event.Event):
1696 """
1697 Notify clients of the Cooker shutdown
1698 """
1699
1700 def __init__(self):
1701 bb.event.Event.__init__(self)
1702
1703
1704class CookerCollectFiles(object):
1705 def __init__(self, priorities):
1706 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001707         # Priorities is a list of tuples, with the second element as the pattern.
1708 # We need to sort the list with the longest pattern first, and so on to
1709 # the shortest. This allows nested layers to be properly evaluated.
1710 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001711
1712 def calc_bbfile_priority( self, filename, matched = None ):
1713 for _, _, regex, pri in self.bbfile_config_priorities:
1714 if regex.match(filename):
1715                 if matched is not None:
1716                     if regex not in matched:
1717 matched.add(regex)
1718 return pri
1719 return 0
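    # Example (hypothetical values): with BBFILE_PATTERN_core = "^/srv/layers/meta/" and
    # BBFILE_PRIORITY_core = "5", calc_bbfile_priority("/srv/layers/meta/recipes/foo.bb")
    # returns 5; a path matching no collection pattern falls through to priority 0.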
1720
1721 def get_bbfiles(self):
1722 """Get list of default .bb files by reading out the current directory"""
1723         """Get the list of default .bb files by scanning the current directory"""
1724 contents = os.listdir(path)
1725 bbfiles = []
1726 for f in contents:
1727 if f.endswith(".bb"):
1728 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1729 return bbfiles
1730
1731 def find_bbfiles(self, path):
1732 """Find all the .bb and .bbappend files in a directory"""
1733 found = []
1734 for dir, dirs, files in os.walk(path):
1735 for ignored in ('SCCS', 'CVS', '.svn'):
1736 if ignored in dirs:
1737 dirs.remove(ignored)
1738             found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1739
1740 return found
1741
1742 def collect_bbfiles(self, config, eventdata):
1743 """Collect all available .bb build files"""
1744 masked = 0
1745
1746 collectlog.debug(1, "collecting .bb files")
1747
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001748 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001749 config.setVar("BBFILES", " ".join(files))
1750
1751 # Sort files by priority
1752 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1753
1754 if not len(files):
1755 files = self.get_bbfiles()
1756
1757 if not len(files):
1758 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1759 bb.event.fire(CookerExit(), eventdata)
1760
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001761 # We need to track where we look so that we can add inotify watches. There
1762         # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001763 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001764 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001765 if hasattr(os, 'scandir'):
1766 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001767 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001768
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001769 def ourlistdir(d):
1770 searchdirs.append(d)
1771 return origlistdir(d)
1772
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001773 def ourscandir(d):
1774 searchdirs.append(d)
1775 return origscandir(d)
1776
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001777 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001778 if hasattr(os, 'scandir'):
1779 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001780 try:
1781 # Can't use set here as order is important
1782 newfiles = []
1783 for f in files:
1784 if os.path.isdir(f):
1785 dirfiles = self.find_bbfiles(f)
1786 for g in dirfiles:
1787 if g not in newfiles:
1788 newfiles.append(g)
1789 else:
1790 globbed = glob.glob(f)
1791 if not globbed and os.path.exists(f):
1792 globbed = [f]
1793 # glob gives files in order on disk. Sort to be deterministic.
1794 for g in sorted(globbed):
1795 if g not in newfiles:
1796 newfiles.append(g)
1797 finally:
1798 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001799 if hasattr(os, 'scandir'):
1800 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001801
1802 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001803
1804 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001805 # First validate the individual regular expressions and ignore any
1806 # that do not compile
1807 bbmasks = []
1808 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001809 # When constructing an older style single regex, it's possible for BBMASK
1810 # to end up beginning with '|', which matches and masks _everything_.
1811 if mask.startswith("|"):
1812                     collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1813 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001814 try:
1815 re.compile(mask)
1816 bbmasks.append(mask)
1817 except sre_constants.error:
1818 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1819
1820 # Then validate the combined regular expressions. This should never
1821 # fail, but better safe than sorry...
1822 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001823 try:
1824 bbmask_compiled = re.compile(bbmask)
1825 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001826 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1827 bbmask = None
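        # Masking sketch (hypothetical BBMASK value): BBMASK = "meta-foo/recipes-bad/ \.bbappend$"
        # is combined into "meta-foo/recipes-bad/|\.bbappend$", so any collected file whose
        # path matches either part is skipped (and counted as masked) in the loop below.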
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001828
1829 bbfiles = []
1830 bbappend = []
1831 for f in newfiles:
1832 if bbmask and bbmask_compiled.search(f):
1833 collectlog.debug(1, "skipping masked file %s", f)
1834 masked += 1
1835 continue
1836 if f.endswith('.bb'):
1837 bbfiles.append(f)
1838 elif f.endswith('.bbappend'):
1839 bbappend.append(f)
1840 else:
1841 collectlog.debug(1, "skipping %s: unknown file extension", f)
1842
1843 # Build a list of .bbappend files for each .bb file
1844 for f in bbappend:
1845 base = os.path.basename(f).replace('.bbappend', '.bb')
1846 self.bbappends.append((base, f))
1847
1848 # Find overlayed recipes
1849 # bbfiles will be in priority order which makes this easy
1850 bbfile_seen = dict()
1851 self.overlayed = defaultdict(list)
1852 for f in reversed(bbfiles):
1853 base = os.path.basename(f)
1854 if base not in bbfile_seen:
1855 bbfile_seen[base] = f
1856 else:
1857 topfile = bbfile_seen[base]
1858 self.overlayed[topfile].append(f)
1859
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001860 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001861
1862 def get_file_appends(self, fn):
1863 """
1864 Returns a list of .bbappend files to apply to fn
1865 """
1866 filelist = []
1867 f = os.path.basename(fn)
1868 for b in self.bbappends:
1869 (bbappend, filename) = b
1870 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1871 filelist.append(filename)
1872 return filelist
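    # Wildcard illustration (hypothetical names): "busybox_%.bbappend" is recorded with the
    # base name "busybox_%.bb", and the check above makes it apply to every recipe whose
    # file name starts with "busybox_", e.g. "busybox_1.31.0.bb".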
1873
1874 def collection_priorities(self, pkgfns, d):
1875
1876 priorities = {}
1877
1878 # Calculate priorities for each file
1879 matched = set()
1880 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001881 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001882 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1883
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001884 unmatched = set()
1885 for _, _, regex, pri in self.bbfile_config_priorities:
1886 if not regex in matched:
1887 unmatched.add(regex)
1888
Brad Bishop316dfdd2018-06-25 12:45:53 -04001889 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1890 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001891 for b in self.bbappends:
1892 (bbfile, append) = b
1893 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001894                     # If the bbappend is already covered by a regex in the matched set, return False
1895 for matched_regex in matched:
1896 if matched_regex.match(append):
1897 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001898 return True
1899 return False
1900
1901 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001902 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001903 unmatched.remove(unmatch)
1904
1905 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1906 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001907 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001908 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001909
1910 return priorities
1911
1912class ParsingFailure(Exception):
1913 def __init__(self, realexception, recipe):
1914 self.realexception = realexception
1915 self.recipe = recipe
1916 Exception.__init__(self, realexception, recipe)
1917
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001918class Parser(multiprocessing.Process):
1919 def __init__(self, jobs, results, quit, init, profile):
1920 self.jobs = jobs
1921 self.results = results
1922 self.quit = quit
1923 self.init = init
1924 multiprocessing.Process.__init__(self)
1925 self.context = bb.utils.get_context().copy()
1926 self.handlers = bb.event.get_class_handlers().copy()
1927 self.profile = profile
1928
1929 def run(self):
1930
1931 if not self.profile:
1932 self.realrun()
1933 return
1934
1935 try:
1936 import cProfile as profile
1937         except ImportError:
1938 import profile
1939 prof = profile.Profile()
1940 try:
1941 profile.Profile.runcall(prof, self.realrun)
1942 finally:
1943 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1944 prof.dump_stats(logfile)
1945
1946 def realrun(self):
1947 if self.init:
1948 self.init()
1949
1950 pending = []
1951 while True:
1952 try:
1953 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001954 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001955 pass
1956 else:
1957 self.results.cancel_join_thread()
1958 break
1959
1960 if pending:
1961 result = pending.pop()
1962 else:
1963 try:
Brad Bishop19323692019-04-05 15:28:33 -04001964 job = self.jobs.pop()
1965 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001966 break
1967 result = self.parse(*job)
1968
1969 try:
1970 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001971 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001972 pending.append(result)
1973
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001974 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001975 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001976 # Record the filename we're parsing into any events generated
1977 def parse_filter(self, record):
1978 record.taskpid = bb.event.worker_pid
1979 record.fn = filename
1980 return True
1981
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001982 # Reset our environment and handlers to the original settings
1983 bb.utils.set_context(self.context.copy())
1984 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001985 bb.event.LogHandler.filter = parse_filter
1986
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001987 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001988 except Exception as exc:
1989 tb = sys.exc_info()[2]
1990 exc.recipe = filename
1991 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1992 return True, exc
1993         # Need to turn BaseExceptions into Exceptions here so we shut down gracefully
1994         # and, for example, a worker thread doesn't just exit on its own in response to
1995         # a SystemExit event.
1996 except BaseException as exc:
1997 return True, ParsingFailure(exc, filename)
1998
1999class CookerParser(object):
2000 def __init__(self, cooker, filelist, masked):
2001 self.filelist = filelist
2002 self.cooker = cooker
2003 self.cfgdata = cooker.data
2004 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002005 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002006
2007 # Accounting statistics
2008 self.parsed = 0
2009 self.cached = 0
2010 self.error = 0
2011 self.masked = masked
2012
2013 self.skipped = 0
2014 self.virtuals = 0
2015 self.total = len(filelist)
2016
2017 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002018 self.process_names = []
2019
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002020 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002021 self.fromcache = []
2022 self.willparse = []
2023 for filename in self.filelist:
2024 appends = self.cooker.collection.get_file_appends(filename)
2025 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002026 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002027 else:
2028 self.fromcache.append((filename, appends))
2029 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002030 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002031
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002032 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002033 multiprocessing.cpu_count()), len(self.willparse))
2034
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002035 self.start()
2036 self.haveshutdown = False
2037
2038 def start(self):
2039 self.results = self.load_cached()
2040 self.processes = []
2041 if self.toparse:
2042 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2043 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002044 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002045 bb.utils.set_process_name(multiprocessing.current_process().name)
2046 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2047 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002048
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002049 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002050 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002051
2052 def chunkify(lst,n):
2053 return [lst[i::n] for i in range(n)]
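        # e.g. chunkify([f1, f2, f3, f4, f5], 2) -> [[f1, f3, f5], [f2, f4]]: a round-robin
        # split so each parser process gets a similar share of the recipes to parse.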
2054 self.jobs = chunkify(self.willparse, self.num_processes)
2055
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002056 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002057 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002058 parser.start()
2059 self.process_names.append(parser.name)
2060 self.processes.append(parser)
2061
2062 self.results = itertools.chain(self.results, self.parse_generator())
2063
2064 def shutdown(self, clean=True, force=False):
2065 if not self.toparse:
2066 return
2067 if self.haveshutdown:
2068 return
2069 self.haveshutdown = True
2070
2071 if clean:
2072 event = bb.event.ParseCompleted(self.cached, self.parsed,
2073 self.skipped, self.masked,
2074 self.virtuals, self.error,
2075 self.total)
2076
2077 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002078 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002079 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002080 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002081 self.parser_quit.cancel_join_thread()
2082 for process in self.processes:
2083 self.parser_quit.put(None)
2084
Brad Bishop08902b02019-08-20 09:16:51 -04002085         # Clean up the queue before calling process.join(), otherwise there might be
2086 # deadlocks.
2087 while True:
2088 try:
2089 self.result_queue.get(timeout=0.25)
2090 except queue.Empty:
2091 break
2092
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002093 for process in self.processes:
2094 if force:
2095 process.join(.1)
2096 process.terminate()
2097 else:
2098 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002099
2100 sync = threading.Thread(target=self.bb_cache.sync)
2101 sync.start()
2102 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002103 bb.codeparser.parser_cache_savemerge()
2104 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002105 if self.cooker.configuration.profile:
2106 profiles = []
2107 for i in self.process_names:
2108 logfile = "profile-parse-%s.log" % i
2109 if os.path.exists(logfile):
2110 profiles.append(logfile)
2111
2112 pout = "profile-parse.log.processed"
2113 bb.utils.process_profilelog(profiles, pout = pout)
2114 print("Processed parsing statistics saved to %s" % (pout))
2115
2116 def load_cached(self):
2117 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002118 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002119 yield not cached, infos
2120
2121 def parse_generator(self):
2122 while True:
2123 if self.parsed >= self.toparse:
2124 break
2125
2126 try:
2127 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002128 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002129 pass
2130 else:
2131 value = result[1]
2132 if isinstance(value, BaseException):
2133 raise value
2134 else:
2135 yield result
2136
2137 def parse_next(self):
2138 result = []
2139 parsed = None
2140 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002141 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002142 except StopIteration:
2143 self.shutdown()
2144 return False
2145 except bb.BBHandledException as exc:
2146 self.error += 1
2147 logger.error('Failed to parse recipe: %s' % exc.recipe)
2148 self.shutdown(clean=False)
2149 return False
2150 except ParsingFailure as exc:
2151 self.error += 1
2152 logger.error('Unable to parse %s: %s' %
2153 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2154 self.shutdown(clean=False)
2155 return False
2156 except bb.parse.ParseError as exc:
2157 self.error += 1
2158 logger.error(str(exc))
2159 self.shutdown(clean=False)
2160 return False
2161 except bb.data_smart.ExpansionError as exc:
2162 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002163 bbdir = os.path.dirname(__file__) + os.sep
2164 etype, value, _ = sys.exc_info()
2165 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2166 logger.error('ExpansionError during parsing %s', value.recipe,
2167 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002168 self.shutdown(clean=False)
2169 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002170 except Exception as exc:
2171 self.error += 1
2172 etype, value, tb = sys.exc_info()
2173 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002174 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002175 exc_info=(etype, value, exc.traceback))
2176 else:
2177                 # Most likely, an exception occurred while raising an exception
2178 import traceback
2179 logger.error('Exception during parse: %s' % traceback.format_exc())
2180 self.shutdown(clean=False)
2181 return False
2182
2183 self.current += 1
2184 self.virtuals += len(result)
2185 if parsed:
2186 self.parsed += 1
2187 if self.parsed % self.progress_chunk == 0:
2188 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2189 self.cfgdata)
2190 else:
2191 self.cached += 1
2192
2193 for virtualfn, info_array in result:
2194 if info_array[0].skipped:
2195 self.skipped += 1
2196 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002197 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2198 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002199 parsed=parsed, watcher = self.cooker.add_filewatch)
2200 return True
2201
2202 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002203 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002204 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002205 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2206 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)