#!/usr/bin/env python
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)

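# Illustrative usage (not part of the original file): get_name() maps a numeric
# status code back to its symbolic name, e.g.
#
#   state.get_name(state.running)  # -> "running"
#   state.get_name(99)             # -> raises ValueError("Invalid status code: 99")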

class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)

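# Illustrative usage (not part of the original file): a UI requests optional
# cooker behaviour by passing feature constants, which BBCooker feeds through
# setFeature(); membership is then tested with "in":
#
#   features = CookerFeatures()
#   features.setFeature(CookerFeatures.SEND_SANITYEVENTS)
#   CookerFeatures.SEND_SANITYEVENTS in features  # -> True
#   features.setFeature(42)                       # unknown values are silently ignored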

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

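# Illustrative sketch (not part of the original file): every line EventWriter
# appends to the event log is a JSON object whose "vars" field holds a
# base64-encoded pickle of the event, so a log can be replayed roughly like:
#
#   with open(eventfile) as f:
#       for line in f:
#           entry = json.loads(line)
#           if "vars" in entry:
#               event = pickle.loads(codecs.decode(entry["vars"].encode('utf-8'), 'base64'))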
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

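    # Illustrative note (not part of the original file): for a dependency such as
    # <layerdir>/conf/site.conf that does not exist yet, the loop above walks up
    # through the parent directories until add_watch() succeeds, so creating the
    # missing file later is still noticed via the closest existing parent.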
    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        bb.parse.init_parser(self.data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if not mc in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Expand a string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

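    # Illustrative example (not part of the original file): with multiconfigs
    # "" (default), "A" and "B", a target list of ["mc:*:bash"] is expanded by the
    # wildcard handling above into "mc:A:bash", "mc:B:bash" plus a plain "bash"
    # for the default configuration, and each resolved target becomes a runlist
    # entry of the form [mc, target, taskname, providing_file], e.g.
    # ['A', 'bash', 'do_build', '/path/to/meta/recipes-extended/bash/bash_5.0.bb']
    # (the recipe path here is hypothetical).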
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

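    # Illustrative usage (not part of the original file):
    #   BBCooker.add_mc_prefix("musl", "bash")  # -> "mc:musl:bash"
    #   BBCooker.add_mc_prefix("", "bash")      # -> "bash"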
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

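    # Illustrative fragment (not part of the original file) of the task-depends.dot
    # output written above, for a hypothetical bash recipe:
    #   "bash.do_compile" [label="bash do_compile\n0:5.0-r0\n/path/to/bash_5.0.bb"]
    #   "bash.do_compile" -> "bash.do_configure"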
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500940 def show_appends_with_no_recipes(self):
941 # Determine which bbappends haven't been applied
942
943 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600944 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500945 recipefns.extend(self.skiplist.keys())
946
947 # Work out list of bbappends that have been applied
948 applied_appends = []
949 for fn in recipefns:
950 applied_appends.extend(self.collection.get_file_appends(fn))
951
952 appends_without_recipes = []
953 for _, appendfn in self.collection.bbappends:
954 if not appendfn in applied_appends:
955 appends_without_recipes.append(appendfn)
956
957 if appends_without_recipes:
958 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
959 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
960 False) or "no"
961 if warn_only.lower() in ("1", "yes", "true"):
962 bb.warn(msg)
963 else:
964 bb.fatal(msg)
965
966 def handlePrefProviders(self):
967
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600968 for mc in self.multiconfigs:
969 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600970 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500971
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600972 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500973 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600974 try:
975 (providee, provider) = p.split(':')
976 except:
977 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
978 continue
979 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
980 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
981 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500982
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500983 def findConfigFilePath(self, configfile):
984 """
985 Find the location on disk of configfile and if it exists and was parsed by BitBake
986 emit the ConfigFilePathFound event with the path to the file.
987 """
988 path = bb.cookerdata.findConfigFile(configfile, self.data)
989 if not path:
990 return
991
992 # Generate a list of parsed configuration files by searching the files
993 # listed in the __depends and __base_depends variables with a .conf suffix.
994 conffiles = []
995 dep_files = self.data.getVar('__base_depends', False) or []
996 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
997
998 for f in dep_files:
999 if f[0].endswith(".conf"):
1000 conffiles.append(f[0])
1001
1002 _, conf, conffile = path.rpartition("conf/")
1003 match = os.path.join(conf, conffile)
1004 # Try and find matches for conf/conffilename.conf as we don't always
1005 # have the full path to the file.
1006 for cfg in conffiles:
1007 if cfg.endswith(match):
1008 bb.event.fire(bb.event.ConfigFilePathFound(path),
1009 self.data)
1010 break
1011
1012 def findFilesMatchingInDir(self, filepattern, directory):
1013 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001014 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001015 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1016 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1017 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001018 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001019 """
1020
1021 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001022 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001023 for path in bbpaths:
1024 dirpath = os.path.join(path, directory)
1025 if os.path.exists(dirpath):
1026 for root, dirs, files in os.walk(dirpath):
1027 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001028 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001029 matches.append(f)
1030
1031 if matches:
1032 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1033
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001034 def findProviders(self, mc=''):
1035 return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1036
1037 def findBestProvider(self, pn, mc=''):
1038 if pn in self.recipecaches[mc].providers:
1039 filenames = self.recipecaches[mc].providers[pn]
1040 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
1041 filename = eligible[0]
1042 return None, None, None, filename
1043 elif pn in self.recipecaches[mc].pkg_pn:
1044 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1045 else:
1046 return None, None, None, None
1047
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001048 def findConfigFiles(self, varname):
1049 """
1050 Find config files which are appropriate values for varname.
1051 i.e. MACHINE, DISTRO
1052 """
1053 possible = []
1054 var = varname.lower()
1055
1056 data = self.data
1057 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001058 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001059 for path in bbpaths:
1060 confpath = os.path.join(path, "conf", var)
1061 if os.path.exists(confpath):
1062 for root, dirs, files in os.walk(confpath):
1063 # get all child files, these are appropriate values
1064 for f in files:
1065 val, sep, end = f.rpartition('.')
1066 if end == 'conf':
1067 possible.append(val)
1068
1069 if possible:
1070 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1071
1072 def findInheritsClass(self, klass):
1073 """
1074 Find all recipes which inherit the specified class
1075 """
1076 pkg_list = []
1077
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001078 for pfn in self.recipecaches[''].pkg_fn:
1079 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001080 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001081 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001082
1083 return pkg_list
1084
1085 def generateTargetsTree(self, klass=None, pkgs=None):
1086 """
1087 Generate a dependency tree of buildable targets
1088 Generate an event with the result
1089 """
1090 # if the caller hasn't specified a pkgs list default to universe
1091 if not pkgs:
1092 pkgs = ['universe']
1093 # if inherited_class passed ensure all recipes which inherit the
1094 # specified class are included in pkgs
1095 if klass:
1096 extra_pkgs = self.findInheritsClass(klass)
1097 pkgs = pkgs + extra_pkgs
1098
1099 # generate a dependency tree for all our packages
1100 tree = self.generatePkgDepTreeData(pkgs, 'build')
1101 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1102
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001103 def interactiveMode( self ):
1104 """Drop off into a shell"""
1105 try:
1106 from bb import shell
1107 except ImportError:
1108 parselog.exception("Interactive mode not available")
1109 sys.exit(1)
1110 else:
1111 shell.start( self )
1112
1113
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001114 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001115 """Handle collections"""
1116 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001117 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001118 if collections:
1119 collection_priorities = {}
1120 collection_depends = {}
1121 collection_list = collections.split()
1122 min_prio = 0
1123 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001124 bb.debug(1,'Processing %s in collection list' % (c))
1125
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001126 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001127 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001128 if priority:
1129 try:
1130 prio = int(priority)
1131 except ValueError:
1132 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1133 errors = True
1134 if min_prio == 0 or prio < min_prio:
1135 min_prio = prio
1136 collection_priorities[c] = prio
1137 else:
1138 collection_priorities[c] = None
1139
1140 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001141 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001142 if deps:
1143 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001144 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001145 except bb.utils.VersionStringException as vse:
1146 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001147 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001148 if dep in collection_list:
1149 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001150 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001151 (op, depver) = opstr.split()
1152 if layerver:
1153 try:
1154 res = bb.utils.vercmp_string_op(layerver, depver, op)
1155 except bb.utils.VersionStringException as vse:
1156 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1157 if not res:
1158 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1159 errors = True
1160 else:
1161 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1162 errors = True
1163 else:
1164 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1165 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001166 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001167 else:
1168 collection_depends[c] = []
1169
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001170 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001171 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001172 if recs:
1173 try:
1174 recDict = bb.utils.explode_dep_versions2(recs)
1175 except bb.utils.VersionStringException as vse:
1176 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1177 for rec, oplist in list(recDict.items()):
1178 if rec in collection_list:
1179 if oplist:
1180 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001181 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001182 if layerver:
1183 (op, recver) = opstr.split()
1184 try:
1185 res = bb.utils.vercmp_string_op(layerver, recver, op)
1186 except bb.utils.VersionStringException as vse:
1187 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1188 if not res:
1189 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1190 continue
1191 else:
1192 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1193 continue
1194 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1195 collection_depends[c].append(rec)
1196 else:
1197 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1198
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001199 # Recursively work out collection priorities based on dependencies
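        # For example (hypothetical layers): if layer C depends on layers A and B
        # whose calculated priorities are 5 and 6, C is assigned priority 7,
        # i.e. max(dependency priorities, min_prio) + 1. This only applies to
        # layers whose priority has not already been set explicitly.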
1200 def calc_layer_priority(collection):
1201 if not collection_priorities[collection]:
1202 max_depprio = min_prio
1203 for dep in collection_depends[collection]:
1204 calc_layer_priority(dep)
1205 depprio = collection_priorities[dep]
1206 if depprio > max_depprio:
1207 max_depprio = depprio
1208 max_depprio += 1
1209 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1210 collection_priorities[collection] = max_depprio
1211
1212 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1213 for c in collection_list:
1214 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001215 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001216            if regex is None:
1217 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1218 errors = True
1219 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001220 elif regex == "":
1221 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop19323692019-04-05 15:28:33 -04001222 cre = re.compile('^NULL$')
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001223 errors = False
1224 else:
1225 try:
1226 cre = re.compile(regex)
1227 except re.error:
1228 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1229 errors = True
1230 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001231 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001232 if errors:
1233 # We've already printed the actual error(s)
1234 raise CollectionError("Errors during parsing layer configuration")
1235
1236 def buildSetVars(self):
1237 """
1238        Set up any variables needed before starting a build
1239 """
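        # For example, with the strftime formats below a build started at
        # 2015-09-15 14:41:29 would get DATE "20150915" and TIME "144129",
        # so the default BUILDNAME of "${DATE}${TIME}" expands to "20150915144129".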
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001240 t = time.gmtime()
1241 for mc in self.databuilder.mcdata:
1242 ds = self.databuilder.mcdata[mc]
1243 if not ds.getVar("BUILDNAME", False):
1244 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1245 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1246 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1247 ds.setVar("TIME", time.strftime('%H%M%S', t))
1248
1249 def reset_mtime_caches(self):
1250 """
1251        Reset mtime caches - this is particularly important when running memory resident,
1252        as something which is cached may well have changed since the last invocation (e.g. a
1253        file associated with a recipe might have been modified by the user).
1254 """
1255 build.reset_cache()
1256 bb.fetch._checksum_cache.mtime_cache.clear()
1257 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1258 if siggen_cache:
1259 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001260
1261 def matchFiles(self, bf):
1262 """
1263 Find the .bb files which match the expression in 'buildfile'.
1264 """
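        # If 'bf' names an existing file it is returned directly once os.stat()
        # succeeds below; otherwise it is treated as a regular expression and
        # matched against the collected recipe list (e.g. an illustrative
        # pattern such as "bash_.*\.bb" would return every matching recipe).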
1265 if bf.startswith("/") or bf.startswith("../"):
1266 bf = os.path.abspath(bf)
1267
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001268 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001269 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001270 try:
1271 os.stat(bf)
1272 bf = os.path.abspath(bf)
1273 return [bf]
1274 except OSError:
1275 regexp = re.compile(bf)
1276 matches = []
1277 for f in filelist:
1278 if regexp.search(f) and os.path.isfile(f):
1279 matches.append(f)
1280 return matches
1281
1282 def matchFile(self, buildfile):
1283 """
1284 Find the .bb file which matches the expression in 'buildfile'.
1285 Raise an error if multiple files
1286        Raise an error if multiple files match.
1287 matches = self.matchFiles(buildfile)
1288 if len(matches) != 1:
1289 if matches:
1290 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1291                for f in matches:
1292                    msg += "\n    %s" % f
1294 parselog.error(msg)
1295 else:
1296 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1297 raise NoSpecificMatch
1298 return matches[0]
1299
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001300 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001301 """
1302 Build the file matching regexp buildfile
1303 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001304 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001305
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001306 # Too many people use -b because they think it's how you normally
1307 # specify a target to be built, so show a warning
1308 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1309
1310 self.buildFileInternal(buildfile, task)
1311
1312 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1313 """
1314 Build the file matching regexp buildfile
1315 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001316
1317 # Parse the configuration here. We need to do it explicitly here since
1318 # buildFile() doesn't use the cache
1319 self.parseConfiguration()
1320
1321 # If we are told to do the None task then query the default task
1322 if (task == None):
1323 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001324 if not task.startswith("do_"):
1325 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001326
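        # Descriptive note (added): virtualfn2realfn() splits a "virtual" filename
        # into (real path, class, multiconfig); e.g. a hypothetical
        # "virtual:native:/path/foo.bb" maps to ("/path/foo.bb", "native", ""),
        # and realfn2virtual() below reverses the mapping.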
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001327 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001328 fn = self.matchFile(fn)
1329
1330 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001331 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001333 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1334
1335 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001336 infos = dict(infos)
1337
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001338 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001339 try:
1340 info_array = infos[fn]
1341 except KeyError:
1342 bb.fatal("%s does not exist" % fn)
1343
1344 if info_array[0].skipped:
1345 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1346
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001347 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001348
1349 # Tweak some variables
1350 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001351 self.recipecaches[mc].ignored_dependencies = set()
1352 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001353 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001354
1355 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001356 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1357 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001358 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1359 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001360
1361 # Invalidate task for target if force mode active
1362 if self.configuration.force:
1363 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001364 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001365
1366 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001367 taskdata = {}
1368 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001369 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001370
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001371 if quietlog:
1372 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1373 bb.runqueue.logger.setLevel(logging.WARNING)
1374
1375 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1376 if fireevents:
1377 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378
1379 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001380 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001381
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001382 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383
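        # Idle callback registered below and driven by the server's main loop.
        # As used here it is expected to return True to be polled again, a
        # numeric delay from execute_runqueue() to be rescheduled, or False once
        # the runqueue has finished and the async command has been completed.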
1384 def buildFileIdle(server, rq, abort):
1385
1386 msg = None
1387 interrupted = 0
1388 if abort or self.state == state.forceshutdown:
1389 rq.finish_runqueue(True)
1390 msg = "Forced shutdown"
1391 interrupted = 2
1392 elif self.state == state.shutdown:
1393 rq.finish_runqueue(False)
1394 msg = "Stopped build"
1395 interrupted = 1
1396 failures = 0
1397 try:
1398 retval = rq.execute_runqueue()
1399 except runqueue.TaskFailure as exc:
1400 failures += len(exc.args)
1401 retval = False
1402 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001403 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001404 if quietlog:
1405 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001406 return False
1407
1408 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001409 if fireevents:
1410 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001411 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001412 # We trashed self.recipecaches above
1413 self.parsecache_valid = False
1414 self.configuration.limited_deps = False
1415 bb.parse.siggen.reset(self.data)
1416 if quietlog:
1417 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001418 return False
1419 if retval is True:
1420 return True
1421 return retval
1422
1423 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1424
1425 def buildTargets(self, targets, task):
1426 """
1427 Attempt to build the targets specified
1428 """
1429
1430 def buildTargetsIdle(server, rq, abort):
1431 msg = None
1432 interrupted = 0
1433 if abort or self.state == state.forceshutdown:
1434 rq.finish_runqueue(True)
1435 msg = "Forced shutdown"
1436 interrupted = 2
1437 elif self.state == state.shutdown:
1438 rq.finish_runqueue(False)
1439 msg = "Stopped build"
1440 interrupted = 1
1441 failures = 0
1442 try:
1443 retval = rq.execute_runqueue()
1444 except runqueue.TaskFailure as exc:
1445 failures += len(exc.args)
1446 retval = False
1447 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001448 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001449 return False
1450
1451 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001452 try:
1453 for mc in self.multiconfigs:
1454 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1455 finally:
1456 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001457 return False
1458 if retval is True:
1459 return True
1460 return retval
1461
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001462 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001463 self.buildSetVars()
1464
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001465 # If we are told to do the None task then query the default task
1466        if task is None:
1467 task = self.configuration.cmd
1468
1469 if not task.startswith("do_"):
1470 task = "do_%s" % task
1471
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001472 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1473
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001474 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001475
1476 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001477
1478 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001479
1480 # make targets to always look as <target>:do_<task>
1481 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001482 for target in runlist:
1483 if target[0]:
Brad Bishop15ae2502019-06-18 21:44:24 -04001484 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001485 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001486
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001487 for mc in self.multiconfigs:
1488 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001489
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001490 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001491 if 'universe' in targets:
1492 rq.rqdata.warn_multi_bb = True
1493
1494 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1495
1496
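    # Descriptive note (added): build a dump of every datastore variable as
    # {name: {'v': value, 'history': ..., <flag>: ...}} for the flags requested
    # in 'flaglist'; python functions are not expanded and internal "__"
    # variables are skipped.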
1497 def getAllKeysWithFlags(self, flaglist):
1498 dump = {}
1499 for k in self.data.keys():
1500 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001501 expand = True
1502 flags = self.data.getVarFlags(k)
1503 if flags and "func" in flags and "python" in flags:
1504 expand = False
1505 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001506 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1507 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001508 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001509 'history' : self.data.varhistory.variable(k),
1510 }
1511 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001512 if flags and d in flags:
1513 dump[k][d] = flags[d]
1514 else:
1515 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001516 except Exception as e:
1517 print(e)
1518 return dump
1519
1520
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521 def updateCacheSync(self):
1522 if self.state == state.running:
1523 return
1524
1525 # reload files for which we got notifications
1526 for p in self.inotify_modified_files:
1527 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001528 if p in bb.parse.BBHandler.cached_statements:
1529 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001530 self.inotify_modified_files = []
1531
1532 if not self.baseconfig_valid:
1533 logger.debug(1, "Reloading base configuration data")
1534 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001535 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001536
1537 # This is called for all async commands when self.state != running
1538 def updateCache(self):
1539 if self.state == state.running:
1540 return
1541
1542 if self.state in (state.shutdown, state.forceshutdown, state.error):
1543 if hasattr(self.parser, 'shutdown'):
1544 self.parser.shutdown(clean=False, force = True)
1545 raise bb.BBHandledException()
1546
1547 if self.state != state.parsing:
1548 self.updateCacheSync()
1549
1550 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001551 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001552 self.parseConfiguration ()
1553 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001554 for mc in self.multiconfigs:
1555 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001556
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001558 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001560
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001561 for dep in self.configuration.extra_assume_provided:
1562 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001563
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001564 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001565 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1566
1567 # Add inotify watches for directories searched for bb/bbappend files
1568 for dirent in searchdirs:
1569 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001570
1571 self.parser = CookerParser(self, filelist, masked)
1572 self.parsecache_valid = True
1573
1574 self.state = state.parsing
1575
1576 if not self.parser.parse_next():
1577 collectlog.debug(1, "parsing complete")
1578 if self.parser.error:
1579 raise bb.BBHandledException()
1580 self.show_appends_with_no_recipes()
1581 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001582 for mc in self.multiconfigs:
1583 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001584 self.state = state.running
1585
1586 # Send an event listing all stamps reachable after parsing
1587 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001588 for mc in self.multiconfigs:
1589 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1590 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001591 return None
1592
1593 return True
1594
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001595 def checkPackages(self, pkgs_to_build, task=None):
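        # Descriptive note (added): sanity-check and expand the requested targets:
        # warn about ASSUME_PROVIDED entries, rewrite the old "multiconfig:"
        # prefix to "mc:", and expand the 'world' and 'universe' pseudo-targets
        # into concrete recipe targets for each configured multiconfig.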
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001596
1597 # Return a copy, don't modify the original
1598 pkgs_to_build = pkgs_to_build[:]
1599
1600 if len(pkgs_to_build) == 0:
1601 raise NothingToBuild
1602
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001603 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604 for pkg in pkgs_to_build:
1605 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001606 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Brad Bishop15ae2502019-06-18 21:44:24 -04001607 if pkg.startswith("multiconfig:"):
1608 pkgs_to_build.remove(pkg)
1609 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001610
1611 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001612 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001613 for mc in self.multiconfigs:
1614 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1615 for t in self.recipecaches[mc].world_target:
1616 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001617 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001618 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001619
1620 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001621 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001622 parselog.debug(1, "collating packages for \"universe\"")
1623 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001624 for mc in self.multiconfigs:
1625 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001626 if task:
1627 foundtask = False
1628 for provider_fn in self.recipecaches[mc].providers[t]:
1629 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1630 foundtask = True
1631 break
1632 if not foundtask:
1633 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1634 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001635 if mc:
Brad Bishop15ae2502019-06-18 21:44:24 -04001636 t = "mc:" + mc + ":" + t
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001637 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001638
1639 return pkgs_to_build
1640
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001641 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001642 # We now are in our own process so we can call this here.
1643 # PRServ exits if its parent process exits
1644 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001645 return
1646
1647 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001648 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001649 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001650
1651
1652 def shutdown(self, force = False):
1653 if force:
1654 self.state = state.forceshutdown
1655 else:
1656 self.state = state.shutdown
1657
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001658 if self.parser:
1659 self.parser.shutdown(clean=not force, force=force)
1660
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001661 def finishcommand(self):
1662 self.state = state.initial
1663
1664 def reset(self):
1665 self.initConfigurationData()
1666
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001667 def clientComplete(self):
1668 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001669 self.finishcommand()
1670 self.extraconfigdata = {}
1671 self.command.reset()
1672 self.databuilder.reset()
1673 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001674
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001675
1676class CookerExit(bb.event.Event):
1677 """
1678 Notify clients of the Cooker shutdown
1679 """
1680
1681 def __init__(self):
1682 bb.event.Event.__init__(self)
1683
1684
1685class CookerCollectFiles(object):
1686 def __init__(self, priorities):
1687 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001688        # Priorities is a list of tuples, with the second element as the pattern.
1689 # We need to sort the list with the longest pattern first, and so on to
1690 # the shortest. This allows nested layers to be properly evaluated.
1691 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001692
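    # Descriptive note (added): return the priority of the first (longest)
    # BBFILE_PATTERN regex that matches 'filename'. When a 'matched' set is
    # supplied, record which regexes fired so collection_priorities() can later
    # warn about patterns that never matched anything; unmatched files get 0.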
1693    def calc_bbfile_priority(self, filename, matched=None):
1694        for _, _, regex, pri in self.bbfile_config_priorities:
1695            if regex.match(filename):
1696                if matched is not None:
1697                    if regex not in matched:
1698 matched.add(regex)
1699 return pri
1700 return 0
1701
1702 def get_bbfiles(self):
1703 """Get list of default .bb files by reading out the current directory"""
1704 path = os.getcwd()
1705 contents = os.listdir(path)
1706 bbfiles = []
1707 for f in contents:
1708 if f.endswith(".bb"):
1709 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1710 return bbfiles
1711
1712 def find_bbfiles(self, path):
1713 """Find all the .bb and .bbappend files in a directory"""
1714 found = []
1715 for dir, dirs, files in os.walk(path):
1716 for ignored in ('SCCS', 'CVS', '.svn'):
1717 if ignored in dirs:
1718 dirs.remove(ignored)
1719 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1720
1721 return found
1722
1723 def collect_bbfiles(self, config, eventdata):
1724 """Collect all available .bb build files"""
1725 masked = 0
1726
1727 collectlog.debug(1, "collecting .bb files")
1728
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001729 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001730 config.setVar("BBFILES", " ".join(files))
1731
1732 # Sort files by priority
1733 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1734
1735 if not len(files):
1736 files = self.get_bbfiles()
1737
1738 if not len(files):
1739 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1740 bb.event.fire(CookerExit(), eventdata)
1741
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001742        # We need to track where we look so that we can add inotify watches. There
1743        # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001744        # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001745 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001746 if hasattr(os, 'scandir'):
1747 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001748 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001749
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001750 def ourlistdir(d):
1751 searchdirs.append(d)
1752 return origlistdir(d)
1753
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001754 def ourscandir(d):
1755 searchdirs.append(d)
1756 return origscandir(d)
1757
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001758 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001759 if hasattr(os, 'scandir'):
1760 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001761 try:
1762 # Can't use set here as order is important
1763 newfiles = []
1764 for f in files:
1765 if os.path.isdir(f):
1766 dirfiles = self.find_bbfiles(f)
1767 for g in dirfiles:
1768 if g not in newfiles:
1769 newfiles.append(g)
1770 else:
1771 globbed = glob.glob(f)
1772 if not globbed and os.path.exists(f):
1773 globbed = [f]
1774 # glob gives files in order on disk. Sort to be deterministic.
1775 for g in sorted(globbed):
1776 if g not in newfiles:
1777 newfiles.append(g)
1778 finally:
1779 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001780 if hasattr(os, 'scandir'):
1781 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001782
1783 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001784
1785 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001786 # First validate the individual regular expressions and ignore any
1787 # that do not compile
1788 bbmasks = []
1789 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001790 # When constructing an older style single regex, it's possible for BBMASK
1791 # to end up beginning with '|', which matches and masks _everything_.
1792 if mask.startswith("|"):
1793                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1794 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001795 try:
1796 re.compile(mask)
1797 bbmasks.append(mask)
1798 except sre_constants.error:
1799 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1800
1801 # Then validate the combined regular expressions. This should never
1802 # fail, but better safe than sorry...
1803 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001804 try:
1805 bbmask_compiled = re.compile(bbmask)
1806 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001807 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1808 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001809
1810 bbfiles = []
1811 bbappend = []
1812 for f in newfiles:
1813 if bbmask and bbmask_compiled.search(f):
1814 collectlog.debug(1, "skipping masked file %s", f)
1815 masked += 1
1816 continue
1817 if f.endswith('.bb'):
1818 bbfiles.append(f)
1819 elif f.endswith('.bbappend'):
1820 bbappend.append(f)
1821 else:
1822 collectlog.debug(1, "skipping %s: unknown file extension", f)
1823
1824 # Build a list of .bbappend files for each .bb file
1825 for f in bbappend:
1826 base = os.path.basename(f).replace('.bbappend', '.bb')
1827 self.bbappends.append((base, f))
1828
1829 # Find overlayed recipes
1830 # bbfiles will be in priority order which makes this easy
1831 bbfile_seen = dict()
1832 self.overlayed = defaultdict(list)
1833 for f in reversed(bbfiles):
1834 base = os.path.basename(f)
1835 if base not in bbfile_seen:
1836 bbfile_seen[base] = f
1837 else:
1838 topfile = bbfile_seen[base]
1839 self.overlayed[topfile].append(f)
1840
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001841 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001842
1843 def get_file_appends(self, fn):
1844 """
1845 Returns a list of .bbappend files to apply to fn
1846 """
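        # Note (added): a '%' in a bbappend name acts as a wildcard terminator,
        # so e.g. an illustrative "busybox_1.%.bbappend" applies to any
        # "busybox_1.*.bb" recipe; that is what the startswith() check below
        # implements.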
1847 filelist = []
1848 f = os.path.basename(fn)
1849 for b in self.bbappends:
1850 (bbappend, filename) = b
1851 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1852 filelist.append(filename)
1853 return filelist
1854
1855 def collection_priorities(self, pkgfns, d):
1856
1857 priorities = {}
1858
1859 # Calculate priorities for each file
1860 matched = set()
1861 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001862 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001863 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1864
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001865 unmatched = set()
1866 for _, _, regex, pri in self.bbfile_config_priorities:
1867            if regex not in matched:
1868 unmatched.add(regex)
1869
Brad Bishop316dfdd2018-06-25 12:45:53 -04001870 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1871 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001872 for b in self.bbappends:
1873 (bbfile, append) = b
1874 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001875                    # If the bbappend is already matched by a regex in the "matched" set, return False
1876 for matched_regex in matched:
1877 if matched_regex.match(append):
1878 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001879 return True
1880 return False
1881
1882 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001883 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001884 unmatched.remove(unmatch)
1885
1886 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1887 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001888 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001889 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001890
1891 return priorities
1892
1893class ParsingFailure(Exception):
1894 def __init__(self, realexception, recipe):
1895 self.realexception = realexception
1896 self.recipe = recipe
1897 Exception.__init__(self, realexception, recipe)
1898
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001899class Parser(multiprocessing.Process):
1900 def __init__(self, jobs, results, quit, init, profile):
1901 self.jobs = jobs
1902 self.results = results
1903 self.quit = quit
1904 self.init = init
1905 multiprocessing.Process.__init__(self)
1906 self.context = bb.utils.get_context().copy()
1907 self.handlers = bb.event.get_class_handlers().copy()
1908 self.profile = profile
1909
1910 def run(self):
1911
1912 if not self.profile:
1913 self.realrun()
1914 return
1915
1916 try:
1917 import cProfile as profile
1918        except ImportError:
1919 import profile
1920 prof = profile.Profile()
1921 try:
1922 profile.Profile.runcall(prof, self.realrun)
1923 finally:
1924 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1925 prof.dump_stats(logfile)
1926
1927 def realrun(self):
1928 if self.init:
1929 self.init()
1930
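        # Main worker loop: exit when told to quit, otherwise take a job from
        # this process's chunk and parse it. Results that cannot be queued
        # within 0.25s are kept in 'pending' and retried on the next iteration
        # so the worker never blocks indefinitely on a full result queue.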
1931 pending = []
1932 while True:
1933 try:
1934 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001935 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001936 pass
1937 else:
1938 self.results.cancel_join_thread()
1939 break
1940
1941 if pending:
1942 result = pending.pop()
1943 else:
1944 try:
Brad Bishop19323692019-04-05 15:28:33 -04001945 job = self.jobs.pop()
1946 except IndexError:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001947 break
1948 result = self.parse(*job)
1949
1950 try:
1951 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001952 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001953 pending.append(result)
1954
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001955 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001956 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001957 # Record the filename we're parsing into any events generated
1958 def parse_filter(self, record):
1959 record.taskpid = bb.event.worker_pid
1960 record.fn = filename
1961 return True
1962
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001963 # Reset our environment and handlers to the original settings
1964 bb.utils.set_context(self.context.copy())
1965 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001966 bb.event.LogHandler.filter = parse_filter
1967
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001968 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001969 except Exception as exc:
1970 tb = sys.exc_info()[2]
1971 exc.recipe = filename
1972 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1973 return True, exc
1974        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
1975        # and, for example, a worker thread doesn't just exit on its own in response
1976        # to a SystemExit event.
1977 except BaseException as exc:
1978 return True, ParsingFailure(exc, filename)
1979
1980class CookerParser(object):
1981 def __init__(self, cooker, filelist, masked):
1982 self.filelist = filelist
1983 self.cooker = cooker
1984 self.cfgdata = cooker.data
1985 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001986 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001987
1988 # Accounting statistics
1989 self.parsed = 0
1990 self.cached = 0
1991 self.error = 0
1992 self.masked = masked
1993
1994 self.skipped = 0
1995 self.virtuals = 0
1996 self.total = len(filelist)
1997
1998 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001999 self.process_names = []
2000
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002001 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002 self.fromcache = []
2003 self.willparse = []
2004 for filename in self.filelist:
2005 appends = self.cooker.collection.get_file_appends(filename)
2006 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002007 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002008 else:
2009 self.fromcache.append((filename, appends))
2010 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002011 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002012
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002013 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002014 multiprocessing.cpu_count()), len(self.willparse))
2015
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002016 self.start()
2017 self.haveshutdown = False
2018
2019 def start(self):
2020 self.results = self.load_cached()
2021 self.processes = []
2022 if self.toparse:
2023 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2024 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002025 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002026 bb.utils.set_process_name(multiprocessing.current_process().name)
2027 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2028 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002029
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002030 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002031 self.result_queue = multiprocessing.Queue()
Brad Bishop19323692019-04-05 15:28:33 -04002032
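        # Split the parse jobs round-robin across the worker processes, e.g.
        # chunkify([a, b, c, d, e], 2) -> [[a, c, e], [b, d]], so each Parser
        # process gets its own independent list to pop() jobs from.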
2033 def chunkify(lst,n):
2034 return [lst[i::n] for i in range(n)]
2035 self.jobs = chunkify(self.willparse, self.num_processes)
2036
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002037 for i in range(0, self.num_processes):
Brad Bishop19323692019-04-05 15:28:33 -04002038 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002039 parser.start()
2040 self.process_names.append(parser.name)
2041 self.processes.append(parser)
2042
2043 self.results = itertools.chain(self.results, self.parse_generator())
2044
2045 def shutdown(self, clean=True, force=False):
2046 if not self.toparse:
2047 return
2048 if self.haveshutdown:
2049 return
2050 self.haveshutdown = True
2051
2052 if clean:
2053 event = bb.event.ParseCompleted(self.cached, self.parsed,
2054 self.skipped, self.masked,
2055 self.virtuals, self.error,
2056 self.total)
2057
2058 bb.event.fire(event, self.cfgdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002059 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002060 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002061 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002062 self.parser_quit.cancel_join_thread()
2063 for process in self.processes:
2064 self.parser_quit.put(None)
2065
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002066 for process in self.processes:
2067 if force:
2068 process.join(.1)
2069 process.terminate()
2070 else:
2071 process.join()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002072
2073 sync = threading.Thread(target=self.bb_cache.sync)
2074 sync.start()
2075 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002076 bb.codeparser.parser_cache_savemerge()
2077 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002078 if self.cooker.configuration.profile:
2079 profiles = []
2080 for i in self.process_names:
2081 logfile = "profile-parse-%s.log" % i
2082 if os.path.exists(logfile):
2083 profiles.append(logfile)
2084
2085 pout = "profile-parse.log.processed"
2086 bb.utils.process_profilelog(profiles, pout = pout)
2087 print("Processed parsing statistics saved to %s" % (pout))
2088
2089 def load_cached(self):
2090 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002091 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002092 yield not cached, infos
2093
2094 def parse_generator(self):
2095 while True:
2096 if self.parsed >= self.toparse:
2097 break
2098
2099 try:
2100 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002101 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002102 pass
2103 else:
2104 value = result[1]
2105 if isinstance(value, BaseException):
2106 raise value
2107 else:
2108 yield result
2109
2110 def parse_next(self):
2111 result = []
2112 parsed = None
2113 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002114 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002115 except StopIteration:
2116 self.shutdown()
2117 return False
2118 except bb.BBHandledException as exc:
2119 self.error += 1
2120 logger.error('Failed to parse recipe: %s' % exc.recipe)
2121 self.shutdown(clean=False)
2122 return False
2123 except ParsingFailure as exc:
2124 self.error += 1
2125 logger.error('Unable to parse %s: %s' %
2126 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2127 self.shutdown(clean=False)
2128 return False
2129 except bb.parse.ParseError as exc:
2130 self.error += 1
2131 logger.error(str(exc))
2132 self.shutdown(clean=False)
2133 return False
2134 except bb.data_smart.ExpansionError as exc:
2135 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002136 bbdir = os.path.dirname(__file__) + os.sep
2137 etype, value, _ = sys.exc_info()
2138 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2139 logger.error('ExpansionError during parsing %s', value.recipe,
2140 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002141 self.shutdown(clean=False)
2142 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002143 except Exception as exc:
2144 self.error += 1
2145 etype, value, tb = sys.exc_info()
2146 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002147 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002148 exc_info=(etype, value, exc.traceback))
2149 else:
2150 # Most likely, an exception occurred during raising an exception
2151 import traceback
2152 logger.error('Exception during parse: %s' % traceback.format_exc())
2153 self.shutdown(clean=False)
2154 return False
2155
2156 self.current += 1
2157 self.virtuals += len(result)
2158 if parsed:
2159 self.parsed += 1
2160 if self.parsed % self.progress_chunk == 0:
2161 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2162 self.cfgdata)
2163 else:
2164 self.cached += 1
2165
2166 for virtualfn, info_array in result:
2167 if info_array[0].skipped:
2168 self.skipped += 1
2169 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002170 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2171 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002172 parsed=parsed, watcher = self.cooker.add_filewatch)
2173 return True
2174
2175 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002176 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002177 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002178 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2179 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)