#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

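# Simple enumeration of the cooker's lifecycle states; get_name() maps a
# numeric state code back to its symbolic name (e.g. for log messages).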
class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


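# Lightweight record of why a recipe was skipped during parsing, populated
# either from a cached info object or from an explicit reason string.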
class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


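# Optional cooker features a UI may request (extra caches, base datastore
# tracking, sanity events); requests for unknown features are silently ignored.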
class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


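# Writes UI events to the event log file as JSON lines, each carrying a
# base64-encoded pickle of the event. Events are queued until a BuildStarted
# (or CookerExit) event initialises the file, then flushed in order.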
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

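        # Watch the configuration files and parsed recipe inputs with pyinotify
        # so that changes on disk can invalidate the relevant caches.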
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

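    # Add inotify watches for each dependency's file (or containing directory).
    # If a path does not exist yet, walk up and watch the nearest existing
    # parent so we still notice when the missing file appears.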
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        bb.parse.init_parser(self.data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

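    # Apply settings that depend on the parsed base configuration: log
    # verbosity, nice level, per-multiconfig recipe caches and layer
    # collections. Marks the parse cache as needing a refresh.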
    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

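    # Merge option and environment changes sent by the UI; if anything that
    # could affect parsing changed, trigger a reset and reparse.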
    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()


        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "multiconfig:*:bash"
        # into "multiconfig:A:bash multiconfig:B:bash bash"
        for k in targetlist:
            if k.startswith("multiconfig:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "multiconfig:%s:%s" % (mc, pn)
        return pn

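    # Walk the prepared runqueue/taskdata and build the nested dictionary
    # (per-recipe metadata, task, build-time and runtime dependencies) used by
    # the dependency tree events and the .dot graph output.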
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

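    # Record PREFERRED_PROVIDERS from each multiconfig's datastore, logging an
    # error when conflicting preferences are given for the same providee.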
    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

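    # Resolve a single provider recipe for 'pn': use an explicit providers
    # entry when present, otherwise fall back to the normal best-provider
    # calculation over pkg_pn.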
988 def findBestProvider(self, pn, mc=''):
989 if pn in self.recipecaches[mc].providers:
990 filenames = self.recipecaches[mc].providers[pn]
991 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
992 filename = eligible[0]
993 return None, None, None, filename
994 elif pn in self.recipecaches[mc].pkg_pn:
995 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
996 else:
997 return None, None, None, None
998
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500999 def findConfigFiles(self, varname):
1000 """
1001 Find config files which are appropriate values for varname.
1002 i.e. MACHINE, DISTRO
1003 """
1004 possible = []
1005 var = varname.lower()
1006
1007 data = self.data
1008 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001009 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001010 for path in bbpaths:
1011 confpath = os.path.join(path, "conf", var)
1012 if os.path.exists(confpath):
1013 for root, dirs, files in os.walk(confpath):
1014 # get all child files, these are appropriate values
1015 for f in files:
1016 val, sep, end = f.rpartition('.')
1017 if end == 'conf':
1018 possible.append(val)
1019
1020 if possible:
1021 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1022
1023 def findInheritsClass(self, klass):
1024 """
1025 Find all recipes which inherit the specified class
1026 """
1027 pkg_list = []
1028
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001029 for pfn in self.recipecaches[''].pkg_fn:
1030 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001031 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001032 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001033
1034 return pkg_list
1035
1036 def generateTargetsTree(self, klass=None, pkgs=None):
1037 """
1038 Generate a dependency tree of buildable targets
1039 Generate an event with the result
1040 """
1041 # if the caller hasn't specified a pkgs list default to universe
1042 if not pkgs:
1043 pkgs = ['universe']
1044 # if inherited_class passed ensure all recipes which inherit the
1045 # specified class are included in pkgs
1046 if klass:
1047 extra_pkgs = self.findInheritsClass(klass)
1048 pkgs = pkgs + extra_pkgs
1049
1050 # generate a dependency tree for all our packages
1051 tree = self.generatePkgDepTreeData(pkgs, 'build')
1052 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1053
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001054 def interactiveMode( self ):
1055 """Drop off into a shell"""
1056 try:
1057 from bb import shell
1058 except ImportError:
1059 parselog.exception("Interactive mode not available")
1060 sys.exit(1)
1061 else:
1062 shell.start( self )
1063
1064
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001065 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001066 """Handle collections"""
1067 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001068 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001069 if collections:
1070 collection_priorities = {}
1071 collection_depends = {}
1072 collection_list = collections.split()
1073 min_prio = 0
1074 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001075 bb.debug(1,'Processing %s in collection list' % (c))
1076
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001077 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001078 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001079 if priority:
1080 try:
1081 prio = int(priority)
1082 except ValueError:
1083 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1084 errors = True
1085 if min_prio == 0 or prio < min_prio:
1086 min_prio = prio
1087 collection_priorities[c] = prio
1088 else:
1089 collection_priorities[c] = None
1090
1091 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001092 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001093 if deps:
1094 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001095 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001096 except bb.utils.VersionStringException as vse:
1097 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001098 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001099 if dep in collection_list:
1100 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001101 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001102 (op, depver) = opstr.split()
1103 if layerver:
1104 try:
1105 res = bb.utils.vercmp_string_op(layerver, depver, op)
1106 except bb.utils.VersionStringException as vse:
1107 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1108 if not res:
1109 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1110 errors = True
1111 else:
1112 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1113 errors = True
1114 else:
1115 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1116 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001117 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001118 else:
1119 collection_depends[c] = []
1120
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001121 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001122 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001123 if recs:
1124 try:
1125 recDict = bb.utils.explode_dep_versions2(recs)
1126 except bb.utils.VersionStringException as vse:
1127 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1128 for rec, oplist in list(recDict.items()):
1129 if rec in collection_list:
1130 if oplist:
1131 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001132 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001133 if layerver:
1134 (op, recver) = opstr.split()
1135 try:
1136 res = bb.utils.vercmp_string_op(layerver, recver, op)
1137 except bb.utils.VersionStringException as vse:
1138 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1139 if not res:
1140 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1141 continue
1142 else:
1143 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1144 continue
1145 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1146 collection_depends[c].append(rec)
1147 else:
1148 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1149
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001150 # Recursively work out collection priorities based on dependencies
1151 def calc_layer_priority(collection):
1152 if not collection_priorities[collection]:
1153 max_depprio = min_prio
1154 for dep in collection_depends[collection]:
1155 calc_layer_priority(dep)
1156 depprio = collection_priorities[dep]
1157 if depprio > max_depprio:
1158 max_depprio = depprio
1159 max_depprio += 1
1160 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1161 collection_priorities[collection] = max_depprio
1162
1163 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1164 for c in collection_list:
1165 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001166 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001167 if regex == None:
1168 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1169 errors = True
1170 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001171 elif regex == "":
1172 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1173 errors = False
Brad Bishop316dfdd2018-06-25 12:45:53 -04001174 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001175 else:
1176 try:
1177 cre = re.compile(regex)
1178 except re.error:
1179 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1180 errors = True
1181 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001182 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001183 if errors:
1184 # We've already printed the actual error(s)
1185 raise CollectionError("Errors during parsing layer configuration")
1186
1187 def buildSetVars(self):
1188 """
1189 Setup any variables needed before starting a build
1190 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001191 t = time.gmtime()
1192 for mc in self.databuilder.mcdata:
1193 ds = self.databuilder.mcdata[mc]
1194 if not ds.getVar("BUILDNAME", False):
1195 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1196 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1197 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1198 ds.setVar("TIME", time.strftime('%H%M%S', t))
1199
1200 def reset_mtime_caches(self):
1201 """
1202 Reset mtime caches - this is particularly important when memory resident as something
1203 which is cached is not unlikely to have changed since the last invocation (e.g. a
1204 file associated with a recipe might have been modified by the user).
1205 """
1206 build.reset_cache()
1207 bb.fetch._checksum_cache.mtime_cache.clear()
1208 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1209 if siggen_cache:
1210 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001211
1212 def matchFiles(self, bf):
1213 """
1214 Find the .bb files which match the expression in 'buildfile'.
1215 """
1216 if bf.startswith("/") or bf.startswith("../"):
1217 bf = os.path.abspath(bf)
1218
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001219 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001220 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001221 try:
1222 os.stat(bf)
1223 bf = os.path.abspath(bf)
1224 return [bf]
1225 except OSError:
1226 regexp = re.compile(bf)
1227 matches = []
1228 for f in filelist:
1229 if regexp.search(f) and os.path.isfile(f):
1230 matches.append(f)
1231 return matches
1232
1233 def matchFile(self, buildfile):
1234 """
1235 Find the .bb file which matches the expression in 'buildfile'.
1236 Raise an error if multiple files match.
1237 """
1238 matches = self.matchFiles(buildfile)
1239 if len(matches) != 1:
1240 if matches:
1241 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1243 for f in matches:
1244 msg += "\n %s" % f
1245 parselog.error(msg)
1246 else:
1247 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1248 raise NoSpecificMatch
1249 return matches[0]
1250
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001251 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001252 """
1253 Build the file matching regexp buildfile
1254 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001255 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001256
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001257 # Too many people use -b because they think it's how you normally
1258 # specify a target to be built, so show a warning
1259 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1260
1261 self.buildFileInternal(buildfile, task)
1262
1263 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1264 """
1265 Build the file matching regexp buildfile
1266 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001267
1268 # Parse the configuration here. We need to do it explicitly here since
1269 # buildFile() doesn't use the cache
1270 self.parseConfiguration()
1271
1272 # If we are told to do the None task then query the default task
1273 if task is None:
1274 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001275 if not task.startswith("do_"):
1276 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001277
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001278 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001279 fn = self.matchFile(fn)
1280
1281 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001282 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001283
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001284 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1285
1286 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001287 infos = dict(infos)
1288
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001289 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001290 try:
1291 info_array = infos[fn]
1292 except KeyError:
1293 bb.fatal("%s does not exist" % fn)
1294
1295 if info_array[0].skipped:
1296 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1297
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001298 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001299
1300 # Tweak some variables
1301 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001302 self.recipecaches[mc].ignored_dependencies = set()
1303 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001304 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001305
1306 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001307 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1308 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001309 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1310 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001311
1312 # Invalidate task for target if force mode active
1313 if self.configuration.force:
1314 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001315 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001316
1317 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001318 taskdata = {}
1319 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001320 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001321
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001322 if quietlog:
1323 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1324 bb.runqueue.logger.setLevel(logging.WARNING)
1325
1326 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1327 if fireevents:
1328 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001329
1330 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001331 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001333 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001334
1335 def buildFileIdle(server, rq, abort):
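# Idle callback registered with the server: it steps the runqueue on each call,
# returning True (or a delay) while work remains and False once the build has
# finished or been stopped.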
1336
1337 msg = None
1338 interrupted = 0
1339 if abort or self.state == state.forceshutdown:
1340 rq.finish_runqueue(True)
1341 msg = "Forced shutdown"
1342 interrupted = 2
1343 elif self.state == state.shutdown:
1344 rq.finish_runqueue(False)
1345 msg = "Stopped build"
1346 interrupted = 1
1347 failures = 0
1348 try:
1349 retval = rq.execute_runqueue()
1350 except runqueue.TaskFailure as exc:
1351 failures += len(exc.args)
1352 retval = False
1353 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001354 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001355 if quietlog:
1356 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001357 return False
1358
1359 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001360 if fireevents:
1361 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001362 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001363 # We trashed self.recipecaches above
1364 self.parsecache_valid = False
1365 self.configuration.limited_deps = False
1366 bb.parse.siggen.reset(self.data)
1367 if quietlog:
1368 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001369 return False
1370 if retval is True:
1371 return True
1372 return retval
1373
1374 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1375
1376 def buildTargets(self, targets, task):
1377 """
1378 Attempt to build the targets specified
1379 """
1380
1381 def buildTargetsIdle(server, rq, abort):
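# Idle callback for normal target builds: same pattern as buildFileIdle above,
# stepping the runqueue until it completes or is interrupted.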
1382 msg = None
1383 interrupted = 0
1384 if abort or self.state == state.forceshutdown:
1385 rq.finish_runqueue(True)
1386 msg = "Forced shutdown"
1387 interrupted = 2
1388 elif self.state == state.shutdown:
1389 rq.finish_runqueue(False)
1390 msg = "Stopped build"
1391 interrupted = 1
1392 failures = 0
1393 try:
1394 retval = rq.execute_runqueue()
1395 except runqueue.TaskFailure as exc:
1396 failures += len(exc.args)
1397 retval = False
1398 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001399 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001400 return False
1401
1402 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001403 try:
1404 for mc in self.multiconfigs:
1405 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1406 finally:
1407 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408 return False
1409 if retval is True:
1410 return True
1411 return retval
1412
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001413 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001414 self.buildSetVars()
1415
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001416 # If we are told to do the None task then query the default task
1417 if (task == None):
1418 task = self.configuration.cmd
1419
1420 if not task.startswith("do_"):
1421 task = "do_%s" % task
1422
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001423 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1424
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001425 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001426
1427 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001428
1429 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001430
1431 # make targets to always look as <target>:do_<task>
1432 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001433 for target in runlist:
1434 if target[0]:
1435 ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
1436 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001437
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001438 for mc in self.multiconfigs:
1439 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001440
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001441 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001442 if 'universe' in targets:
1443 rq.rqdata.warn_multi_bb = True
1444
1445 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1446
1447
1448 def getAllKeysWithFlags(self, flaglist):
1449 dump = {}
1450 for k in self.data.keys():
1451 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001452 expand = True
1453 flags = self.data.getVarFlags(k)
1454 if flags and "func" in flags and "python" in flags:
1455 expand = False
1456 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001457 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1458 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001459 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001460 'history' : self.data.varhistory.variable(k),
1461 }
1462 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001463 if flags and d in flags:
1464 dump[k][d] = flags[d]
1465 else:
1466 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001467 except Exception as e:
1468 print(e)
1469 return dump
1470
1471
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001472 def updateCacheSync(self):
1473 if self.state == state.running:
1474 return
1475
1476 # reload files for which we got notifications
1477 for p in self.inotify_modified_files:
1478 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001479 if p in bb.parse.BBHandler.cached_statements:
1480 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001481 self.inotify_modified_files = []
1482
1483 if not self.baseconfig_valid:
1484 logger.debug(1, "Reloading base configuration data")
1485 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001486 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001487
1488 # This is called for all async commands when self.state != running
1489 def updateCache(self):
1490 if self.state == state.running:
1491 return
1492
1493 if self.state in (state.shutdown, state.forceshutdown, state.error):
1494 if hasattr(self.parser, 'shutdown'):
1495 self.parser.shutdown(clean=False, force = True)
1496 raise bb.BBHandledException()
1497
1498 if self.state != state.parsing:
1499 self.updateCacheSync()
1500
1501 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001502 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001503 self.parseConfiguration ()
1504 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001505 for mc in self.multiconfigs:
1506 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001507
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001508 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001509 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001510 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001511
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001512 for dep in self.configuration.extra_assume_provided:
1513 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001515 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001516 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1517
1518 # Add inotify watches for directories searched for bb/bbappend files
1519 for dirent in searchdirs:
1520 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001521
1522 self.parser = CookerParser(self, filelist, masked)
1523 self.parsecache_valid = True
1524
1525 self.state = state.parsing
1526
1527 if not self.parser.parse_next():
1528 collectlog.debug(1, "parsing complete")
1529 if self.parser.error:
1530 raise bb.BBHandledException()
1531 self.show_appends_with_no_recipes()
1532 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001533 for mc in self.multiconfigs:
1534 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001535 self.state = state.running
1536
1537 # Send an event listing all stamps reachable after parsing
1538 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001539 for mc in self.multiconfigs:
1540 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1541 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001542 return None
1543
1544 return True
1545
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001546 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001547
1548 # Return a copy, don't modify the original
1549 pkgs_to_build = pkgs_to_build[:]
1550
1551 if len(pkgs_to_build) == 0:
1552 raise NothingToBuild
1553
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001554 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001555 for pkg in pkgs_to_build:
1556 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001557 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558
1559 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001560 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001561 for mc in self.multiconfigs:
1562 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1563 for t in self.recipecaches[mc].world_target:
1564 if mc:
1565 t = "multiconfig:" + mc + ":" + t
1566 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567
1568 if 'universe' in pkgs_to_build:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001569 parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001570 parselog.debug(1, "collating packages for \"universe\"")
1571 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001572 for mc in self.multiconfigs:
1573 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001574 if task:
1575 foundtask = False
1576 for provider_fn in self.recipecaches[mc].providers[t]:
1577 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1578 foundtask = True
1579 break
1580 if not foundtask:
1581 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1582 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001583 if mc:
1584 t = "multiconfig:" + mc + ":" + t
1585 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001586
1587 return pkgs_to_build
1588
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001589 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001590 # We now are in our own process so we can call this here.
1591 # PRServ exits if its parent process exits
1592 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001593 return
1594
1595 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001596 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001597 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001598
1599
1600 def shutdown(self, force = False):
1601 if force:
1602 self.state = state.forceshutdown
1603 else:
1604 self.state = state.shutdown
1605
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001606 if self.parser:
1607 self.parser.shutdown(clean=not force, force=force)
1608
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609 def finishcommand(self):
1610 self.state = state.initial
1611
1612 def reset(self):
1613 self.initConfigurationData()
1614
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001615 def clientComplete(self):
1616 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001617 self.finishcommand()
1618 self.extraconfigdata = {}
1619 self.command.reset()
1620 self.databuilder.reset()
1621 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001622
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001623
1624class CookerExit(bb.event.Event):
1625 """
1626 Notify clients of the Cooker shutdown
1627 """
1628
1629 def __init__(self):
1630 bb.event.Event.__init__(self)
1631
1632
1633class CookerCollectFiles(object):
1634 def __init__(self, priorities):
1635 self.bbappends = []
1636 self.bbfile_config_priorities = priorities
1637
1638 def calc_bbfile_priority( self, filename, matched = None ):
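# The first BBFILE_PATTERN regex that matches the filename determines the priority;
# matching regexes are recorded in 'matched' (when supplied) so callers can warn about
# patterns that never matched. Files matching no pattern get priority 0.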
1639 for _, _, regex, pri in self.bbfile_config_priorities:
1640 if regex.match(filename):
1641 if matched is not None:
1642 if regex not in matched:
1643 matched.add(regex)
1644 return pri
1645 return 0
1646
1647 def get_bbfiles(self):
1648 """Get list of default .bb files by reading out the current directory"""
1649 path = os.getcwd()
1650 contents = os.listdir(path)
1651 bbfiles = []
1652 for f in contents:
1653 if f.endswith(".bb"):
1654 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1655 return bbfiles
1656
1657 def find_bbfiles(self, path):
1658 """Find all the .bb and .bbappend files in a directory"""
1659 found = []
1660 for dir, dirs, files in os.walk(path):
1661 for ignored in ('SCCS', 'CVS', '.svn'):
1662 if ignored in dirs:
1663 dirs.remove(ignored)
1664 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1665
1666 return found
1667
1668 def collect_bbfiles(self, config, eventdata):
1669 """Collect all available .bb build files"""
1670 masked = 0
1671
1672 collectlog.debug(1, "collecting .bb files")
1673
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001674 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001675 config.setVar("BBFILES", " ".join(files))
1676
1677 # Sort files by priority
1678 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1679
1680 if not files:
1681 files = self.get_bbfiles()
1682
1683 if not files:
1684 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1685 bb.event.fire(CookerExit(), eventdata)
1686
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001687 # We need to track where we look so that we can add inotify watches. There
1688 # is no nice way to do this, this is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001689 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001690 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001691 if hasattr(os, 'scandir'):
1692 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001693 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001694
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001695 def ourlistdir(d):
1696 searchdirs.append(d)
1697 return origlistdir(d)
1698
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001699 def ourscandir(d):
1700 searchdirs.append(d)
1701 return origscandir(d)
1702
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001703 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001704 if hasattr(os, 'scandir'):
1705 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001706 try:
1707 # Can't use set here as order is important
1708 newfiles = []
1709 for f in files:
1710 if os.path.isdir(f):
1711 dirfiles = self.find_bbfiles(f)
1712 for g in dirfiles:
1713 if g not in newfiles:
1714 newfiles.append(g)
1715 else:
1716 globbed = glob.glob(f)
1717 if not globbed and os.path.exists(f):
1718 globbed = [f]
1719 # glob gives files in order on disk. Sort to be deterministic.
1720 for g in sorted(globbed):
1721 if g not in newfiles:
1722 newfiles.append(g)
1723 finally:
1724 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001725 if hasattr(os, 'scandir'):
1726 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001727
1728 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001729
1730 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001731 # First validate the individual regular expressions and ignore any
1732 # that do not compile
1733 bbmasks = []
1734 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001735 # When constructing an older style single regex, it's possible for BBMASK
1736 # to end up beginning with '|', which matches and masks _everything_.
1737 if mask.startswith("|"):
1738 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1739 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001740 try:
1741 re.compile(mask)
1742 bbmasks.append(mask)
1743 except sre_constants.error:
1744 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1745
1746 # Then validate the combined regular expressions. This should never
1747 # fail, but better safe than sorry...
1748 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001749 try:
1750 bbmask_compiled = re.compile(bbmask)
1751 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001752 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1753 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001754
1755 bbfiles = []
1756 bbappend = []
1757 for f in newfiles:
1758 if bbmask and bbmask_compiled.search(f):
1759 collectlog.debug(1, "skipping masked file %s", f)
1760 masked += 1
1761 continue
1762 if f.endswith('.bb'):
1763 bbfiles.append(f)
1764 elif f.endswith('.bbappend'):
1765 bbappend.append(f)
1766 else:
1767 collectlog.debug(1, "skipping %s: unknown file extension", f)
1768
1769 # Build a list of .bbappend files for each .bb file
1770 for f in bbappend:
1771 base = os.path.basename(f).replace('.bbappend', '.bb')
1772 self.bbappends.append((base, f))
1773
1774 # Find overlayed recipes
1775 # bbfiles will be in priority order which makes this easy
1776 bbfile_seen = dict()
1777 self.overlayed = defaultdict(list)
1778 for f in reversed(bbfiles):
1779 base = os.path.basename(f)
1780 if base not in bbfile_seen:
1781 bbfile_seen[base] = f
1782 else:
1783 topfile = bbfile_seen[base]
1784 self.overlayed[topfile].append(f)
1785
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001786 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001787
1788 def get_file_appends(self, fn):
1789 """
1790 Returns a list of .bbappend files to apply to fn
1791 """
1792 filelist = []
1793 f = os.path.basename(fn)
1794 for b in self.bbappends:
1795 (bbappend, filename) = b
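# A '%' in a bbappend name acts as a wildcard: the append is applied to any recipe
# whose file name matches the portion of the append name before the '%'.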
1796 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1797 filelist.append(filename)
1798 return filelist
1799
1800 def collection_priorities(self, pkgfns, d):
1801
1802 priorities = {}
1803
1804 # Calculate priorities for each file
1805 matched = set()
1806 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001807 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001808 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1809
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001810 unmatched = set()
1811 for _, _, regex, pri in self.bbfile_config_priorities:
1812 if regex not in matched:
1813 unmatched.add(regex)
1814
Brad Bishop316dfdd2018-06-25 12:45:53 -04001815 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1816 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001817 for b in self.bbappends:
1818 (bbfile, append) = b
1819 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001820 # If the bbappend is already matched by a regex in the matched set, return False
1821 for matched_regex in matched:
1822 if matched_regex.match(append):
1823 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001824 return True
1825 return False
1826
1827 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001828 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001829 unmatched.remove(unmatch)
1830
1831 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1832 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001833 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001834 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001835
1836 return priorities
1837
1838class ParsingFailure(Exception):
1839 def __init__(self, realexception, recipe):
1840 self.realexception = realexception
1841 self.recipe = recipe
1842 Exception.__init__(self, realexception, recipe)
1843
1844class Feeder(multiprocessing.Process):
1845 def __init__(self, jobs, to_parsers, quit):
1846 self.quit = quit
1847 self.jobs = jobs
1848 self.to_parsers = to_parsers
1849 multiprocessing.Process.__init__(self)
1850
1851 def run(self):
1852 while True:
1853 try:
1854 quit = self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001855 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001856 pass
1857 else:
1858 if quit == 'cancel':
1859 self.to_parsers.cancel_join_thread()
1860 break
1861
1862 try:
1863 job = self.jobs.pop()
1864 except IndexError:
1865 break
1866
1867 try:
1868 self.to_parsers.put(job, timeout=0.5)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001869 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001870 self.jobs.insert(0, job)
1871 continue
1872
1873class Parser(multiprocessing.Process):
1874 def __init__(self, jobs, results, quit, init, profile):
1875 self.jobs = jobs
1876 self.results = results
1877 self.quit = quit
1878 self.init = init
1879 multiprocessing.Process.__init__(self)
1880 self.context = bb.utils.get_context().copy()
1881 self.handlers = bb.event.get_class_handlers().copy()
1882 self.profile = profile
1883
1884 def run(self):
1885
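# When profiling is requested, run the parser under cProfile (falling back to the
# pure-Python profile module) and dump per-process statistics to a log file.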
1886 if not self.profile:
1887 self.realrun()
1888 return
1889
1890 try:
1891 import cProfile as profile
1892 except ImportError:
1893 import profile
1894 prof = profile.Profile()
1895 try:
1896 profile.Profile.runcall(prof, self.realrun)
1897 finally:
1898 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1899 prof.dump_stats(logfile)
1900
1901 def realrun(self):
1902 if self.init:
1903 self.init()
1904
1905 pending = []
1906 while True:
1907 try:
1908 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001909 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001910 pass
1911 else:
1912 self.results.cancel_join_thread()
1913 break
1914
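# A result that could not be queued last time round (the results queue was full)
# is retried before pulling a new job off the jobs queue.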
1915 if pending:
1916 result = pending.pop()
1917 else:
1918 try:
1919 job = self.jobs.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001920 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001921 continue
1922
1923 if job is None:
1924 break
1925 result = self.parse(*job)
1926
1927 try:
1928 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001929 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001930 pending.append(result)
1931
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001932 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001933 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001934 # Record the filename we're parsing into any events generated
1935 def parse_filter(self, record):
1936 record.taskpid = bb.event.worker_pid
1937 record.fn = filename
1938 return True
1939
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001940 # Reset our environment and handlers to the original settings
1941 bb.utils.set_context(self.context.copy())
1942 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001943 bb.event.LogHandler.filter = parse_filter
1944
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001945 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001946 except Exception as exc:
1947 tb = sys.exc_info()[2]
1948 exc.recipe = filename
1949 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1950 return True, exc
1951 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
1952 # and for example a worker thread doesn't just exit on its own in response to
1953 # a SystemExit event for example.
1954 except BaseException as exc:
1955 return True, ParsingFailure(exc, filename)
1956
1957class CookerParser(object):
1958 def __init__(self, cooker, filelist, masked):
1959 self.filelist = filelist
1960 self.cooker = cooker
1961 self.cfgdata = cooker.data
1962 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001963 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001964
1965 # Accounting statistics
1966 self.parsed = 0
1967 self.cached = 0
1968 self.error = 0
1969 self.masked = masked
1970
1971 self.skipped = 0
1972 self.virtuals = 0
1973 self.total = len(filelist)
1974
1975 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001976 self.process_names = []
1977
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001978 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001979 self.fromcache = []
1980 self.willparse = []
1981 for filename in self.filelist:
1982 appends = self.cooker.collection.get_file_appends(filename)
1983 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001984 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001985 else:
1986 self.fromcache.append((filename, appends))
1987 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001988 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001989
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001990 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001991 multiprocessing.cpu_count()), len(self.willparse))
1992
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001993 self.start()
1994 self.haveshutdown = False
1995
1996 def start(self):
1997 self.results = self.load_cached()
1998 self.processes = []
1999 if self.toparse:
2000 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2001 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002002 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002003 bb.utils.set_process_name(multiprocessing.current_process().name)
2004 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2005 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002006
2007 self.feeder_quit = multiprocessing.Queue(maxsize=1)
2008 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2009 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
2010 self.result_queue = multiprocessing.Queue()
2011 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
2012 self.feeder.start()
2013 for i in range(0, self.num_processes):
2014 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2015 parser.start()
2016 self.process_names.append(parser.name)
2017 self.processes.append(parser)
2018
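# Results are consumed lazily: entries loaded from the cache come first, followed by
# results arriving from the parser processes.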
2019 self.results = itertools.chain(self.results, self.parse_generator())
2020
2021 def shutdown(self, clean=True, force=False):
2022 if not self.toparse:
2023 return
2024 if self.haveshutdown:
2025 return
2026 self.haveshutdown = True
2027
2028 if clean:
2029 event = bb.event.ParseCompleted(self.cached, self.parsed,
2030 self.skipped, self.masked,
2031 self.virtuals, self.error,
2032 self.total)
2033
2034 bb.event.fire(event, self.cfgdata)
2035 self.feeder_quit.put(None)
2036 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002037 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002038 else:
2039 self.feeder_quit.put('cancel')
2040
2041 self.parser_quit.cancel_join_thread()
2042 for process in self.processes:
2043 self.parser_quit.put(None)
2044
2045 self.jobs.cancel_join_thread()
2046
2047 for process in self.processes:
2048 if force:
2049 process.join(.1)
2050 process.terminate()
2051 else:
2052 process.join()
2053 self.feeder.join()
2054
2055 sync = threading.Thread(target=self.bb_cache.sync)
2056 sync.start()
2057 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002058 bb.codeparser.parser_cache_savemerge()
2059 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002060 if self.cooker.configuration.profile:
2061 profiles = []
2062 for i in self.process_names:
2063 logfile = "profile-parse-%s.log" % i
2064 if os.path.exists(logfile):
2065 profiles.append(logfile)
2066
2067 pout = "profile-parse.log.processed"
2068 bb.utils.process_profilelog(profiles, pout = pout)
2069 print("Processed parsing statistics saved to %s" % (pout))
2070
2071 def load_cached(self):
2072 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002073 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002074 yield not cached, infos
2075
2076 def parse_generator(self):
2077 while True:
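# Stop once every file scheduled for real parsing has produced a result.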
2078 if self.parsed >= self.toparse:
2079 break
2080
2081 try:
2082 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002083 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002084 pass
2085 else:
2086 value = result[1]
2087 if isinstance(value, BaseException):
2088 raise value
2089 else:
2090 yield result
2091
2092 def parse_next(self):
2093 result = []
2094 parsed = None
2095 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002096 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002097 except StopIteration:
2098 self.shutdown()
2099 return False
2100 except bb.BBHandledException as exc:
2101 self.error += 1
2102 logger.error('Failed to parse recipe: %s' % exc.recipe)
2103 self.shutdown(clean=False)
2104 return False
2105 except ParsingFailure as exc:
2106 self.error += 1
2107 logger.error('Unable to parse %s: %s' %
2108 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2109 self.shutdown(clean=False)
2110 return False
2111 except bb.parse.ParseError as exc:
2112 self.error += 1
2113 logger.error(str(exc))
2114 self.shutdown(clean=False)
2115 return False
2116 except bb.data_smart.ExpansionError as exc:
2117 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002118 bbdir = os.path.dirname(__file__) + os.sep
2119 etype, value, _ = sys.exc_info()
2120 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2121 logger.error('ExpansionError during parsing %s', value.recipe,
2122 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002123 self.shutdown(clean=False)
2124 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002125 except Exception as exc:
2126 self.error += 1
2127 etype, value, tb = sys.exc_info()
2128 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002129 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002130 exc_info=(etype, value, exc.traceback))
2131 else:
2132 # Most likely, an exception occurred during raising an exception
2133 import traceback
2134 logger.error('Exception during parse: %s' % traceback.format_exc())
2135 self.shutdown(clean=False)
2136 return False
2137
2138 self.current += 1
2139 self.virtuals += len(result)
2140 if parsed:
2141 self.parsed += 1
2142 if self.parsed % self.progress_chunk == 0:
2143 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2144 self.cfgdata)
2145 else:
2146 self.cached += 1
2147
2148 for virtualfn, info_array in result:
2149 if info_array[0].skipped:
2150 self.skipped += 1
2151 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002152 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2153 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154 parsed=parsed, watcher = self.cooker.add_filewatch)
2155 return True
2156
2157 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002158 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002159 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002160 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2161 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)