#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

70class state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -060071 initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050072
Patrick Williamsd8c66bc2016-06-20 12:57:21 -050073 @classmethod
74 def get_name(cls, code):
75 for name in dir(cls):
76 value = getattr(cls, name)
77 if type(value) == type(cls.initial) and value == code:
78 return name
79 raise ValueError("Invalid status code: %s" % code)
80
Patrick Williamsc124f4f2015-09-15 14:41:29 -050081
82class SkippedPackage:
83 def __init__(self, info = None, reason = None):
84 self.pn = None
85 self.skipreason = None
86 self.provides = None
87 self.rprovides = None
88
89 if info:
90 self.pn = info.pn
91 self.skipreason = info.skipreason
92 self.provides = info.provides
93 self.rprovides = info.rprovides
94 elif reason:
95 self.skipreason = reason
96
97
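# The chained unpacking assignment below binds each feature name to an integer ID
# while also keeping the complete list in _feature_list, which setFeature() uses to
# reject unknown feature requests.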
class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


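# EventWriter serialises UI events to the event log file as one JSON object per line:
# {"class": "<module>.<class name>", "vars": <base64-encoded pickle of the event>}.
# Events are queued in memory until a BuildStarted (or CookerExit) event creates the
# file; its first line records all known variables with their doc/func flags.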
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        bb.parse.init_parser(self.data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()


        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

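    # Targets may carry a multiconfig prefix ("multiconfig:<mcname>:<target>") and an
    # explicit task suffix ("<target>:do_<task>"). buildTaskData() expands the wildcard
    # form "multiconfig:*:<target>" to every configured multiconfig before resolving
    # providers per configuration.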
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "multiconfig:*:bash"
        # into "multiconfig:A:bash multiconfig:B:bash bash"
        for k in targetlist:
            if k.startswith("multiconfig:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        mcdeps = taskdata[mc].get_mcdepends()
        # No need to do check providers if there are no mcdeps or not an mc build
        if mcdeps and mc:
            # Make sure we can provide the multiconfig dependency
            seen = set()
            new = True
            while new:
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True
        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

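    # e.g. add_mc_prefix("musl", "bash") returns "multiconfig:musl:bash", while an
    # empty mc leaves the recipe name unchanged.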
    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "multiconfig:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


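    # Layer priority resolution: a collection either sets BBFILE_PRIORITY_<name>
    # explicitly or has its priority computed by calc_layer_priority() as one more
    # than the highest priority among the layers it depends on or recommends.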
    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                    errors = False
                    continue
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

1208 def buildSetVars(self):
1209 """
1210 Setup any variables needed before starting a build
1211 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001212 t = time.gmtime()
1213 for mc in self.databuilder.mcdata:
1214 ds = self.databuilder.mcdata[mc]
1215 if not ds.getVar("BUILDNAME", False):
1216 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1217 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1218 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1219 ds.setVar("TIME", time.strftime('%H%M%S', t))
1220
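    # Illustrative sketch (not part of the original source): for a build started at
    # 2018-06-25 12:45:53 UTC, each multiconfig datastore ends up with
    #   DATE       = "20180625"
    #   TIME       = "124553"
    #   BUILDSTART = "06/25/2018 12:45:53"
    # and BUILDNAME defaults to "${DATE}${TIME}" (expanding to "20180625124553")
    # unless the metadata has already set it.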
1221 def reset_mtime_caches(self):
1222 """
1223        Reset mtime caches - this is particularly important when memory resident, as
1224        something which is cached may well have changed since the last invocation (e.g.
1225        a file associated with a recipe might have been modified by the user).
1226 """
1227 build.reset_cache()
1228 bb.fetch._checksum_cache.mtime_cache.clear()
1229 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1230 if siggen_cache:
1231 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001232
1233 def matchFiles(self, bf):
1234 """
1235 Find the .bb files which match the expression in 'buildfile'.
1236 """
1237 if bf.startswith("/") or bf.startswith("../"):
1238 bf = os.path.abspath(bf)
1239
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001240 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001241 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001242 try:
1243 os.stat(bf)
1244 bf = os.path.abspath(bf)
1245 return [bf]
1246 except OSError:
1247 regexp = re.compile(bf)
1248 matches = []
1249 for f in filelist:
1250 if regexp.search(f) and os.path.isfile(f):
1251 matches.append(f)
1252 return matches
1253
1254 def matchFile(self, buildfile):
1255 """
1256 Find the .bb file which matches the expression in 'buildfile'.
1257 Raise an error if multiple files
1258        Raise an error if multiple files match.
1259 matches = self.matchFiles(buildfile)
1260 if len(matches) != 1:
1261 if matches:
1262 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1263 if matches:
1264 for f in matches:
1265 msg += "\n %s" % f
1266 parselog.error(msg)
1267 else:
1268 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1269 raise NoSpecificMatch
1270 return matches[0]
1271
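    # Illustrative sketch (not part of the original source): matchFiles() first tries the
    # argument as a literal path via os.stat(); only when that fails is it treated as a
    # regular expression over the collected recipe list, so a hypothetical
    #   self.matchFile(r"busybox_1\.29.*\.bb")
    # returns the single matching path, while zero or multiple matches are logged and
    # NoSpecificMatch is raised.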
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001272 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001273 """
1274 Build the file matching regexp buildfile
1275 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001276 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001277
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001278 # Too many people use -b because they think it's how you normally
1279 # specify a target to be built, so show a warning
1280 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1281
1282 self.buildFileInternal(buildfile, task)
1283
1284 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1285 """
1286 Build the file matching regexp buildfile
1287 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001288
1289 # Parse the configuration here. We need to do it explicitly here since
1290 # buildFile() doesn't use the cache
1291 self.parseConfiguration()
1292
1293 # If we are told to do the None task then query the default task
1294 if (task == None):
1295 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001296 if not task.startswith("do_"):
1297 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001298
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001299 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001300 fn = self.matchFile(fn)
1301
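        # Illustrative sketch (not part of the original source): assuming the usual virtual
        # filename form, bb.cache.virtualfn2realfn() splits any class prefix off the path,
        #   "virtual:native:/path/foo_1.0.bb" -> fn="/path/foo_1.0.bb", cls="native", mc=""
        # and bb.cache.realfn2virtual() below re-applies cls/mc once the real file has been
        # matched against the collected recipes.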
1302 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001303 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001304
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001305 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1306
1307 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001308 infos = dict(infos)
1309
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001310 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001311 try:
1312 info_array = infos[fn]
1313 except KeyError:
1314 bb.fatal("%s does not exist" % fn)
1315
1316 if info_array[0].skipped:
1317 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1318
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001319 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001320
1321 # Tweak some variables
1322 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001323 self.recipecaches[mc].ignored_dependencies = set()
1324 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001325 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001326
1327 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001328 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1329 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001330 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1331 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001332
1333 # Invalidate task for target if force mode active
1334 if self.configuration.force:
1335 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001336 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001337
1338 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001339 taskdata = {}
1340 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001341 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001342
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001343 if quietlog:
1344 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1345 bb.runqueue.logger.setLevel(logging.WARNING)
1346
1347 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1348 if fireevents:
1349 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350
1351 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001352 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001353
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001354 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001355
1356 def buildFileIdle(server, rq, abort):
1357
1358 msg = None
1359 interrupted = 0
1360 if abort or self.state == state.forceshutdown:
1361 rq.finish_runqueue(True)
1362 msg = "Forced shutdown"
1363 interrupted = 2
1364 elif self.state == state.shutdown:
1365 rq.finish_runqueue(False)
1366 msg = "Stopped build"
1367 interrupted = 1
1368 failures = 0
1369 try:
1370 retval = rq.execute_runqueue()
1371 except runqueue.TaskFailure as exc:
1372 failures += len(exc.args)
1373 retval = False
1374 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001375 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001376 if quietlog:
1377 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378 return False
1379
1380 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001381 if fireevents:
1382 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001384 # We trashed self.recipecaches above
1385 self.parsecache_valid = False
1386 self.configuration.limited_deps = False
1387 bb.parse.siggen.reset(self.data)
1388 if quietlog:
1389 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001390 return False
1391 if retval is True:
1392 return True
1393 return retval
1394
1395 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1396
1397 def buildTargets(self, targets, task):
1398 """
1399 Attempt to build the targets specified
1400 """
1401
1402 def buildTargetsIdle(server, rq, abort):
1403 msg = None
1404 interrupted = 0
1405 if abort or self.state == state.forceshutdown:
1406 rq.finish_runqueue(True)
1407 msg = "Forced shutdown"
1408 interrupted = 2
1409 elif self.state == state.shutdown:
1410 rq.finish_runqueue(False)
1411 msg = "Stopped build"
1412 interrupted = 1
1413 failures = 0
1414 try:
1415 retval = rq.execute_runqueue()
1416 except runqueue.TaskFailure as exc:
1417 failures += len(exc.args)
1418 retval = False
1419 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001420 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001421 return False
1422
1423 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001424 try:
1425 for mc in self.multiconfigs:
1426 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1427 finally:
1428 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001429 return False
1430 if retval is True:
1431 return True
1432 return retval
1433
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001434 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001435 self.buildSetVars()
1436
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001437 # If we are told to do the None task then query the default task
1438 if (task == None):
1439 task = self.configuration.cmd
1440
1441 if not task.startswith("do_"):
1442 task = "do_%s" % task
1443
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001444 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1445
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001446 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001447
1448 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001449
1450 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001451
1452        # make targets always look like <target>:do_<task>
1453 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001454 for target in runlist:
1455 if target[0]:
1456 ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
1457 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001458
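        # Illustrative sketch (not part of the original source): a runlist entry for a
        # hypothetical target "core-image-minimal" with task "do_build" adds
        #   "core-image-minimal:do_build"
        # to ntargets; if it belongs to a named multiconfig "mymc" (target[0] non-empty),
        #   "multiconfig:mymc:core-image-minimal:do_build"
        # is recorded as well.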
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001459 for mc in self.multiconfigs:
1460 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001461
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001462 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001463 if 'universe' in targets:
1464 rq.rqdata.warn_multi_bb = True
1465
1466 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1467
1468
1469 def getAllKeysWithFlags(self, flaglist):
1470 dump = {}
1471 for k in self.data.keys():
1472 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001473 expand = True
1474 flags = self.data.getVarFlags(k)
1475 if flags and "func" in flags and "python" in flags:
1476 expand = False
1477 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001478 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1479 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001480 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001481 'history' : self.data.varhistory.variable(k),
1482 }
1483 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001484 if flags and d in flags:
1485 dump[k][d] = flags[d]
1486 else:
1487 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001488 except Exception as e:
1489 print(e)
1490 return dump
1491
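    # Illustrative sketch (not part of the original source): for a hypothetical
    # flaglist=["doc"], each entry of the returned mapping looks roughly like
    #   dump["DESCRIPTION"] = {
    #       "v": "<value, left unexpanded for python functions>",
    #       "history": <variable history from self.data.varhistory>,
    #       "doc": "<flag value, or None when the flag is unset>",
    #   }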
1492
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001493 def updateCacheSync(self):
1494 if self.state == state.running:
1495 return
1496
1497 # reload files for which we got notifications
1498 for p in self.inotify_modified_files:
1499 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001500 if p in bb.parse.BBHandler.cached_statements:
1501 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001502 self.inotify_modified_files = []
1503
1504 if not self.baseconfig_valid:
1505 logger.debug(1, "Reloading base configuration data")
1506 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001507 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001508
1509 # This is called for all async commands when self.state != running
1510 def updateCache(self):
1511 if self.state == state.running:
1512 return
1513
1514 if self.state in (state.shutdown, state.forceshutdown, state.error):
1515 if hasattr(self.parser, 'shutdown'):
1516 self.parser.shutdown(clean=False, force = True)
1517 raise bb.BBHandledException()
1518
1519 if self.state != state.parsing:
1520 self.updateCacheSync()
1521
1522 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001523 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001524 self.parseConfiguration ()
1525 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001526 for mc in self.multiconfigs:
1527 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001528
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001529 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001530 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001531 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001533 for dep in self.configuration.extra_assume_provided:
1534 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001535
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001536 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001537 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1538
1539 # Add inotify watches for directories searched for bb/bbappend files
1540 for dirent in searchdirs:
1541 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001542
1543 self.parser = CookerParser(self, filelist, masked)
1544 self.parsecache_valid = True
1545
1546 self.state = state.parsing
1547
1548 if not self.parser.parse_next():
1549 collectlog.debug(1, "parsing complete")
1550 if self.parser.error:
1551 raise bb.BBHandledException()
1552 self.show_appends_with_no_recipes()
1553 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001554 for mc in self.multiconfigs:
1555 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001556 self.state = state.running
1557
1558 # Send an event listing all stamps reachable after parsing
1559 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001560 for mc in self.multiconfigs:
1561 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1562 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001563 return None
1564
1565 return True
1566
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001567 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568
1569 # Return a copy, don't modify the original
1570 pkgs_to_build = pkgs_to_build[:]
1571
1572 if len(pkgs_to_build) == 0:
1573 raise NothingToBuild
1574
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001575 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001576 for pkg in pkgs_to_build:
1577 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001578 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001579
1580 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001581 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001582 for mc in self.multiconfigs:
1583 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1584 for t in self.recipecaches[mc].world_target:
1585 if mc:
1586 t = "multiconfig:" + mc + ":" + t
1587 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001588
1589 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001590 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001591 parselog.debug(1, "collating packages for \"universe\"")
1592 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001593 for mc in self.multiconfigs:
1594 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001595 if task:
1596 foundtask = False
1597 for provider_fn in self.recipecaches[mc].providers[t]:
1598 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1599 foundtask = True
1600 break
1601 if not foundtask:
1602 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1603 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001604 if mc:
1605 t = "multiconfig:" + mc + ":" + t
1606 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001607
1608 return pkgs_to_build
1609
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001610 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001611        # We are now in our own process, so we can call this here.
1612 # PRServ exits if its parent process exits
1613 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001614 return
1615
1616 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001617 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001618 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001619
1620
1621 def shutdown(self, force = False):
1622 if force:
1623 self.state = state.forceshutdown
1624 else:
1625 self.state = state.shutdown
1626
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001627 if self.parser:
1628 self.parser.shutdown(clean=not force, force=force)
1629
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001630 def finishcommand(self):
1631 self.state = state.initial
1632
1633 def reset(self):
1634 self.initConfigurationData()
1635
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001636 def clientComplete(self):
1637 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001638 self.finishcommand()
1639 self.extraconfigdata = {}
1640 self.command.reset()
1641 self.databuilder.reset()
1642 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001643
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644
1645class CookerExit(bb.event.Event):
1646 """
1647 Notify clients of the Cooker shutdown
1648 """
1649
1650 def __init__(self):
1651 bb.event.Event.__init__(self)
1652
1653
1654class CookerCollectFiles(object):
1655 def __init__(self, priorities):
1656 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001657        # Priorities is a list of tuples, with the second element as the pattern.
1658 # We need to sort the list with the longest pattern first, and so on to
1659 # the shortest. This allows nested layers to be properly evaluated.
1660 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001661
1662 def calc_bbfile_priority( self, filename, matched = None ):
1663 for _, _, regex, pri in self.bbfile_config_priorities:
1664 if regex.match(filename):
1665 if matched != None:
1666 if not regex in matched:
1667 matched.add(regex)
1668 return pri
1669 return 0
1670
1671 def get_bbfiles(self):
1672        """Get the list of default .bb files by reading the current directory"""
1673 path = os.getcwd()
1674 contents = os.listdir(path)
1675 bbfiles = []
1676 for f in contents:
1677 if f.endswith(".bb"):
1678 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1679 return bbfiles
1680
1681 def find_bbfiles(self, path):
1682 """Find all the .bb and .bbappend files in a directory"""
1683 found = []
1684 for dir, dirs, files in os.walk(path):
1685 for ignored in ('SCCS', 'CVS', '.svn'):
1686 if ignored in dirs:
1687 dirs.remove(ignored)
1688 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1689
1690 return found
1691
1692 def collect_bbfiles(self, config, eventdata):
1693 """Collect all available .bb build files"""
1694 masked = 0
1695
1696 collectlog.debug(1, "collecting .bb files")
1697
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001698 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001699 config.setVar("BBFILES", " ".join(files))
1700
1701 # Sort files by priority
1702 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1703
1704 if not len(files):
1705 files = self.get_bbfiles()
1706
1707 if not len(files):
1708 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1709 bb.event.fire(CookerExit(), eventdata)
1710
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001711 # We need to track where we look so that we can add inotify watches. There
1712        # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001713 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001714 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001715 if hasattr(os, 'scandir'):
1716 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001717 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001718
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001719 def ourlistdir(d):
1720 searchdirs.append(d)
1721 return origlistdir(d)
1722
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001723 def ourscandir(d):
1724 searchdirs.append(d)
1725 return origscandir(d)
1726
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001727 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001728 if hasattr(os, 'scandir'):
1729 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001730 try:
1731 # Can't use set here as order is important
1732 newfiles = []
1733 for f in files:
1734 if os.path.isdir(f):
1735 dirfiles = self.find_bbfiles(f)
1736 for g in dirfiles:
1737 if g not in newfiles:
1738 newfiles.append(g)
1739 else:
1740 globbed = glob.glob(f)
1741 if not globbed and os.path.exists(f):
1742 globbed = [f]
1743 # glob gives files in order on disk. Sort to be deterministic.
1744 for g in sorted(globbed):
1745 if g not in newfiles:
1746 newfiles.append(g)
1747 finally:
1748 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001749 if hasattr(os, 'scandir'):
1750 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001751
1752 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001753
1754 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001755 # First validate the individual regular expressions and ignore any
1756 # that do not compile
1757 bbmasks = []
1758 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001759 # When constructing an older style single regex, it's possible for BBMASK
1760 # to end up beginning with '|', which matches and masks _everything_.
1761 if mask.startswith("|"):
1762 collectlog.warn("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1763 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001764 try:
1765 re.compile(mask)
1766 bbmasks.append(mask)
1767 except sre_constants.error:
1768 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1769
1770 # Then validate the combined regular expressions. This should never
1771 # fail, but better safe than sorry...
1772 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001773 try:
1774 bbmask_compiled = re.compile(bbmask)
1775 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001776 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1777 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001778
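        # Illustrative sketch (not part of the original source): for a hypothetical
        #   BBMASK = "meta-foo/recipes-broken/ /meta-bar/.*-native"
        # each entry is first compiled on its own (invalid ones are dropped with a message),
        # then the survivors are joined into a single alternation,
        #   bbmask_compiled = re.compile("meta-foo/recipes-broken/|/meta-bar/.*-native")
        # which is applied to every collected path below to decide which files are masked.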
1779 bbfiles = []
1780 bbappend = []
1781 for f in newfiles:
1782 if bbmask and bbmask_compiled.search(f):
1783 collectlog.debug(1, "skipping masked file %s", f)
1784 masked += 1
1785 continue
1786 if f.endswith('.bb'):
1787 bbfiles.append(f)
1788 elif f.endswith('.bbappend'):
1789 bbappend.append(f)
1790 else:
1791 collectlog.debug(1, "skipping %s: unknown file extension", f)
1792
1793 # Build a list of .bbappend files for each .bb file
1794 for f in bbappend:
1795 base = os.path.basename(f).replace('.bbappend', '.bb')
1796 self.bbappends.append((base, f))
1797
1798 # Find overlayed recipes
1799 # bbfiles will be in priority order which makes this easy
1800 bbfile_seen = dict()
1801 self.overlayed = defaultdict(list)
1802 for f in reversed(bbfiles):
1803 base = os.path.basename(f)
1804 if base not in bbfile_seen:
1805 bbfile_seen[base] = f
1806 else:
1807 topfile = bbfile_seen[base]
1808 self.overlayed[topfile].append(f)
1809
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001810 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001811
1812 def get_file_appends(self, fn):
1813 """
1814 Returns a list of .bbappend files to apply to fn
1815 """
1816 filelist = []
1817 f = os.path.basename(fn)
1818 for b in self.bbappends:
1819 (bbappend, filename) = b
1820 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1821 filelist.append(filename)
1822 return filelist
1823
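    # Illustrative sketch (not part of the original source): the '%' acts as a wildcard on
    # the tail of the bbappend name, so a hypothetical busybox_%.bbappend (tracked here as
    # "busybox_%.bb") is returned for fn ".../busybox_1.29.3.bb" because the text before
    # '%' matches the start of the recipe's basename, whereas busybox_1.30.bbappend only
    # applies to a recipe whose basename is exactly "busybox_1.30.bb".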
1824 def collection_priorities(self, pkgfns, d):
1825
1826 priorities = {}
1827
1828 # Calculate priorities for each file
1829 matched = set()
1830 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001831 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001832 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1833
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001834 unmatched = set()
1835 for _, _, regex, pri in self.bbfile_config_priorities:
1836 if not regex in matched:
1837 unmatched.add(regex)
1838
Brad Bishop316dfdd2018-06-25 12:45:53 -04001839 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1840 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001841 for b in self.bbappends:
1842 (bbfile, append) = b
1843 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001844                    # If the bbappend is already matched by the "matched" set, return False
1845 for matched_regex in matched:
1846 if matched_regex.match(append):
1847 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001848 return True
1849 return False
1850
1851 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001852 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001853 unmatched.remove(unmatch)
1854
1855 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1856 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001857 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001858 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001859
1860 return priorities
1861
1862class ParsingFailure(Exception):
1863 def __init__(self, realexception, recipe):
1864 self.realexception = realexception
1865 self.recipe = recipe
1866 Exception.__init__(self, realexception, recipe)
1867
1868class Feeder(multiprocessing.Process):
1869 def __init__(self, jobs, to_parsers, quit):
1870 self.quit = quit
1871 self.jobs = jobs
1872 self.to_parsers = to_parsers
1873 multiprocessing.Process.__init__(self)
1874
1875 def run(self):
1876 while True:
1877 try:
1878 quit = self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001879 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001880 pass
1881 else:
1882 if quit == 'cancel':
1883 self.to_parsers.cancel_join_thread()
1884 break
1885
1886 try:
1887 job = self.jobs.pop()
1888 except IndexError:
1889 break
1890
1891 try:
1892 self.to_parsers.put(job, timeout=0.5)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001893 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001894 self.jobs.insert(0, job)
1895 continue
1896
1897class Parser(multiprocessing.Process):
1898 def __init__(self, jobs, results, quit, init, profile):
1899 self.jobs = jobs
1900 self.results = results
1901 self.quit = quit
1902 self.init = init
1903 multiprocessing.Process.__init__(self)
1904 self.context = bb.utils.get_context().copy()
1905 self.handlers = bb.event.get_class_handlers().copy()
1906 self.profile = profile
1907
1908 def run(self):
1909
1910 if not self.profile:
1911 self.realrun()
1912 return
1913
1914 try:
1915 import cProfile as profile
1916 except:
1917 import profile
1918 prof = profile.Profile()
1919 try:
1920 profile.Profile.runcall(prof, self.realrun)
1921 finally:
1922 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1923 prof.dump_stats(logfile)
1924
1925 def realrun(self):
1926 if self.init:
1927 self.init()
1928
1929 pending = []
1930 while True:
1931 try:
1932 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001933 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001934 pass
1935 else:
1936 self.results.cancel_join_thread()
1937 break
1938
1939 if pending:
1940 result = pending.pop()
1941 else:
1942 try:
1943 job = self.jobs.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001944 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001945 continue
1946
1947 if job is None:
1948 break
1949 result = self.parse(*job)
1950
1951 try:
1952 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001953 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001954 pending.append(result)
1955
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001956 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001957 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001958 # Record the filename we're parsing into any events generated
1959 def parse_filter(self, record):
1960 record.taskpid = bb.event.worker_pid
1961 record.fn = filename
1962 return True
1963
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001964 # Reset our environment and handlers to the original settings
1965 bb.utils.set_context(self.context.copy())
1966 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001967 bb.event.LogHandler.filter = parse_filter
1968
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001969 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001970 except Exception as exc:
1971 tb = sys.exc_info()[2]
1972 exc.recipe = filename
1973 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1974 return True, exc
1975 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
1976 # and for example a worker thread doesn't just exit on its own in response to
1977 # a SystemExit event for example.
1978 except BaseException as exc:
1979 return True, ParsingFailure(exc, filename)
1980
1981class CookerParser(object):
1982 def __init__(self, cooker, filelist, masked):
1983 self.filelist = filelist
1984 self.cooker = cooker
1985 self.cfgdata = cooker.data
1986 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001987 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001988
1989 # Accounting statistics
1990 self.parsed = 0
1991 self.cached = 0
1992 self.error = 0
1993 self.masked = masked
1994
1995 self.skipped = 0
1996 self.virtuals = 0
1997 self.total = len(filelist)
1998
1999 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002000 self.process_names = []
2001
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002002 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002003 self.fromcache = []
2004 self.willparse = []
2005 for filename in self.filelist:
2006 appends = self.cooker.collection.get_file_appends(filename)
2007 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002008 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002009 else:
2010 self.fromcache.append((filename, appends))
2011 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002012 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002013
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002014 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002015 multiprocessing.cpu_count()), len(self.willparse))
2016
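        # Illustrative sketch (not part of the original source): with a hypothetical 2000
        # recipes left to parse, progress_chunk is int(max(2000 / 100, 1)) = 20, so
        # ParseProgress fires roughly every 20 parsed recipes; on an 8-core machine with
        # BB_NUMBER_PARSE_THREADS unset, num_processes is min(8, len(self.willparse)).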
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002017 self.start()
2018 self.haveshutdown = False
2019
2020 def start(self):
2021 self.results = self.load_cached()
2022 self.processes = []
2023 if self.toparse:
2024 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2025 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002026 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002027 bb.utils.set_process_name(multiprocessing.current_process().name)
2028 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2029 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002030
2031 self.feeder_quit = multiprocessing.Queue(maxsize=1)
2032 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2033 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
2034 self.result_queue = multiprocessing.Queue()
2035 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
2036 self.feeder.start()
2037 for i in range(0, self.num_processes):
2038 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2039 parser.start()
2040 self.process_names.append(parser.name)
2041 self.processes.append(parser)
2042
2043 self.results = itertools.chain(self.results, self.parse_generator())
2044
2045 def shutdown(self, clean=True, force=False):
2046 if not self.toparse:
2047 return
2048 if self.haveshutdown:
2049 return
2050 self.haveshutdown = True
2051
2052 if clean:
2053 event = bb.event.ParseCompleted(self.cached, self.parsed,
2054 self.skipped, self.masked,
2055 self.virtuals, self.error,
2056 self.total)
2057
2058 bb.event.fire(event, self.cfgdata)
2059 self.feeder_quit.put(None)
2060 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002061 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002062 else:
2063 self.feeder_quit.put('cancel')
2064
2065 self.parser_quit.cancel_join_thread()
2066 for process in self.processes:
2067 self.parser_quit.put(None)
2068
2069 self.jobs.cancel_join_thread()
2070
2071 for process in self.processes:
2072 if force:
2073 process.join(.1)
2074 process.terminate()
2075 else:
2076 process.join()
2077 self.feeder.join()
2078
2079 sync = threading.Thread(target=self.bb_cache.sync)
2080 sync.start()
2081 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002082 bb.codeparser.parser_cache_savemerge()
2083 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002084 if self.cooker.configuration.profile:
2085 profiles = []
2086 for i in self.process_names:
2087 logfile = "profile-parse-%s.log" % i
2088 if os.path.exists(logfile):
2089 profiles.append(logfile)
2090
2091 pout = "profile-parse.log.processed"
2092 bb.utils.process_profilelog(profiles, pout = pout)
2093 print("Processed parsing statistics saved to %s" % (pout))
2094
2095 def load_cached(self):
2096 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002097 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002098 yield not cached, infos
2099
2100 def parse_generator(self):
2101 while True:
2102 if self.parsed >= self.toparse:
2103 break
2104
2105 try:
2106 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002107 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002108 pass
2109 else:
2110 value = result[1]
2111 if isinstance(value, BaseException):
2112 raise value
2113 else:
2114 yield result
2115
2116 def parse_next(self):
2117 result = []
2118 parsed = None
2119 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002120 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002121 except StopIteration:
2122 self.shutdown()
2123 return False
2124 except bb.BBHandledException as exc:
2125 self.error += 1
2126 logger.error('Failed to parse recipe: %s' % exc.recipe)
2127 self.shutdown(clean=False)
2128 return False
2129 except ParsingFailure as exc:
2130 self.error += 1
2131 logger.error('Unable to parse %s: %s' %
2132 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2133 self.shutdown(clean=False)
2134 return False
2135 except bb.parse.ParseError as exc:
2136 self.error += 1
2137 logger.error(str(exc))
2138 self.shutdown(clean=False)
2139 return False
2140 except bb.data_smart.ExpansionError as exc:
2141 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002142 bbdir = os.path.dirname(__file__) + os.sep
2143 etype, value, _ = sys.exc_info()
2144 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2145 logger.error('ExpansionError during parsing %s', value.recipe,
2146 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002147 self.shutdown(clean=False)
2148 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002149 except Exception as exc:
2150 self.error += 1
2151 etype, value, tb = sys.exc_info()
2152 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002153 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154 exc_info=(etype, value, exc.traceback))
2155 else:
2156 # Most likely, an exception occurred during raising an exception
2157 import traceback
2158 logger.error('Exception during parse: %s' % traceback.format_exc())
2159 self.shutdown(clean=False)
2160 return False
2161
2162 self.current += 1
2163 self.virtuals += len(result)
2164 if parsed:
2165 self.parsed += 1
2166 if self.parsed % self.progress_chunk == 0:
2167 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2168 self.cfgdata)
2169 else:
2170 self.cached += 1
2171
2172 for virtualfn, info_array in result:
2173 if info_array[0].skipped:
2174 self.skipped += 1
2175 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002176 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2177 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002178 parsed=parsed, watcher = self.cooker.add_filewatch)
2179 return True
2180
2181 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002182 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002183 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002184 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2185 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)