#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


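# Helper used when event logging to a file is requested (see the
# "writeeventlog" configuration option handled in BBCooker.__init__ below).
# Events are queued until bb.event.BuildStarted (or bb.cooker.CookerExit)
# arrives; the file is then created with a first line dumping all variables,
# and every subsequent line is a JSON object carrying the event class name
# plus the base64-encoded pickle of the event, roughly (illustrative only):
#   {"class": "bb.event.BuildCompleted", "vars": "<base64-encoded pickle>"}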
class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
        sys.stdout.flush()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

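    # pyinotify callbacks: config_notifications() fires for files feeding the
    # base configuration (configwatcher), notifications() for recipe/parse
    # inputs (watcher). A change clears the corresponding *_valid flag so the
    # data is re-parsed on the next request; IN_Q_OVERFLOW means events were
    # lost, so the caches are conservatively invalidated wholesale.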
    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

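    # Watch the directories containing each dependency file. Files that do not
    # exist yet still matter (their appearance would change the parse result),
    # so when a parent directory is missing we walk up the path until a
    # directory that does exist can be watched (the ENOENT loop below).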
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

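    # (Re-)parse the base configuration: reset cooker state, rebuild the list
    # of cache classes (CoreRecipeInfo plus any extra caches enabled via
    # features) and recreate the datastore through bb.cookerdata. Note that
    # BB_CONSOLELOG is carried over from any previous datastore so a reset
    # does not lose the console log setting.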
343 def initConfigurationData(self):
344
345 self.state = state.initial
346 self.caches_array = []
347
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500348 # Need to preserve BB_CONSOLELOG over resets
349 consolelog = None
350 if hasattr(self, "data"):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500351 consolelog = self.data.getVar("BB_CONSOLELOG")
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500352
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500353 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
354 self.enableDataTracking()
355
356 all_extra_cache_names = []
357 # We hardcode all known cache types in a single place, here.
358 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
359 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
360
361 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
362
363 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
364 # This is the entry point, no further check needed!
365 for var in caches_name_array:
366 try:
367 module_name, cache_name = var.split(':')
368 module = __import__(module_name, fromlist=(cache_name,))
369 self.caches_array.append(getattr(module, cache_name))
370 except ImportError as exc:
371 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
372 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
373
374 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
375 self.databuilder.parseBaseConfiguration()
376 self.data = self.databuilder.data
377 self.data_hash = self.databuilder.data_hash
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500378 self.extraconfigdata = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500379
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500380 if consolelog:
381 self.data.setVar("BB_CONSOLELOG", consolelog)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500382
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500383 self.data.setVar('BB_CMDLINE', self.ui_cmdline)
384
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500385 #
386 # Copy of the data store which has been expanded.
387 # Used for firing events and accessing variables where expansion needs to be accounted for
388 #
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500389 bb.parse.init_parser(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500390
391 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
392 self.disableDataTracking()
393
394 self.data.renameVar("__depends", "__base_depends")
395 self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
396
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500397 self.baseconfig_valid = True
398 self.parsecache_valid = False
399
    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

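    # Merge option and environment changes handed over by the UI. Only the
    # 'prefile'/'postfile' options and variables listed by
    # bb.utils.approved_variables() can dirty the configuration; when any of
    # them changed, reset() is called to force a reparse.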
    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()


        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

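    # Targets may carry a multiconfig prefix and an explicit task, e.g. the
    # (illustrative) "multiconfig:musl:bash:do_fetch"; "multiconfig:*:bash" is
    # expanded to one target per configured multiconfig. Each resulting
    # runlist entry has the form [mc, target, taskname, filename].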
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "multiconfig:*:bash"
        # into "multiconfig:A:bash multiconfig:B:bash bash"
        for k in targetlist:
            if k.startswith("multiconfig:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)


        # No need to do check providers if there are no mcdeps or not an mc build
        if len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "multiconfig:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

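    # Writes the dependency data as graphviz input: 'pn-buildlist' (plain list
    # of recipes), 'task-depends.dot' (task-level graph) and
    # 'recipe-depends.dot' (the task graph flattened to recipe level). Old
    # pn-depends.dot/package-depends.dot outputs are deleted so stale files
    # are not mistaken for current data.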
    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in sorted(depgraph["tdepends"][task]):
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in sorted(pndeps):
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in sorted(pndeps[pn]):
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

Patrick Williamsc124f4f2015-09-15 14:41:29 -0500937 def show_appends_with_no_recipes(self):
938 # Determine which bbappends haven't been applied
939
940 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600941 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500942 recipefns.extend(self.skiplist.keys())
943
944 # Work out list of bbappends that have been applied
945 applied_appends = []
946 for fn in recipefns:
947 applied_appends.extend(self.collection.get_file_appends(fn))
948
949 appends_without_recipes = []
950 for _, appendfn in self.collection.bbappends:
951 if not appendfn in applied_appends:
952 appends_without_recipes.append(appendfn)
953
954 if appends_without_recipes:
955 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
956 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
957 False) or "no"
958 if warn_only.lower() in ("1", "yes", "true"):
959 bb.warn(msg)
960 else:
961 bb.fatal(msg)
962
963 def handlePrefProviders(self):
964
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600965 for mc in self.multiconfigs:
966 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600967 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500968
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600969 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500970 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600971 try:
972 (providee, provider) = p.split(':')
973 except:
974 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
975 continue
976 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
977 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
978 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500979
    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


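    # Parse the BBFILE_COLLECTIONS layer configuration. Each layer is given a
    # priority, either its explicit BBFILE_PRIORITY_<name> value or one
    # computed from LAYERDEPENDS/LAYERRECOMMENDS (one higher than the highest
    # priority among its dependencies), and (collection, pattern, compiled
    # regex, priority) tuples are stored in self.bbfile_config_priorities for
    # later BBFILE_PATTERN matching.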
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001111 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001112 """Handle collections"""
1113 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001114 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001115 if collections:
1116 collection_priorities = {}
1117 collection_depends = {}
1118 collection_list = collections.split()
1119 min_prio = 0
1120 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001121 bb.debug(1,'Processing %s in collection list' % (c))
1122
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001123 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001124 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001125 if priority:
1126 try:
1127 prio = int(priority)
1128 except ValueError:
1129 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1130 errors = True
1131 if min_prio == 0 or prio < min_prio:
1132 min_prio = prio
1133 collection_priorities[c] = prio
1134 else:
1135 collection_priorities[c] = None
1136
1137 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001138 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001139 if deps:
1140 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001141 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001142 except bb.utils.VersionStringException as vse:
1143 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001144 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001145 if dep in collection_list:
1146 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001147 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001148 (op, depver) = opstr.split()
1149 if layerver:
1150 try:
1151 res = bb.utils.vercmp_string_op(layerver, depver, op)
1152 except bb.utils.VersionStringException as vse:
1153 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1154 if not res:
1155 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1156 errors = True
1157 else:
1158 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1159 errors = True
1160 else:
1161 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1162 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001163 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001164 else:
1165 collection_depends[c] = []
1166
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001167 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001168 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001169 if recs:
1170 try:
1171 recDict = bb.utils.explode_dep_versions2(recs)
1172 except bb.utils.VersionStringException as vse:
1173 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1174 for rec, oplist in list(recDict.items()):
1175 if rec in collection_list:
1176 if oplist:
1177 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001178 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001179 if layerver:
1180 (op, recver) = opstr.split()
1181 try:
1182 res = bb.utils.vercmp_string_op(layerver, recver, op)
1183 except bb.utils.VersionStringException as vse:
1184 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1185 if not res:
1186 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1187 continue
1188 else:
1189 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1190 continue
1191 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1192 collection_depends[c].append(rec)
1193 else:
1194 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1195
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001196 # Recursively work out collection priorities based on dependencies
1197 def calc_layer_priority(collection):
1198 if not collection_priorities[collection]:
1199 max_depprio = min_prio
1200 for dep in collection_depends[collection]:
1201 calc_layer_priority(dep)
1202 depprio = collection_priorities[dep]
1203 if depprio > max_depprio:
1204 max_depprio = depprio
1205 max_depprio += 1
1206 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1207 collection_priorities[collection] = max_depprio
1208
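        # Illustrative example: assuming min_prio is 0, a layer with no dependencies ends
        # up with priority 1 and a layer that only depends on it gets 2, so layers deeper
        # in the dependency chain get higher numbers; collections whose priority is already
        # set are left untouched by calc_layer_priority().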
1209 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1210 for c in collection_list:
1211 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001212 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001213            if regex is None:
1214 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1215 errors = True
1216 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001217 elif regex == "":
1218 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
Brad Bishop316dfdd2018-06-25 12:45:53 -04001220 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001221 else:
1222 try:
1223 cre = re.compile(regex)
1224 except re.error:
1225 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1226 errors = True
1227 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001228 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001229 if errors:
1230 # We've already printed the actual error(s)
1231 raise CollectionError("Errors during parsing layer configuration")
1232
1233 def buildSetVars(self):
1234 """
1235 Setup any variables needed before starting a build
1236 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001237 t = time.gmtime()
1238 for mc in self.databuilder.mcdata:
1239 ds = self.databuilder.mcdata[mc]
1240 if not ds.getVar("BUILDNAME", False):
1241 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1242 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1243 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1244 ds.setVar("TIME", time.strftime('%H%M%S', t))
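            # Illustrative values: a build started at 2018-02-25 22:55:05 UTC would get
            # DATE = "20180225", TIME = "225505", BUILDSTART = "02/25/2018 22:55:05", and
            # BUILDNAME would expand to "20180225225505".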
1245
1246 def reset_mtime_caches(self):
1247 """
1248        Reset mtime caches - this is particularly important when memory resident, as something
1249        which is cached may well have changed since the last invocation (e.g. a
1250 file associated with a recipe might have been modified by the user).
1251 """
1252 build.reset_cache()
1253 bb.fetch._checksum_cache.mtime_cache.clear()
1254 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1255 if siggen_cache:
1256 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001257
1258 def matchFiles(self, bf):
1259 """
1260 Find the .bb files which match the expression in 'buildfile'.
1261 """
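        # Illustrative behaviour: for "bitbake -b ../meta-foo/recipes-bar/foo_1.0.bb"
        # (hypothetical path) the file exists, so its absolute path is returned directly;
        # an argument that does not name an existing file is instead treated as a regular
        # expression and searched against every collected .bb file path.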
1262 if bf.startswith("/") or bf.startswith("../"):
1263 bf = os.path.abspath(bf)
1264
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001265 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001266 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001267 try:
1268 os.stat(bf)
1269 bf = os.path.abspath(bf)
1270 return [bf]
1271 except OSError:
1272 regexp = re.compile(bf)
1273 matches = []
1274 for f in filelist:
1275 if regexp.search(f) and os.path.isfile(f):
1276 matches.append(f)
1277 return matches
1278
1279 def matchFile(self, buildfile):
1280 """
1281 Find the .bb file which matches the expression in 'buildfile'.
1282 Raise an error if multiple files
1283 """
1284 matches = self.matchFiles(buildfile)
1285 if len(matches) != 1:
1286            if matches:
1287                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1288                for f in matches:
1289                    msg += "\n    %s" % f
1290                parselog.error(msg)
1292 else:
1293 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1294 raise NoSpecificMatch
1295 return matches[0]
1296
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001297 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001298 """
1299 Build the file matching regexp buildfile
1300 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001301 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001302
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001303 # Too many people use -b because they think it's how you normally
1304 # specify a target to be built, so show a warning
1305 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1306
1307 self.buildFileInternal(buildfile, task)
1308
1309 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1310 """
1311 Build the file matching regexp buildfile
1312 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001313
1314 # Parse the configuration here. We need to do it explicitly here since
1315 # buildFile() doesn't use the cache
1316 self.parseConfiguration()
1317
1318 # If we are told to do the None task then query the default task
1319 if (task == None):
1320 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001321 if not task.startswith("do_"):
1322 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001323
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001324 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001325 fn = self.matchFile(fn)
1326
1327 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001328 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001329
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001330 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1331
1332 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001333 infos = dict(infos)
1334
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001335 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001336 try:
1337 info_array = infos[fn]
1338 except KeyError:
1339 bb.fatal("%s does not exist" % fn)
1340
1341 if info_array[0].skipped:
1342 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1343
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001344 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001345
1346 # Tweak some variables
1347 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001348 self.recipecaches[mc].ignored_dependencies = set()
1349 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001350 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001351
1352 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001353 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1354 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001355 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1356 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001357
1358 # Invalidate task for target if force mode active
1359 if self.configuration.force:
1360 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001361 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001362
1363 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001364 taskdata = {}
1365 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001366 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001367
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001368 if quietlog:
1369 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1370 bb.runqueue.logger.setLevel(logging.WARNING)
1371
1372 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1373 if fireevents:
1374 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001375
1376 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001377 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001378
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001379 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001380
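        # buildFileIdle is registered as a server idle callback below; it drives
        # rq.execute_runqueue() a little at a time, calls finishAsyncCommand() and returns
        # False once the build has finished or failed, and otherwise hands back whatever
        # execute_runqueue() returned so the server keeps invoking it.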
1381 def buildFileIdle(server, rq, abort):
1382
1383 msg = None
1384 interrupted = 0
1385 if abort or self.state == state.forceshutdown:
1386 rq.finish_runqueue(True)
1387 msg = "Forced shutdown"
1388 interrupted = 2
1389 elif self.state == state.shutdown:
1390 rq.finish_runqueue(False)
1391 msg = "Stopped build"
1392 interrupted = 1
1393 failures = 0
1394 try:
1395 retval = rq.execute_runqueue()
1396 except runqueue.TaskFailure as exc:
1397 failures += len(exc.args)
1398 retval = False
1399 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001400 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001401 if quietlog:
1402 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001403 return False
1404
1405 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001406 if fireevents:
1407 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001409 # We trashed self.recipecaches above
1410 self.parsecache_valid = False
1411 self.configuration.limited_deps = False
1412 bb.parse.siggen.reset(self.data)
1413 if quietlog:
1414 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001415 return False
1416 if retval is True:
1417 return True
1418 return retval
1419
1420 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1421
1422 def buildTargets(self, targets, task):
1423 """
1424 Attempt to build the targets specified
1425 """
1426
1427 def buildTargetsIdle(server, rq, abort):
1428 msg = None
1429 interrupted = 0
1430 if abort or self.state == state.forceshutdown:
1431 rq.finish_runqueue(True)
1432 msg = "Forced shutdown"
1433 interrupted = 2
1434 elif self.state == state.shutdown:
1435 rq.finish_runqueue(False)
1436 msg = "Stopped build"
1437 interrupted = 1
1438 failures = 0
1439 try:
1440 retval = rq.execute_runqueue()
1441 except runqueue.TaskFailure as exc:
1442 failures += len(exc.args)
1443 retval = False
1444 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001445 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001446 return False
1447
1448 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001449 try:
1450 for mc in self.multiconfigs:
1451 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1452 finally:
1453 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001454 return False
1455 if retval is True:
1456 return True
1457 return retval
1458
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001459 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001460 self.buildSetVars()
1461
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001462 # If we are told to do the None task then query the default task
1463 if (task == None):
1464 task = self.configuration.cmd
1465
1466 if not task.startswith("do_"):
1467 task = "do_%s" % task
1468
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001469 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
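        # For example (illustrative): targets ["busybox", "virtual/kernel:do_patch"] with a
        # default task of "do_build" become ["busybox:do_build", "virtual/kernel:do_patch"];
        # a task suffix is only appended where the target does not already carry one.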
1470
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001471 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001472
1473 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001474
1475 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001476
1477 # make targets to always look as <target>:do_<task>
1478 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001479 for target in runlist:
1480 if target[0]:
1481 ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
1482 ntargets.append("%s:%s" % (target[1], target[2]))
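        # Illustrative result: a runlist entry ['', 'busybox', 'do_build', fn] yields
        # "busybox:do_build", while a multiconfig entry ['mc1', 'busybox', 'do_build', fn]
        # additionally contributes "multiconfig:mc1:busybox:do_build".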
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001483
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001484 for mc in self.multiconfigs:
1485 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001486
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001487 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001488 if 'universe' in targets:
1489 rq.rqdata.warn_multi_bb = True
1490
1491 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1492
1493
1494 def getAllKeysWithFlags(self, flaglist):
1495 dump = {}
1496 for k in self.data.keys():
1497 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001498 expand = True
1499 flags = self.data.getVarFlags(k)
1500 if flags and "func" in flags and "python" in flags:
1501 expand = False
1502 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001503 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1504 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001505 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001506 'history' : self.data.varhistory.variable(k),
1507 }
1508 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001509 if flags and d in flags:
1510 dump[k][d] = flags[d]
1511 else:
1512 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001513 except Exception as e:
1514 print(e)
1515 return dump
1516
1517
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001518 def updateCacheSync(self):
1519 if self.state == state.running:
1520 return
1521
1522 # reload files for which we got notifications
1523 for p in self.inotify_modified_files:
1524 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001525 if p in bb.parse.BBHandler.cached_statements:
1526 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001527 self.inotify_modified_files = []
1528
1529 if not self.baseconfig_valid:
1530 logger.debug(1, "Reloading base configuration data")
1531 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001532 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001533
1534 # This is called for all async commands when self.state != running
1535 def updateCache(self):
1536 if self.state == state.running:
1537 return
1538
1539 if self.state in (state.shutdown, state.forceshutdown, state.error):
1540 if hasattr(self.parser, 'shutdown'):
1541 self.parser.shutdown(clean=False, force = True)
1542 raise bb.BBHandledException()
1543
1544 if self.state != state.parsing:
1545 self.updateCacheSync()
1546
1547 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001548 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001549 self.parseConfiguration ()
1550 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001551 for mc in self.multiconfigs:
1552 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001553
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001554 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001555 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001556 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001557
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001558 for dep in self.configuration.extra_assume_provided:
1559 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001560
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001561 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001562 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1563
1564 # Add inotify watches for directories searched for bb/bbappend files
1565 for dirent in searchdirs:
1566 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001567
1568 self.parser = CookerParser(self, filelist, masked)
1569 self.parsecache_valid = True
1570
1571 self.state = state.parsing
1572
1573 if not self.parser.parse_next():
1574 collectlog.debug(1, "parsing complete")
1575 if self.parser.error:
1576 raise bb.BBHandledException()
1577 self.show_appends_with_no_recipes()
1578 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001579 for mc in self.multiconfigs:
1580 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001581 self.state = state.running
1582
1583 # Send an event listing all stamps reachable after parsing
1584 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001585 for mc in self.multiconfigs:
1586 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1587 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001588 return None
1589
1590 return True
1591
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001592 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001593
1594 # Return a copy, don't modify the original
1595 pkgs_to_build = pkgs_to_build[:]
1596
1597 if len(pkgs_to_build) == 0:
1598 raise NothingToBuild
1599
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001600 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001601 for pkg in pkgs_to_build:
1602 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001603 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001604
1605 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001607 for mc in self.multiconfigs:
1608 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1609 for t in self.recipecaches[mc].world_target:
1610 if mc:
1611 t = "multiconfig:" + mc + ":" + t
1612 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001613
1614 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001615 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001616 parselog.debug(1, "collating packages for \"universe\"")
1617 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001618 for mc in self.multiconfigs:
1619 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001620 if task:
1621 foundtask = False
1622 for provider_fn in self.recipecaches[mc].providers[t]:
1623 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1624 foundtask = True
1625 break
1626 if not foundtask:
1627 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1628 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001629 if mc:
1630 t = "multiconfig:" + mc + ":" + t
1631 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001632
1633 return pkgs_to_build
1634
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001636        # We are now in our own process so we can call this here.
1637 # PRServ exits if its parent process exits
1638 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001639 return
1640
1641 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001642 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001643 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644
1645
1646 def shutdown(self, force = False):
1647 if force:
1648 self.state = state.forceshutdown
1649 else:
1650 self.state = state.shutdown
1651
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001652 if self.parser:
1653 self.parser.shutdown(clean=not force, force=force)
1654
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001655 def finishcommand(self):
1656 self.state = state.initial
1657
1658 def reset(self):
1659 self.initConfigurationData()
1660
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001661 def clientComplete(self):
1662 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001663 self.finishcommand()
1664 self.extraconfigdata = {}
1665 self.command.reset()
1666 self.databuilder.reset()
1667 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001668
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001669
1670class CookerExit(bb.event.Event):
1671 """
1672 Notify clients of the Cooker shutdown
1673 """
1674
1675 def __init__(self):
1676 bb.event.Event.__init__(self)
1677
1678
1679class CookerCollectFiles(object):
1680 def __init__(self, priorities):
1681 self.bbappends = []
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001682        # Priorities is a list of tuples, with the second element as the pattern.
1683 # We need to sort the list with the longest pattern first, and so on to
1684 # the shortest. This allows nested layers to be properly evaluated.
1685 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
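        # Illustrative example (paths hypothetical): with patterns "^/srv/layers/meta-foo/"
        # and "^/srv/layers/meta-foo/meta-foo-bsp/", sorting the longer pattern first lets
        # calc_bbfile_priority() match recipes under meta-foo-bsp against the nested layer's
        # priority before the enclosing layer's pattern can claim them.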
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001686
1687 def calc_bbfile_priority( self, filename, matched = None ):
1688 for _, _, regex, pri in self.bbfile_config_priorities:
1689 if regex.match(filename):
1690            if matched is not None:
1691                if regex not in matched:
1692 matched.add(regex)
1693 return pri
1694 return 0
1695
1696 def get_bbfiles(self):
1697 """Get list of default .bb files by reading out the current directory"""
1698 path = os.getcwd()
1699 contents = os.listdir(path)
1700 bbfiles = []
1701 for f in contents:
1702 if f.endswith(".bb"):
1703 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1704 return bbfiles
1705
1706 def find_bbfiles(self, path):
1707 """Find all the .bb and .bbappend files in a directory"""
1708 found = []
1709 for dir, dirs, files in os.walk(path):
1710 for ignored in ('SCCS', 'CVS', '.svn'):
1711 if ignored in dirs:
1712 dirs.remove(ignored)
1713 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1714
1715 return found
1716
1717 def collect_bbfiles(self, config, eventdata):
1718 """Collect all available .bb build files"""
1719 masked = 0
1720
1721 collectlog.debug(1, "collecting .bb files")
1722
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001723 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001724 config.setVar("BBFILES", " ".join(files))
1725
1726 # Sort files by priority
1727 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1728
1729        if not files:
1730            files = self.get_bbfiles()
1731
1732        if not files:
1733 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1734 bb.event.fire(CookerExit(), eventdata)
1735
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001736 # We need to track where we look so that we can add inotify watches. There
1737        # is no nice way to do this; it is horrid. We intercept the os.listdir()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001738 # (or os.scandir() for python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001739 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001740 if hasattr(os, 'scandir'):
1741 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001742 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001743
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001744 def ourlistdir(d):
1745 searchdirs.append(d)
1746 return origlistdir(d)
1747
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001748 def ourscandir(d):
1749 searchdirs.append(d)
1750 return origscandir(d)
1751
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001752 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001753 if hasattr(os, 'scandir'):
1754 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001755 try:
1756 # Can't use set here as order is important
1757 newfiles = []
1758 for f in files:
1759 if os.path.isdir(f):
1760 dirfiles = self.find_bbfiles(f)
1761 for g in dirfiles:
1762 if g not in newfiles:
1763 newfiles.append(g)
1764 else:
1765 globbed = glob.glob(f)
1766 if not globbed and os.path.exists(f):
1767 globbed = [f]
1768 # glob gives files in order on disk. Sort to be deterministic.
1769 for g in sorted(globbed):
1770 if g not in newfiles:
1771 newfiles.append(g)
1772 finally:
1773 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001774 if hasattr(os, 'scandir'):
1775 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001776
1777 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001778
1779 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001780 # First validate the individual regular expressions and ignore any
1781 # that do not compile
1782 bbmasks = []
1783 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001784 # When constructing an older style single regex, it's possible for BBMASK
1785 # to end up beginning with '|', which matches and masks _everything_.
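                # For example (hypothetical): appending to an initially empty BBMASK with
                # BBMASK .= "|meta-foo/recipes-broken/" leaves the mask "|meta-foo/recipes-broken/",
                # whose leading '|' creates an empty alternative that matches every path.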
1786 if mask.startswith("|"):
1787                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1788 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001789 try:
1790 re.compile(mask)
1791 bbmasks.append(mask)
1792 except sre_constants.error:
1793 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1794
1795 # Then validate the combined regular expressions. This should never
1796 # fail, but better safe than sorry...
1797 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001798 try:
1799 bbmask_compiled = re.compile(bbmask)
1800 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001801 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1802 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001803
1804 bbfiles = []
1805 bbappend = []
1806 for f in newfiles:
1807 if bbmask and bbmask_compiled.search(f):
1808 collectlog.debug(1, "skipping masked file %s", f)
1809 masked += 1
1810 continue
1811 if f.endswith('.bb'):
1812 bbfiles.append(f)
1813 elif f.endswith('.bbappend'):
1814 bbappend.append(f)
1815 else:
1816 collectlog.debug(1, "skipping %s: unknown file extension", f)
1817
1818 # Build a list of .bbappend files for each .bb file
1819 for f in bbappend:
1820 base = os.path.basename(f).replace('.bbappend', '.bb')
1821 self.bbappends.append((base, f))
1822
1823 # Find overlayed recipes
1824 # bbfiles will be in priority order which makes this easy
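        # Illustrative example: if two layers both provide a foo_1.0.bb (hypothetical), the
        # copy from the higher-priority layer is seen first in this reversed, priority-sorted
        # list and becomes the "top" file; the lower-priority copy is recorded against it in
        # self.overlayed.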
1825 bbfile_seen = dict()
1826 self.overlayed = defaultdict(list)
1827 for f in reversed(bbfiles):
1828 base = os.path.basename(f)
1829 if base not in bbfile_seen:
1830 bbfile_seen[base] = f
1831 else:
1832 topfile = bbfile_seen[base]
1833 self.overlayed[topfile].append(f)
1834
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001835 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001836
1837 def get_file_appends(self, fn):
1838 """
1839 Returns a list of .bbappend files to apply to fn
1840 """
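        # For example (illustrative): busybox_%.bbappend is returned for busybox_1.31.1.bb,
        # since once '.bbappend' is rewritten to '.bb' the text before the '%' wildcard
        # matches the start of the recipe file name.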
1841 filelist = []
1842 f = os.path.basename(fn)
1843 for b in self.bbappends:
1844 (bbappend, filename) = b
1845 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1846 filelist.append(filename)
1847 return filelist
1848
1849 def collection_priorities(self, pkgfns, d):
1850
1851 priorities = {}
1852
1853 # Calculate priorities for each file
1854 matched = set()
1855 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001856 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001857 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1858
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001859 unmatched = set()
1860 for _, _, regex, pri in self.bbfile_config_priorities:
1861            if regex not in matched:
1862 unmatched.add(regex)
1863
Brad Bishop316dfdd2018-06-25 12:45:53 -04001864 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1865 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001866 for b in self.bbappends:
1867 (bbfile, append) = b
1868 if regex.match(append):
Brad Bishop316dfdd2018-06-25 12:45:53 -04001869                    # If the bbappend is already matched by a regex in the "matched" set, return False
1870 for matched_regex in matched:
1871 if matched_regex.match(append):
1872 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001873 return True
1874 return False
1875
1876 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001877 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001878 unmatched.remove(unmatch)
1879
1880 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1881 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001882 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001883 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001884
1885 return priorities
1886
1887class ParsingFailure(Exception):
1888 def __init__(self, realexception, recipe):
1889 self.realexception = realexception
1890 self.recipe = recipe
1891 Exception.__init__(self, realexception, recipe)
1892
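# Roughly, the parsing pipeline below works as follows: CookerParser.start() spawns a single
# Feeder process that pops (filename, appends) jobs and pushes them onto a bounded queue; a
# pool of Parser processes pulls jobs from that queue, parses each recipe through its
# bb.cache Cache object, and puts (parsed, result) tuples onto a result queue that
# parse_generator()/parse_next() drain on the cooker side. The small *_quit queues are only
# used to ask the feeder and parsers to stop.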
1893class Feeder(multiprocessing.Process):
1894 def __init__(self, jobs, to_parsers, quit):
1895 self.quit = quit
1896 self.jobs = jobs
1897 self.to_parsers = to_parsers
1898 multiprocessing.Process.__init__(self)
1899
1900 def run(self):
1901 while True:
1902 try:
1903 quit = self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001904 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001905 pass
1906 else:
1907 if quit == 'cancel':
1908 self.to_parsers.cancel_join_thread()
1909 break
1910
1911 try:
1912 job = self.jobs.pop()
1913 except IndexError:
1914 break
1915
1916 try:
1917 self.to_parsers.put(job, timeout=0.5)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001918 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001919 self.jobs.insert(0, job)
1920 continue
1921
1922class Parser(multiprocessing.Process):
1923 def __init__(self, jobs, results, quit, init, profile):
1924 self.jobs = jobs
1925 self.results = results
1926 self.quit = quit
1927 self.init = init
1928 multiprocessing.Process.__init__(self)
1929 self.context = bb.utils.get_context().copy()
1930 self.handlers = bb.event.get_class_handlers().copy()
1931 self.profile = profile
1932
1933 def run(self):
1934
1935 if not self.profile:
1936 self.realrun()
1937 return
1938
1939 try:
1940 import cProfile as profile
1941        except ImportError:
1942 import profile
1943 prof = profile.Profile()
1944 try:
1945 profile.Profile.runcall(prof, self.realrun)
1946 finally:
1947 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1948 prof.dump_stats(logfile)
1949
1950 def realrun(self):
1951 if self.init:
1952 self.init()
1953
1954 pending = []
1955 while True:
1956 try:
1957 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001958 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001959 pass
1960 else:
1961 self.results.cancel_join_thread()
1962 break
1963
1964 if pending:
1965 result = pending.pop()
1966 else:
1967 try:
1968 job = self.jobs.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001969 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001970 continue
1971
1972 if job is None:
1973 break
1974 result = self.parse(*job)
1975
1976 try:
1977 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001978 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001979 pending.append(result)
1980
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001981 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001982 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001983 # Record the filename we're parsing into any events generated
1984 def parse_filter(self, record):
1985 record.taskpid = bb.event.worker_pid
1986 record.fn = filename
1987 return True
1988
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001989 # Reset our environment and handlers to the original settings
1990 bb.utils.set_context(self.context.copy())
1991 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001992 bb.event.LogHandler.filter = parse_filter
1993
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001994 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001995 except Exception as exc:
1996 tb = sys.exc_info()[2]
1997 exc.recipe = filename
1998 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1999 return True, exc
2000 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2001 # and for example a worker thread doesn't just exit on its own in response to
2002 # a SystemExit event for example.
2003 except BaseException as exc:
2004 return True, ParsingFailure(exc, filename)
2005
2006class CookerParser(object):
2007 def __init__(self, cooker, filelist, masked):
2008 self.filelist = filelist
2009 self.cooker = cooker
2010 self.cfgdata = cooker.data
2011 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002012 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002013
2014 # Accounting statistics
2015 self.parsed = 0
2016 self.cached = 0
2017 self.error = 0
2018 self.masked = masked
2019
2020 self.skipped = 0
2021 self.virtuals = 0
2022 self.total = len(filelist)
2023
2024 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002025 self.process_names = []
2026
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002027 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002028 self.fromcache = []
2029 self.willparse = []
2030 for filename in self.filelist:
2031 appends = self.cooker.collection.get_file_appends(filename)
2032 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002033 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002034 else:
2035 self.fromcache.append((filename, appends))
2036 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002037 self.progress_chunk = int(max(self.toparse / 100, 1))
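        # Illustrative: with 5000 recipes left to parse, progress_chunk is 50, so parse_next()
        # fires a ParseProgress event after every 50 newly parsed recipes.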
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002038
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002039 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002040 multiprocessing.cpu_count()), len(self.willparse))
2041
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002042 self.start()
2043 self.haveshutdown = False
2044
2045 def start(self):
2046 self.results = self.load_cached()
2047 self.processes = []
2048 if self.toparse:
2049 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2050 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002051 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002052 bb.utils.set_process_name(multiprocessing.current_process().name)
2053 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2054 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002055
2056 self.feeder_quit = multiprocessing.Queue(maxsize=1)
2057 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2058 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
2059 self.result_queue = multiprocessing.Queue()
2060 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
2061 self.feeder.start()
2062 for i in range(0, self.num_processes):
2063 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2064 parser.start()
2065 self.process_names.append(parser.name)
2066 self.processes.append(parser)
2067
2068 self.results = itertools.chain(self.results, self.parse_generator())
2069
2070 def shutdown(self, clean=True, force=False):
2071 if not self.toparse:
2072 return
2073 if self.haveshutdown:
2074 return
2075 self.haveshutdown = True
2076
2077 if clean:
2078 event = bb.event.ParseCompleted(self.cached, self.parsed,
2079 self.skipped, self.masked,
2080 self.virtuals, self.error,
2081 self.total)
2082
2083 bb.event.fire(event, self.cfgdata)
2084 self.feeder_quit.put(None)
2085 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002086 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002087 else:
2088 self.feeder_quit.put('cancel')
2089
2090 self.parser_quit.cancel_join_thread()
2091 for process in self.processes:
2092 self.parser_quit.put(None)
2093
2094 self.jobs.cancel_join_thread()
2095
2096 for process in self.processes:
2097 if force:
2098 process.join(.1)
2099 process.terminate()
2100 else:
2101 process.join()
2102 self.feeder.join()
2103
2104 sync = threading.Thread(target=self.bb_cache.sync)
2105 sync.start()
2106 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002107 bb.codeparser.parser_cache_savemerge()
2108 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002109 if self.cooker.configuration.profile:
2110 profiles = []
2111 for i in self.process_names:
2112 logfile = "profile-parse-%s.log" % i
2113 if os.path.exists(logfile):
2114 profiles.append(logfile)
2115
2116 pout = "profile-parse.log.processed"
2117 bb.utils.process_profilelog(profiles, pout = pout)
2118 print("Processed parsing statistics saved to %s" % (pout))
2119
2120 def load_cached(self):
2121 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002122 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002123 yield not cached, infos
2124
2125 def parse_generator(self):
2126 while True:
2127 if self.parsed >= self.toparse:
2128 break
2129
2130 try:
2131 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002132 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002133 pass
2134 else:
2135 value = result[1]
2136 if isinstance(value, BaseException):
2137 raise value
2138 else:
2139 yield result
2140
2141 def parse_next(self):
2142 result = []
2143 parsed = None
2144 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002145 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002146 except StopIteration:
2147 self.shutdown()
2148 return False
2149 except bb.BBHandledException as exc:
2150 self.error += 1
2151 logger.error('Failed to parse recipe: %s' % exc.recipe)
2152 self.shutdown(clean=False)
2153 return False
2154 except ParsingFailure as exc:
2155 self.error += 1
2156 logger.error('Unable to parse %s: %s' %
2157 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2158 self.shutdown(clean=False)
2159 return False
2160 except bb.parse.ParseError as exc:
2161 self.error += 1
2162 logger.error(str(exc))
2163 self.shutdown(clean=False)
2164 return False
2165 except bb.data_smart.ExpansionError as exc:
2166 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002167 bbdir = os.path.dirname(__file__) + os.sep
2168 etype, value, _ = sys.exc_info()
2169 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2170 logger.error('ExpansionError during parsing %s', value.recipe,
2171 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002172 self.shutdown(clean=False)
2173 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002174 except Exception as exc:
2175 self.error += 1
2176 etype, value, tb = sys.exc_info()
2177 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002178 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002179 exc_info=(etype, value, exc.traceback))
2180 else:
2181 # Most likely, an exception occurred during raising an exception
2182 import traceback
2183 logger.error('Exception during parse: %s' % traceback.format_exc())
2184 self.shutdown(clean=False)
2185 return False
2186
2187 self.current += 1
2188 self.virtuals += len(result)
2189 if parsed:
2190 self.parsed += 1
2191 if self.parsed % self.progress_chunk == 0:
2192 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2193 self.cfgdata)
2194 else:
2195 self.cached += 1
2196
2197 for virtualfn, info_array in result:
2198 if info_array[0].skipped:
2199 self.skipped += 1
2200 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002201 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2202 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002203 parsed=parsed, watcher = self.cooker.add_filewatch)
2204 return True
2205
2206 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002207 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002208 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002209 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2210 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)