blob: db52964c3a8965a6dbb6e1ffab20154489c86275 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
Patrick Williamsc0f7c042017-02-23 20:41:17 -060025
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
Patrick Williamsc0f7c042017-02-23 20:41:17 -060033from io import StringIO, UnsupportedOperation
Patrick Williamsc124f4f2015-09-15 14:41:29 -050034from contextlib import closing
35from functools import wraps
Patrick Williamsc0f7c042017-02-23 20:41:17 -060036from collections import defaultdict, namedtuple
Patrick Williamsc124f4f2015-09-15 14:41:29 -050037import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
Patrick Williamsc0f7c042017-02-23 20:41:17 -060039import queue
Patrick Williamsc124f4f2015-09-15 14:41:29 -050040import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
Patrick Williamsc0f7c042017-02-23 20:41:17 -060045import json
46import pickle
47import codecs
Patrick Williamsc124f4f2015-09-15 14:41:29 -050048
49logger = logging.getLogger("BitBake")
50collectlog = logging.getLogger("BitBake.Collection")
51buildlog = logging.getLogger("BitBake.Build")
52parselog = logging.getLogger("BitBake.Parsing")
53providerlog = logging.getLogger("BitBake.Provider")
54
55class NoSpecificMatch(bb.BBHandledException):
56 """
57 Exception raised when no or multiple file matches are found
58 """
59
60class NothingToBuild(Exception):
61 """
62 Exception raised when there is nothing to build
63 """
64
65class CollectionError(bb.BBHandledException):
66 """
67 Exception raised when layer configuration is incorrect
68 """
69
70class state:
Patrick Williamsc0f7c042017-02-23 20:41:17 -060071 initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
Patrick Williamsc124f4f2015-09-15 14:41:29 -050072
Patrick Williamsd8c66bc2016-06-20 12:57:21 -050073 @classmethod
74 def get_name(cls, code):
75 for name in dir(cls):
76 value = getattr(cls, name)
77 if type(value) == type(cls.initial) and value == code:
78 return name
79 raise ValueError("Invalid status code: %s" % code)
80
Patrick Williamsc124f4f2015-09-15 14:41:29 -050081
82class SkippedPackage:
83 def __init__(self, info = None, reason = None):
84 self.pn = None
85 self.skipreason = None
86 self.provides = None
87 self.rprovides = None
88
89 if info:
90 self.pn = info.pn
91 self.skipreason = info.skipreason
92 self.provides = info.provides
93 self.rprovides = info.rprovides
94 elif reason:
95 self.skipreason = reason
96
97
98class CookerFeatures(object):
Patrick Williamsc0f7c042017-02-23 20:41:17 -060099 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500100
101 def __init__(self):
102 self._features=set()
103
104 def setFeature(self, f):
105 # validate we got a request for a feature we support
106 if f not in CookerFeatures._feature_list:
107 return
108 self._features.add(f)
109
110 def __contains__(self, f):
111 return f in self._features
112
113 def __iter__(self):
114 return self._features.__iter__()
115
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600116 def __next__(self):
117 return next(self._features)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500118
119
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600120class EventWriter:
121 def __init__(self, cooker, eventfile):
122 self.file_inited = None
123 self.cooker = cooker
124 self.eventfile = eventfile
125 self.event_queue = []
126
127 def write_event(self, event):
128 with open(self.eventfile, "a") as f:
129 try:
130 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
131 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
132 "vars": str_event}))
133 except Exception as err:
134 import traceback
135 print(err, traceback.format_exc())
136
137 def send(self, event):
138 if self.file_inited:
139 # we have the file, just write the event
140 self.write_event(event)
141 else:
142 # init on bb.event.BuildStarted
143 name = "%s.%s" % (event.__module__, event.__class__.__name__)
144 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
145 with open(self.eventfile, "w") as f:
146 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
147
148 self.file_inited = True
149
150 # write pending events
151 for evt in self.event_queue:
152 self.write_event(evt)
153
154 # also write the current event
155 self.write_event(event)
156 else:
157 # queue all events until the file is inited
158 self.event_queue.append(event)
159
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500160#============================================================================#
161# BBCooker
162#============================================================================#
163class BBCooker:
164 """
165 Manages one bitbake build run
166 """
167
168 def __init__(self, configuration, featureSet=None):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600169 self.recipecaches = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500170 self.skiplist = {}
171 self.featureset = CookerFeatures()
172 if featureSet:
173 for f in featureSet:
174 self.featureset.setFeature(f)
175
176 self.configuration = configuration
177
Brad Bishopf058f492019-01-28 23:50:33 -0500178 bb.debug(1, "BBCooker starting %s" % time.time())
179 sys.stdout.flush()
180
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500181 self.configwatcher = pyinotify.WatchManager()
Brad Bishopf058f492019-01-28 23:50:33 -0500182 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
183 sys.stdout.flush()
184
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500185 self.configwatcher.bbseen = []
186 self.configwatcher.bbwatchedfiles = []
187 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
Brad Bishopf058f492019-01-28 23:50:33 -0500188 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
189 sys.stdout.flush()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500190 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
191 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500192 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500193 self.watcher = pyinotify.WatchManager()
Brad Bishopf058f492019-01-28 23:50:33 -0500194 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
195 sys.stdout.flush()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500196 self.watcher.bbseen = []
197 self.watcher.bbwatchedfiles = []
198 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
199
Brad Bishopf058f492019-01-28 23:50:33 -0500200 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
201 sys.stdout.flush()
202
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500203 # If being called by something like tinfoil, we need to clean cached data
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500204 # which may now be invalid
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500205 bb.parse.clear_cache()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500206 bb.parse.BBHandler.cached_statements = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500207
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500208 self.ui_cmdline = None
209
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500210 self.initConfigurationData()
211
Brad Bishopf058f492019-01-28 23:50:33 -0500212 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
213 sys.stdout.flush()
214
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600215 # we log all events to a file if so directed
216 if self.configuration.writeeventlog:
217 # register the log file writer as UI Handler
218 writer = EventWriter(self, self.configuration.writeeventlog)
219 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
220 bb.event.register_UIHhandler(EventLogWriteHandler(writer))
221
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500222 self.inotify_modified_files = []
223
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500224 def _process_inotify_updates(server, cooker, abort):
225 cooker.process_inotify_updates()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500226 return 1.0
227
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500228 self.configuration.server_register_idlecallback(_process_inotify_updates, self)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500229
230 # TOSTOP must not be set or our children will hang when they output
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600231 try:
232 fd = sys.stdout.fileno()
233 if os.isatty(fd):
234 import termios
235 tcattr = termios.tcgetattr(fd)
236 if tcattr[3] & termios.TOSTOP:
237 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
238 tcattr[3] = tcattr[3] & ~termios.TOSTOP
239 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
240 except UnsupportedOperation:
241 pass
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500242
243 self.command = bb.command.Command(self)
244 self.state = state.initial
245
246 self.parser = None
247
248 signal.signal(signal.SIGTERM, self.sigterm_exception)
249 # Let SIGHUP exit as SIGTERM
250 signal.signal(signal.SIGHUP, self.sigterm_exception)
251
Brad Bishopf058f492019-01-28 23:50:33 -0500252 bb.debug(1, "BBCooker startup complete %s" % time.time())
253 sys.stdout.flush()
254
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500255 def process_inotify_updates(self):
256 for n in [self.confignotifier, self.notifier]:
257 if n.check_events(timeout=0):
258 # read notified events and enqeue them
259 n.read_events()
260 n.process_events()
261
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500262 def config_notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500263 if event.maskname == "IN_Q_OVERFLOW":
264 bb.warn("inotify event queue overflowed, invalidating caches.")
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500265 self.parsecache_valid = False
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500266 self.baseconfig_valid = False
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500267 bb.parse.clear_cache()
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500268 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500269 if not event.pathname in self.configwatcher.bbwatchedfiles:
270 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500271 if not event.pathname in self.inotify_modified_files:
272 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500273 self.baseconfig_valid = False
274
275 def notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500276 if event.maskname == "IN_Q_OVERFLOW":
277 bb.warn("inotify event queue overflowed, invalidating caches.")
278 self.parsecache_valid = False
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500279 bb.parse.clear_cache()
280 return
281 if event.pathname.endswith("bitbake-cookerdaemon.log") \
282 or event.pathname.endswith("bitbake.lock"):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500283 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500284 if not event.pathname in self.inotify_modified_files:
285 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500286 self.parsecache_valid = False
287
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500288 def add_filewatch(self, deps, watcher=None, dirs=False):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500289 if not watcher:
290 watcher = self.watcher
291 for i in deps:
292 watcher.bbwatchedfiles.append(i[0])
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500293 if dirs:
294 f = i[0]
295 else:
296 f = os.path.dirname(i[0])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500297 if f in watcher.bbseen:
298 continue
299 watcher.bbseen.append(f)
300 watchtarget = None
301 while True:
302 # We try and add watches for files that don't exist but if they did, would influence
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500303 # the parser. The parent directory of these files may not exist, in which case we need
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500304 # to watch any parent that does exist for changes.
305 try:
306 watcher.add_watch(f, self.watchmask, quiet=False)
307 if watchtarget:
308 watcher.bbwatchedfiles.append(watchtarget)
309 break
310 except pyinotify.WatchManagerError as e:
311 if 'ENOENT' in str(e):
312 watchtarget = f
313 f = os.path.dirname(f)
314 if f in watcher.bbseen:
315 break
316 watcher.bbseen.append(f)
317 continue
318 if 'ENOSPC' in str(e):
319 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
320 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
321 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
322 providerlog.error("Root privilege is required to modify max_user_watches.")
323 raise
324
325 def sigterm_exception(self, signum, stackframe):
326 if signum == signal.SIGTERM:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500327 bb.warn("Cooker received SIGTERM, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500328 elif signum == signal.SIGHUP:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500329 bb.warn("Cooker received SIGHUP, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500330 self.state = state.forceshutdown
331
332 def setFeatures(self, features):
333 # we only accept a new feature set if we're in state initial, so we can reset without problems
334 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
335 raise Exception("Illegal state for feature set change")
336 original_featureset = list(self.featureset)
337 for feature in features:
338 self.featureset.setFeature(feature)
339 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
340 if (original_featureset != list(self.featureset)) and self.state != state.error:
341 self.reset()
342
343 def initConfigurationData(self):
344
345 self.state = state.initial
346 self.caches_array = []
347
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500348 # Need to preserve BB_CONSOLELOG over resets
349 consolelog = None
350 if hasattr(self, "data"):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500351 consolelog = self.data.getVar("BB_CONSOLELOG")
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500352
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500353 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
354 self.enableDataTracking()
355
356 all_extra_cache_names = []
357 # We hardcode all known cache types in a single place, here.
358 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
359 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
360
361 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
362
363 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
364 # This is the entry point, no further check needed!
365 for var in caches_name_array:
366 try:
367 module_name, cache_name = var.split(':')
368 module = __import__(module_name, fromlist=(cache_name,))
369 self.caches_array.append(getattr(module, cache_name))
370 except ImportError as exc:
371 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
372 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
373
374 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
375 self.databuilder.parseBaseConfiguration()
376 self.data = self.databuilder.data
377 self.data_hash = self.databuilder.data_hash
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500378 self.extraconfigdata = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500379
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500380 if consolelog:
381 self.data.setVar("BB_CONSOLELOG", consolelog)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500382
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500383 self.data.setVar('BB_CMDLINE', self.ui_cmdline)
384
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500385 #
386 # Copy of the data store which has been expanded.
387 # Used for firing events and accessing variables where expansion needs to be accounted for
388 #
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500389 bb.parse.init_parser(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500390
391 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
392 self.disableDataTracking()
393
394 self.data.renameVar("__depends", "__base_depends")
395 self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
396
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500397 self.baseconfig_valid = True
398 self.parsecache_valid = False
399
400 def handlePRServ(self):
401 # Setup a PR Server based on the new configuration
402 try:
403 self.prhost = prserv.serv.auto_start(self.data)
404 except prserv.serv.PRServiceConfigError as e:
405 bb.fatal("Unable to start PR Server, exitting")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500406
407 def enableDataTracking(self):
408 self.configuration.tracking = True
409 if hasattr(self, "data"):
410 self.data.enableTracking()
411
412 def disableDataTracking(self):
413 self.configuration.tracking = False
414 if hasattr(self, "data"):
415 self.data.disableTracking()
416
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500417 def parseConfiguration(self):
418 # Set log file verbosity
419 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
420 if verboselogs:
421 bb.msg.loggerVerboseLogs = True
422
423 # Change nice level if we're asked to
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500424 nice = self.data.getVar("BB_NICE_LEVEL")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500425 if nice:
426 curnice = os.nice(0)
427 nice = int(nice) - curnice
428 buildlog.verbose("Renice to %s " % os.nice(nice))
429
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600430 if self.recipecaches:
431 del self.recipecaches
432 self.multiconfigs = self.databuilder.mcdata.keys()
433 self.recipecaches = {}
434 for mc in self.multiconfigs:
435 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500436
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500437 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500438
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500439 self.parsecache_valid = False
440
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500441 def updateConfigOpts(self, options, environment, cmdline):
442 self.ui_cmdline = cmdline
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500443 clean = True
444 for o in options:
445 if o in ['prefile', 'postfile']:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500446 # Only these options may require a reparse
447 try:
448 if getattr(self.configuration, o) == options[o]:
449 # Value is the same, no need to mark dirty
450 continue
451 except AttributeError:
452 pass
453 logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
454 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500455 clean = False
456 setattr(self.configuration, o, options[o])
457 for k in bb.utils.approved_variables():
458 if k in environment and k not in self.configuration.env:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500459 logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500460 self.configuration.env[k] = environment[k]
461 clean = False
462 if k in self.configuration.env and k not in environment:
463 logger.debug(1, "Updating environment variable %s (deleted)" % (k))
464 del self.configuration.env[k]
465 clean = False
466 if k not in self.configuration.env and k not in environment:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500467 continue
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500468 if environment[k] != self.configuration.env[k]:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500469 logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500470 self.configuration.env[k] = environment[k]
471 clean = False
472 if not clean:
473 logger.debug(1, "Base environment change, triggering reparse")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500474 self.reset()
475
476 def runCommands(self, server, data, abort):
477 """
478 Run any queued asynchronous command
479 This is done by the idle handler so it runs in true context rather than
480 tied to any UI.
481 """
482
483 return self.command.runAsyncCommand()
484
485 def showVersions(self):
486
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500487 (latest_versions, preferred_versions) = self.findProviders()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500488
489 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
490 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
491
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500492 for p in sorted(self.recipecaches[''].pkg_pn):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500493 pref = preferred_versions[p]
494 latest = latest_versions[p]
495
496 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
497 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
498
499 if pref == latest:
500 prefstr = ""
501
502 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
503
504 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
505 """
506 Show the outer or per-recipe environment
507 """
508 fn = None
509 envdata = None
510 if not pkgs_to_build:
511 pkgs_to_build = []
512
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500513 orig_tracking = self.configuration.tracking
514 if not orig_tracking:
515 self.enableDataTracking()
516 self.reset()
517
518
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500519 if buildfile:
520 # Parse the configuration here. We need to do it explicitly here since
521 # this showEnvironment() code path doesn't use the cache
522 self.parseConfiguration()
523
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600524 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500525 fn = self.matchFile(fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600526 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500527 elif len(pkgs_to_build) == 1:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500528 ignore = self.data.getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500529 if pkgs_to_build[0] in set(ignore.split()):
530 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
531
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600532 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500533
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600534 mc = runlist[0][0]
535 fn = runlist[0][3]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500536 else:
537 envdata = self.data
Brad Bishop316dfdd2018-06-25 12:45:53 -0400538 data.expandKeys(envdata)
539 parse.ast.runAnonFuncs(envdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500540
541 if fn:
542 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600543 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
544 envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500545 except Exception as e:
546 parselog.exception("Unable to read %s", fn)
547 raise
548
549 # Display history
550 with closing(StringIO()) as env:
551 self.data.inchistory.emit(env)
552 logger.plain(env.getvalue())
553
554 # emit variables and shell functions
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500555 with closing(StringIO()) as env:
556 data.emit_env(env, envdata, True)
557 logger.plain(env.getvalue())
558
559 # emit the metadata which isnt valid shell
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500560 for e in sorted(envdata.keys()):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600561 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500562 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500563
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500564 if not orig_tracking:
565 self.disableDataTracking()
566 self.reset()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500567
568 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
569 """
570 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
571 """
572 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
573
574 # A task of None means use the default task
575 if task is None:
576 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500577 if not task.startswith("do_"):
578 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500579
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500580 targetlist = self.checkPackages(pkgs_to_build, task)
581 fulltargetlist = []
582 defaulttask_implicit = ''
583 defaulttask_explicit = False
584 wildcard = False
585
586 # Wild card expansion:
587 # Replace string such as "multiconfig:*:bash"
588 # into "multiconfig:A:bash multiconfig:B:bash bash"
589 for k in targetlist:
590 if k.startswith("multiconfig:"):
591 if wildcard:
592 bb.fatal('multiconfig conflict')
593 if k.split(":")[1] == "*":
594 wildcard = True
595 for mc in self.multiconfigs:
596 if mc:
597 fulltargetlist.append(k.replace('*', mc))
598 # implicit default task
599 else:
600 defaulttask_implicit = k.split(":")[2]
601 else:
602 fulltargetlist.append(k)
603 else:
604 defaulttask_explicit = True
605 fulltargetlist.append(k)
606
607 if not defaulttask_explicit and defaulttask_implicit != '':
608 fulltargetlist.append(defaulttask_implicit)
609
610 bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600611 taskdata = {}
612 localdata = {}
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500613
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600614 for mc in self.multiconfigs:
615 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
616 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600617 bb.data.expandKeys(localdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500618
619 current = 0
620 runlist = []
621 for k in fulltargetlist:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600622 mc = ""
623 if k.startswith("multiconfig:"):
624 mc = k.split(":")[1]
625 k = ":".join(k.split(":")[2:])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500626 ktask = task
627 if ":do_" in k:
628 k2 = k.split(":do_")
629 k = k2[0]
630 ktask = k2[1]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600631 taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500632 current += 1
633 if not ktask.startswith("do_"):
634 ktask = "do_%s" % ktask
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600635 if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
636 # e.g. in ASSUME_PROVIDED
637 continue
638 fn = taskdata[mc].build_targets[k][0]
639 runlist.append([mc, k, ktask, fn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500640 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600641
Brad Bishopf058f492019-01-28 23:50:33 -0500642
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800643 # No need to do check providers if there are no mcdeps or not an mc build
Brad Bishopf058f492019-01-28 23:50:33 -0500644 if mc:
645 # Add unresolved first, so we can get multiconfig indirect dependencies on time
646 for mcavailable in self.multiconfigs:
647 # The first element is empty
648 if mcavailable:
649 taskdata[mcavailable].add_unresolved(localdata[mcavailable], self.recipecaches[mcavailable])
650
651
652 mcdeps = taskdata[mc].get_mcdepends()
653
654 if mcdeps:
655 # Make sure we can provide the multiconfig dependency
656 seen = set()
657 new = True
658 while new:
659 new = False
660 for mc in self.multiconfigs:
661 for k in mcdeps:
662 if k in seen:
663 continue
664 l = k.split(':')
665 depmc = l[2]
666 if depmc not in self.multiconfigs:
667 bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
668 else:
669 logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
670 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
671 seen.add(k)
672 new = True
673
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600674 for mc in self.multiconfigs:
675 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
676
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500677 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600678 return taskdata, runlist
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500679
680 def prepareTreeData(self, pkgs_to_build, task):
681 """
682 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
683 """
684
685 # We set abort to False here to prevent unbuildable targets raising
686 # an exception when we're just generating data
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600687 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500688
689 return runlist, taskdata
690
691 ######## WARNING : this function requires cache_extra to be enabled ########
692
693 def generateTaskDepTreeData(self, pkgs_to_build, task):
694 """
695 Create a dependency graph of pkgs_to_build including reverse dependency
696 information.
697 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500698 if not task.startswith("do_"):
699 task = "do_%s" % task
700
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500701 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600702 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500703 rq.rqdata.prepare()
704 return self.buildDependTree(rq, taskdata)
705
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600706 @staticmethod
707 def add_mc_prefix(mc, pn):
708 if mc:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500709 return "multiconfig:%s:%s" % (mc, pn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600710 return pn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500711
712 def buildDependTree(self, rq, taskdata):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600713 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500714 depend_tree = {}
715 depend_tree["depends"] = {}
716 depend_tree["tdepends"] = {}
717 depend_tree["pn"] = {}
718 depend_tree["rdepends-pn"] = {}
719 depend_tree["packages"] = {}
720 depend_tree["rdepends-pkg"] = {}
721 depend_tree["rrecs-pkg"] = {}
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500722 depend_tree['providermap'] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600723 depend_tree["layer-priorities"] = self.bbfile_config_priorities
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500724
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600725 for mc in taskdata:
726 for name, fn in list(taskdata[mc].get_providermap().items()):
727 pn = self.recipecaches[mc].pkg_fn[fn]
728 pn = self.add_mc_prefix(mc, pn)
729 if name != pn:
730 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
731 depend_tree['providermap'][name] = (pn, version)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500732
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600733 for tid in rq.rqdata.runtaskentries:
734 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
735 pn = self.recipecaches[mc].pkg_fn[taskfn]
736 pn = self.add_mc_prefix(mc, pn)
737 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500738 if pn not in depend_tree["pn"]:
739 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600740 depend_tree["pn"][pn]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500741 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600742 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500743
744 # if we have extra caches, list all attributes they bring in
745 extra_info = []
746 for cache_class in self.caches_array:
747 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
748 cachefields = getattr(cache_class, 'cachefields', [])
749 extra_info = extra_info + cachefields
750
751 # for all attributes stored, add them to the dependency tree
752 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600753 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500754
755
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500756 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
757 if not dotname in depend_tree["tdepends"]:
758 depend_tree["tdepends"][dotname] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600759 for dep in rq.rqdata.runtaskentries[tid].depends:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800760 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
761 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600762 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
763 if taskfn not in seen_fns:
764 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500765 packages = []
766
767 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600768 for dep in taskdata[mc].depids[taskfn]:
769 depend_tree["depends"][pn].append(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500770
771 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600772 for rdep in taskdata[mc].rdepids[taskfn]:
773 depend_tree["rdepends-pn"][pn].append(rdep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500774
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600775 rdepends = self.recipecaches[mc].rundeps[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500776 for package in rdepends:
777 depend_tree["rdepends-pkg"][package] = []
778 for rdepend in rdepends[package]:
779 depend_tree["rdepends-pkg"][package].append(rdepend)
780 packages.append(package)
781
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600782 rrecs = self.recipecaches[mc].runrecs[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500783 for package in rrecs:
784 depend_tree["rrecs-pkg"][package] = []
785 for rdepend in rrecs[package]:
786 depend_tree["rrecs-pkg"][package].append(rdepend)
787 if not package in packages:
788 packages.append(package)
789
790 for package in packages:
791 if package not in depend_tree["packages"]:
792 depend_tree["packages"][package] = {}
793 depend_tree["packages"][package]["pn"] = pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600794 depend_tree["packages"][package]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500795 depend_tree["packages"][package]["version"] = version
796
797 return depend_tree
798
799 ######## WARNING : this function requires cache_extra to be enabled ########
800 def generatePkgDepTreeData(self, pkgs_to_build, task):
801 """
802 Create a dependency tree of pkgs_to_build, returning the data.
803 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500804 if not task.startswith("do_"):
805 task = "do_%s" % task
806
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500807 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500808
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600809 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500810 depend_tree = {}
811 depend_tree["depends"] = {}
812 depend_tree["pn"] = {}
813 depend_tree["rdepends-pn"] = {}
814 depend_tree["rdepends-pkg"] = {}
815 depend_tree["rrecs-pkg"] = {}
816
817 # if we have extra caches, list all attributes they bring in
818 extra_info = []
819 for cache_class in self.caches_array:
820 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
821 cachefields = getattr(cache_class, 'cachefields', [])
822 extra_info = extra_info + cachefields
823
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600824 tids = []
825 for mc in taskdata:
826 for tid in taskdata[mc].taskentries:
827 tids.append(tid)
828
829 for tid in tids:
830 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
831
832 pn = self.recipecaches[mc].pkg_fn[taskfn]
833 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500834
835 if pn not in depend_tree["pn"]:
836 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600837 depend_tree["pn"][pn]["filename"] = taskfn
838 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500839 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600840 rdepends = self.recipecaches[mc].rundeps[taskfn]
841 rrecs = self.recipecaches[mc].runrecs[taskfn]
842 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500843
844 # for all extra attributes stored, add them to the dependency tree
845 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600846 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500847
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600848 if taskfn not in seen_fns:
849 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500850
851 depend_tree["depends"][pn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500852 for dep in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500853 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600854 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
855 fn_provider = taskdata[mc].build_targets[dep][0]
856 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500857 else:
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500858 pn_provider = dep
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600859 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500860 depend_tree["depends"][pn].append(pn_provider)
861
862 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600863 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500864 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600865 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
866 fn_rprovider = taskdata[mc].run_targets[rdep][0]
867 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500868 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600869 pn_rprovider = rdep
870 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500871 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
872
873 depend_tree["rdepends-pkg"].update(rdepends)
874 depend_tree["rrecs-pkg"].update(rrecs)
875
876 return depend_tree
877
878 def generateDepTreeEvent(self, pkgs_to_build, task):
879 """
880 Create a task dependency graph of pkgs_to_build.
881 Generate an event with the result
882 """
883 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
884 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
885
886 def generateDotGraphFiles(self, pkgs_to_build, task):
887 """
888 Create a task dependency graph of pkgs_to_build.
889 Save the result to a set of .dot files.
890 """
891
892 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
893
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500894 with open('pn-buildlist', 'w') as f:
895 for pn in depgraph["pn"]:
896 f.write(pn + "\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500897 logger.info("PN build list saved to 'pn-buildlist'")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500898
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500899 # Remove old format output files to ensure no confusion with stale data
900 try:
901 os.unlink('pn-depends.dot')
902 except FileNotFoundError:
903 pass
904 try:
905 os.unlink('package-depends.dot')
906 except FileNotFoundError:
907 pass
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500908
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500909 with open('task-depends.dot', 'w') as f:
910 f.write("digraph depends {\n")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400911 for task in sorted(depgraph["tdepends"]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500912 (pn, taskname) = task.rsplit(".", 1)
913 fn = depgraph["pn"][pn]["filename"]
914 version = depgraph["pn"][pn]["version"]
915 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400916 for dep in sorted(depgraph["tdepends"][task]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500917 f.write('"%s" -> "%s"\n' % (task, dep))
918 f.write("}\n")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500919 logger.info("Task dependencies saved to 'task-depends.dot'")
920
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500921 with open('recipe-depends.dot', 'w') as f:
922 f.write("digraph depends {\n")
923 pndeps = {}
Brad Bishop316dfdd2018-06-25 12:45:53 -0400924 for task in sorted(depgraph["tdepends"]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500925 (pn, taskname) = task.rsplit(".", 1)
926 if pn not in pndeps:
927 pndeps[pn] = set()
Brad Bishop316dfdd2018-06-25 12:45:53 -0400928 for dep in sorted(depgraph["tdepends"][task]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500929 (deppn, deptaskname) = dep.rsplit(".", 1)
930 pndeps[pn].add(deppn)
Brad Bishop316dfdd2018-06-25 12:45:53 -0400931 for pn in sorted(pndeps):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500932 fn = depgraph["pn"][pn]["filename"]
933 version = depgraph["pn"][pn]["version"]
934 f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400935 for dep in sorted(pndeps[pn]):
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500936 if dep == pn:
937 continue
938 f.write('"%s" -> "%s"\n' % (pn, dep))
939 f.write("}\n")
Brad Bishop316dfdd2018-06-25 12:45:53 -0400940 logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500941
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500942 def show_appends_with_no_recipes(self):
943 # Determine which bbappends haven't been applied
944
945 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600946 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500947 recipefns.extend(self.skiplist.keys())
948
949 # Work out list of bbappends that have been applied
950 applied_appends = []
951 for fn in recipefns:
952 applied_appends.extend(self.collection.get_file_appends(fn))
953
954 appends_without_recipes = []
955 for _, appendfn in self.collection.bbappends:
956 if not appendfn in applied_appends:
957 appends_without_recipes.append(appendfn)
958
959 if appends_without_recipes:
960 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
961 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
962 False) or "no"
963 if warn_only.lower() in ("1", "yes", "true"):
964 bb.warn(msg)
965 else:
966 bb.fatal(msg)
967
968 def handlePrefProviders(self):
969
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600970 for mc in self.multiconfigs:
971 localdata = data.createCopy(self.databuilder.mcdata[mc])
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600972 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500973
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600974 # Handle PREFERRED_PROVIDERS
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500975 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600976 try:
977 (providee, provider) = p.split(':')
978 except:
979 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
980 continue
981 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
982 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
983 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500984
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500985 def findConfigFilePath(self, configfile):
986 """
987 Find the location on disk of configfile and if it exists and was parsed by BitBake
988 emit the ConfigFilePathFound event with the path to the file.
989 """
990 path = bb.cookerdata.findConfigFile(configfile, self.data)
991 if not path:
992 return
993
994 # Generate a list of parsed configuration files by searching the files
995 # listed in the __depends and __base_depends variables with a .conf suffix.
996 conffiles = []
997 dep_files = self.data.getVar('__base_depends', False) or []
998 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
999
1000 for f in dep_files:
1001 if f[0].endswith(".conf"):
1002 conffiles.append(f[0])
1003
1004 _, conf, conffile = path.rpartition("conf/")
1005 match = os.path.join(conf, conffile)
1006 # Try and find matches for conf/conffilename.conf as we don't always
1007 # have the full path to the file.
1008 for cfg in conffiles:
1009 if cfg.endswith(match):
1010 bb.event.fire(bb.event.ConfigFilePathFound(path),
1011 self.data)
1012 break
1013
1014 def findFilesMatchingInDir(self, filepattern, directory):
1015 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001016 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001017 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1018 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1019 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001020 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001021 """
1022
1023 matches = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001024 bbpaths = self.data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001025 for path in bbpaths:
1026 dirpath = os.path.join(path, directory)
1027 if os.path.exists(dirpath):
1028 for root, dirs, files in os.walk(dirpath):
1029 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001030 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001031 matches.append(f)
1032
1033 if matches:
1034 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1035
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001036 def findProviders(self, mc=''):
1037 return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1038
1039 def findBestProvider(self, pn, mc=''):
1040 if pn in self.recipecaches[mc].providers:
1041 filenames = self.recipecaches[mc].providers[pn]
1042 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
1043 filename = eligible[0]
1044 return None, None, None, filename
1045 elif pn in self.recipecaches[mc].pkg_pn:
1046 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1047 else:
1048 return None, None, None, None
1049
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001050 def findConfigFiles(self, varname):
1051 """
1052 Find config files which are appropriate values for varname.
1053 i.e. MACHINE, DISTRO
1054 """
1055 possible = []
1056 var = varname.lower()
1057
1058 data = self.data
1059 # iterate configs
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001060 bbpaths = data.getVar('BBPATH').split(':')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001061 for path in bbpaths:
1062 confpath = os.path.join(path, "conf", var)
1063 if os.path.exists(confpath):
1064 for root, dirs, files in os.walk(confpath):
1065 # get all child files, these are appropriate values
1066 for f in files:
1067 val, sep, end = f.rpartition('.')
1068 if end == 'conf':
1069 possible.append(val)
1070
1071 if possible:
1072 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1073
1074 def findInheritsClass(self, klass):
1075 """
1076 Find all recipes which inherit the specified class
1077 """
1078 pkg_list = []
1079
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001080 for pfn in self.recipecaches[''].pkg_fn:
1081 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001082 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001083 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001084
1085 return pkg_list
1086
1087 def generateTargetsTree(self, klass=None, pkgs=None):
1088 """
1089 Generate a dependency tree of buildable targets
1090 Generate an event with the result
1091 """
1092 # if the caller hasn't specified a pkgs list default to universe
1093 if not pkgs:
1094 pkgs = ['universe']
1095 # if inherited_class passed ensure all recipes which inherit the
1096 # specified class are included in pkgs
1097 if klass:
1098 extra_pkgs = self.findInheritsClass(klass)
1099 pkgs = pkgs + extra_pkgs
1100
1101 # generate a dependency tree for all our packages
1102 tree = self.generatePkgDepTreeData(pkgs, 'build')
1103 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1104
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001105 def interactiveMode( self ):
1106 """Drop off into a shell"""
1107 try:
1108 from bb import shell
1109 except ImportError:
1110 parselog.exception("Interactive mode not available")
1111 sys.exit(1)
1112 else:
1113 shell.start( self )
1114
1115
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001116 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001117 """Handle collections"""
1118 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001119 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001120 if collections:
1121 collection_priorities = {}
1122 collection_depends = {}
1123 collection_list = collections.split()
1124 min_prio = 0
1125 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001126 bb.debug(1,'Processing %s in collection list' % (c))
1127
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001128 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001129 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001130 if priority:
1131 try:
1132 prio = int(priority)
1133 except ValueError:
1134 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1135 errors = True
1136 if min_prio == 0 or prio < min_prio:
1137 min_prio = prio
1138 collection_priorities[c] = prio
1139 else:
1140 collection_priorities[c] = None
1141
1142 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001143 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001144 if deps:
1145 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001146 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001147 except bb.utils.VersionStringException as vse:
1148 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001149 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001150 if dep in collection_list:
1151 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001152 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001153 (op, depver) = opstr.split()
1154 if layerver:
1155 try:
1156 res = bb.utils.vercmp_string_op(layerver, depver, op)
1157 except bb.utils.VersionStringException as vse:
1158 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1159 if not res:
1160 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1161 errors = True
1162 else:
1163 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1164 errors = True
1165 else:
1166 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1167 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001168 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001169 else:
1170 collection_depends[c] = []
1171
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001172 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001173 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001174 if recs:
1175 try:
1176 recDict = bb.utils.explode_dep_versions2(recs)
1177 except bb.utils.VersionStringException as vse:
1178 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1179 for rec, oplist in list(recDict.items()):
1180 if rec in collection_list:
1181 if oplist:
1182 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001183 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001184 if layerver:
1185 (op, recver) = opstr.split()
1186 try:
1187 res = bb.utils.vercmp_string_op(layerver, recver, op)
1188 except bb.utils.VersionStringException as vse:
1189 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1190 if not res:
1191 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1192 continue
1193 else:
1194 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1195 continue
1196 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1197 collection_depends[c].append(rec)
1198 else:
1199 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1200
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001201 # Recursively work out collection priorities based on dependencies
1202 def calc_layer_priority(collection):
1203 if not collection_priorities[collection]:
1204 max_depprio = min_prio
1205 for dep in collection_depends[collection]:
1206 calc_layer_priority(dep)
1207 depprio = collection_priorities[dep]
1208 if depprio > max_depprio:
1209 max_depprio = depprio
1210 max_depprio += 1
1211 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1212 collection_priorities[collection] = max_depprio
1213
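        # Example (hypothetical layers, assuming no explicit BBFILE_PRIORITY overrides):
        # if layerB depends on layerA and layerA has no dependencies of its own, layerA
        # is assigned min_prio + 1 and layerB min_prio + 2, i.e. a layer always ends up
        # with a higher priority than the layers it depends on.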
1214 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1215 for c in collection_list:
1216 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001217 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
            if regex is None:
1219 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1220 errors = True
1221 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001222 elif regex == "":
1223 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1224 errors = False
Brad Bishop316dfdd2018-06-25 12:45:53 -04001225 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001226 else:
1227 try:
1228 cre = re.compile(regex)
1229 except re.error:
1230 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1231 errors = True
1232 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001233 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001234 if errors:
1235 # We've already printed the actual error(s)
1236 raise CollectionError("Errors during parsing layer configuration")
1237
1238 def buildSetVars(self):
1239 """
        Set up any variables needed before starting a build
1241 """
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001242 t = time.gmtime()
1243 for mc in self.databuilder.mcdata:
1244 ds = self.databuilder.mcdata[mc]
1245 if not ds.getVar("BUILDNAME", False):
1246 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1247 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1248 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1249 ds.setVar("TIME", time.strftime('%H%M%S', t))
1250
1251 def reset_mtime_caches(self):
1252 """
        Reset mtime caches - this is particularly important when running memory
        resident, since anything which is cached may well have changed since the
        last invocation (e.g. a file associated with a recipe might have been
        modified by the user).
1256 """
1257 build.reset_cache()
1258 bb.fetch._checksum_cache.mtime_cache.clear()
1259 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1260 if siggen_cache:
1261 bb.parse.siggen.checksum_cache.mtime_cache.clear()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001262
1263 def matchFiles(self, bf):
1264 """
        Find the .bb files which match the expression in 'bf'.
1266 """
1267 if bf.startswith("/") or bf.startswith("../"):
1268 bf = os.path.abspath(bf)
1269
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001270 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001271 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001272 try:
1273 os.stat(bf)
1274 bf = os.path.abspath(bf)
1275 return [bf]
1276 except OSError:
1277 regexp = re.compile(bf)
1278 matches = []
1279 for f in filelist:
1280 if regexp.search(f) and os.path.isfile(f):
1281 matches.append(f)
1282 return matches
1283
1284 def matchFile(self, buildfile):
1285 """
1286 Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
1288 """
1289 matches = self.matchFiles(buildfile)
1290 if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
1297 else:
1298 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1299 raise NoSpecificMatch
1300 return matches[0]
1301
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001302 def buildFile(self, buildfile, task):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001303 """
1304 Build the file matching regexp buildfile
1305 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001306 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001307
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001308 # Too many people use -b because they think it's how you normally
1309 # specify a target to be built, so show a warning
1310 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1311
1312 self.buildFileInternal(buildfile, task)
1313
1314 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1315 """
1316 Build the file matching regexp buildfile
1317 """
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001318
1319 # Parse the configuration here. We need to do it explicitly here since
1320 # buildFile() doesn't use the cache
1321 self.parseConfiguration()
1322
1323 # If we are told to do the None task then query the default task
        if task is None:
1325 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001326 if not task.startswith("do_"):
1327 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001328
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001329 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001330 fn = self.matchFile(fn)
1331
1332 self.buildSetVars()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001333 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001334
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001335 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1336
1337 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001338 infos = dict(infos)
1339
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001340 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001341 try:
1342 info_array = infos[fn]
1343 except KeyError:
1344 bb.fatal("%s does not exist" % fn)
1345
1346 if info_array[0].skipped:
1347 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1348
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001349 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001350
1351 # Tweak some variables
1352 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001353 self.recipecaches[mc].ignored_dependencies = set()
1354 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001355 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001356
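        # Since -b / --buildfile builds a single recipe without resolving its dependency
        # graph, the dependency structures below are emptied so the runqueue only
        # schedules tasks from this one file.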
1357 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001358 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1359 self.recipecaches[mc].deps[fn] = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001360 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1361 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001362
1363 # Invalidate task for target if force mode active
1364 if self.configuration.force:
1365 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001366 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001367
1368 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001369 taskdata = {}
1370 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001371 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001372
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001373 if quietlog:
1374 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1375 bb.runqueue.logger.setLevel(logging.WARNING)
1376
1377 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1378 if fireevents:
1379 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001380
1381 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001382 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001384 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001385
1386 def buildFileIdle(server, rq, abort):
1387
1388 msg = None
1389 interrupted = 0
1390 if abort or self.state == state.forceshutdown:
1391 rq.finish_runqueue(True)
1392 msg = "Forced shutdown"
1393 interrupted = 2
1394 elif self.state == state.shutdown:
1395 rq.finish_runqueue(False)
1396 msg = "Stopped build"
1397 interrupted = 1
1398 failures = 0
1399 try:
1400 retval = rq.execute_runqueue()
1401 except runqueue.TaskFailure as exc:
1402 failures += len(exc.args)
1403 retval = False
1404 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001405 self.command.finishAsyncCommand(str(exc))
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001406 if quietlog:
1407 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001408 return False
1409
1410 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001411 if fireevents:
1412 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001413 self.command.finishAsyncCommand(msg)
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001414 # We trashed self.recipecaches above
1415 self.parsecache_valid = False
1416 self.configuration.limited_deps = False
1417 bb.parse.siggen.reset(self.data)
1418 if quietlog:
1419 bb.runqueue.logger.setLevel(rqloglevel)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001420 return False
1421 if retval is True:
1422 return True
1423 return retval
1424
1425 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1426
1427 def buildTargets(self, targets, task):
1428 """
1429 Attempt to build the targets specified
1430 """
1431
1432 def buildTargetsIdle(server, rq, abort):
1433 msg = None
1434 interrupted = 0
1435 if abort or self.state == state.forceshutdown:
1436 rq.finish_runqueue(True)
1437 msg = "Forced shutdown"
1438 interrupted = 2
1439 elif self.state == state.shutdown:
1440 rq.finish_runqueue(False)
1441 msg = "Stopped build"
1442 interrupted = 1
1443 failures = 0
1444 try:
1445 retval = rq.execute_runqueue()
1446 except runqueue.TaskFailure as exc:
1447 failures += len(exc.args)
1448 retval = False
1449 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001450 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001451 return False
1452
1453 if not retval:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001454 try:
1455 for mc in self.multiconfigs:
1456 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1457 finally:
1458 self.command.finishAsyncCommand(msg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001459 return False
1460 if retval is True:
1461 return True
1462 return retval
1463
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001464 self.reset_mtime_caches()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001465 self.buildSetVars()
1466
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001467 # If we are told to do the None task then query the default task
        if task is None:
1469 task = self.configuration.cmd
1470
1471 if not task.startswith("do_"):
1472 task = "do_%s" % task
1473
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001474 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1475
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001476 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001477
1478 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001479
1480 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001481
        # Make targets always look like <target>:do_<task>
1483 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001484 for target in runlist:
1485 if target[0]:
1486 ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
1487 ntargets.append("%s:%s" % (target[1], target[2]))
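        # For example (hypothetical targets): a multiconfig entry yields both
        # "multiconfig:mymc:busybox:do_build" and "busybox:do_build" in ntargets,
        # while a plain target yields only the latter form.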
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001488
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001489 for mc in self.multiconfigs:
1490 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001491
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001492 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001493 if 'universe' in targets:
1494 rq.rqdata.warn_multi_bb = True
1495
1496 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1497
1498
1499 def getAllKeysWithFlags(self, flaglist):
1500 dump = {}
1501 for k in self.data.keys():
1502 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001503 expand = True
1504 flags = self.data.getVarFlags(k)
1505 if flags and "func" in flags and "python" in flags:
1506 expand = False
1507 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001508 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1509 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001510 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001511 'history' : self.data.varhistory.variable(k),
1512 }
1513 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001514 if flags and d in flags:
1515 dump[k][d] = flags[d]
1516 else:
1517 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001518 except Exception as e:
1519 print(e)
1520 return dump
1521
1522
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001523 def updateCacheSync(self):
1524 if self.state == state.running:
1525 return
1526
1527 # reload files for which we got notifications
1528 for p in self.inotify_modified_files:
1529 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001530 if p in bb.parse.BBHandler.cached_statements:
1531 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001532 self.inotify_modified_files = []
1533
1534 if not self.baseconfig_valid:
1535 logger.debug(1, "Reloading base configuration data")
1536 self.initConfigurationData()
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001537 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001538
1539 # This is called for all async commands when self.state != running
1540 def updateCache(self):
1541 if self.state == state.running:
1542 return
1543
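        # Flow sketch: bail out or tear down on shutdown/error, refresh the base
        # configuration if needed, (re)collect .bb/.bbappend files and create a
        # CookerParser the first time through, then return True while parsing is
        # still in progress and None once it has completed.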
1544 if self.state in (state.shutdown, state.forceshutdown, state.error):
1545 if hasattr(self.parser, 'shutdown'):
1546 self.parser.shutdown(clean=False, force = True)
1547 raise bb.BBHandledException()
1548
1549 if self.state != state.parsing:
1550 self.updateCacheSync()
1551
1552 if self.state != state.parsing and not self.parsecache_valid:
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001553 bb.parse.siggen.reset(self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001554 self.parseConfiguration ()
1555 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001556 for mc in self.multiconfigs:
1557 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001559 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001560 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001561 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001562
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001563 for dep in self.configuration.extra_assume_provided:
1564 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001565
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001566 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001567 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1568
1569 # Add inotify watches for directories searched for bb/bbappend files
1570 for dirent in searchdirs:
1571 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001572
1573 self.parser = CookerParser(self, filelist, masked)
1574 self.parsecache_valid = True
1575
1576 self.state = state.parsing
1577
1578 if not self.parser.parse_next():
1579 collectlog.debug(1, "parsing complete")
1580 if self.parser.error:
1581 raise bb.BBHandledException()
1582 self.show_appends_with_no_recipes()
1583 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001584 for mc in self.multiconfigs:
1585 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001586 self.state = state.running
1587
1588 # Send an event listing all stamps reachable after parsing
1589 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001590 for mc in self.multiconfigs:
1591 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1592 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001593 return None
1594
1595 return True
1596
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001597 def checkPackages(self, pkgs_to_build, task=None):
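        """
        Sanity-check and expand the requested targets: warn about targets listed in
        ASSUME_PROVIDED, and replace the pseudo-targets 'world' and 'universe' with
        concrete recipe names (prefixed with "multiconfig:<mc>:" where applicable).
        Returns the expanded copy of pkgs_to_build.
        """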
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001598
1599 # Return a copy, don't modify the original
1600 pkgs_to_build = pkgs_to_build[:]
1601
1602 if len(pkgs_to_build) == 0:
1603 raise NothingToBuild
1604
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001605 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001606 for pkg in pkgs_to_build:
1607 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001608 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001609
1610 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001611 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001612 for mc in self.multiconfigs:
1613 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1614 for t in self.recipecaches[mc].world_target:
1615 if mc:
1616 t = "multiconfig:" + mc + ":" + t
1617 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001618
1619 if 'universe' in pkgs_to_build:
Brad Bishop1a4b7ee2018-12-16 17:11:34 -08001620 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001621 parselog.debug(1, "collating packages for \"universe\"")
1622 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001623 for mc in self.multiconfigs:
1624 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001625 if task:
1626 foundtask = False
1627 for provider_fn in self.recipecaches[mc].providers[t]:
1628 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1629 foundtask = True
1630 break
1631 if not foundtask:
1632 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1633 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001634 if mc:
1635 t = "multiconfig:" + mc + ":" + t
1636 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001637
1638 return pkgs_to_build
1639
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001640 def pre_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001641 # We now are in our own process so we can call this here.
1642 # PRServ exits if its parent process exits
1643 self.handlePRServ()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001644 return
1645
1646 def post_serve(self):
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001647 prserv.serv.auto_shutdown()
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001648 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649
1650
1651 def shutdown(self, force = False):
1652 if force:
1653 self.state = state.forceshutdown
1654 else:
1655 self.state = state.shutdown
1656
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001657 if self.parser:
1658 self.parser.shutdown(clean=not force, force=force)
1659
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001660 def finishcommand(self):
1661 self.state = state.initial
1662
1663 def reset(self):
1664 self.initConfigurationData()
1665
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001666 def clientComplete(self):
1667 """Called when the client is done using the server"""
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001668 self.finishcommand()
1669 self.extraconfigdata = {}
1670 self.command.reset()
1671 self.databuilder.reset()
1672 self.data = self.databuilder.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001673
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001674
1675class CookerExit(bb.event.Event):
1676 """
1677 Notify clients of the Cooker shutdown
1678 """
1679
1680 def __init__(self):
1681 bb.event.Event.__init__(self)
1682
1683
1684class CookerCollectFiles(object):
1685 def __init__(self, priorities):
1686 self.bbappends = []
        # Priorities is a list of tuples, with the second element being the pattern.
1688 # We need to sort the list with the longest pattern first, and so on to
1689 # the shortest. This allows nested layers to be properly evaluated.
1690 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001691
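    # Illustration (hypothetical nested layers): with BBFILE_PATTERN_core = "^/srv/meta/"
    # and BBFILE_PATTERN_sub = "^/srv/meta/meta-sub/", a recipe under meta-sub/ matches
    # both regexes, but because the longer, more specific pattern sorts first above,
    # calc_bbfile_priority() below returns the sub-layer's priority for it.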
1692 def calc_bbfile_priority( self, filename, matched = None ):
1693 for _, _, regex, pri in self.bbfile_config_priorities:
1694 if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
1697 matched.add(regex)
1698 return pri
1699 return 0
1700
1701 def get_bbfiles(self):
1702 """Get list of default .bb files by reading out the current directory"""
1703 path = os.getcwd()
1704 contents = os.listdir(path)
1705 bbfiles = []
1706 for f in contents:
1707 if f.endswith(".bb"):
1708 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1709 return bbfiles
1710
1711 def find_bbfiles(self, path):
1712 """Find all the .bb and .bbappend files in a directory"""
1713 found = []
1714 for dir, dirs, files in os.walk(path):
1715 for ignored in ('SCCS', 'CVS', '.svn'):
1716 if ignored in dirs:
1717 dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1719
1720 return found
1721
1722 def collect_bbfiles(self, config, eventdata):
1723 """Collect all available .bb build files"""
1724 masked = 0
1725
1726 collectlog.debug(1, "collecting .bb files")
1727
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001728 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001729 config.setVar("BBFILES", " ".join(files))
1730
1731 # Sort files by priority
1732 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1733
1734 if not len(files):
1735 files = self.get_bbfiles()
1736
1737 if not len(files):
1738 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1739 bb.event.fire(CookerExit(), eventdata)
1740
        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this; it is horrid. We intercept the os.listdir()
        # (or os.scandir() for Python 3.6+) calls while we run glob().
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001744 origlistdir = os.listdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001745 if hasattr(os, 'scandir'):
1746 origscandir = os.scandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001747 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001748
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001749 def ourlistdir(d):
1750 searchdirs.append(d)
1751 return origlistdir(d)
1752
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001753 def ourscandir(d):
1754 searchdirs.append(d)
1755 return origscandir(d)
1756
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001757 os.listdir = ourlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001758 if hasattr(os, 'scandir'):
1759 os.scandir = ourscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001760 try:
1761 # Can't use set here as order is important
1762 newfiles = []
1763 for f in files:
1764 if os.path.isdir(f):
1765 dirfiles = self.find_bbfiles(f)
1766 for g in dirfiles:
1767 if g not in newfiles:
1768 newfiles.append(g)
1769 else:
1770 globbed = glob.glob(f)
1771 if not globbed and os.path.exists(f):
1772 globbed = [f]
1773 # glob gives files in order on disk. Sort to be deterministic.
1774 for g in sorted(globbed):
1775 if g not in newfiles:
1776 newfiles.append(g)
1777 finally:
1778 os.listdir = origlistdir
Brad Bishopd7bf8c12018-02-25 22:55:05 -05001779 if hasattr(os, 'scandir'):
1780 os.scandir = origscandir
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001781
1782 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001783
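        # BBMASK is a space-separated list of regular expressions matched (via re.search)
        # against each candidate file's full path; e.g. a hypothetical
        # BBMASK = "meta-foo/recipes-broken/ \.bbappend$" would skip everything under
        # that directory as well as every .bbappend file.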
1784 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001785 # First validate the individual regular expressions and ignore any
1786 # that do not compile
1787 bbmasks = []
1788 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001789 # When constructing an older style single regex, it's possible for BBMASK
1790 # to end up beginning with '|', which matches and masks _everything_.
1791 if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1793 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001794 try:
1795 re.compile(mask)
1796 bbmasks.append(mask)
1797 except sre_constants.error:
1798 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1799
1800 # Then validate the combined regular expressions. This should never
1801 # fail, but better safe than sorry...
1802 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001803 try:
1804 bbmask_compiled = re.compile(bbmask)
1805 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001806 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1807 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001808
1809 bbfiles = []
1810 bbappend = []
1811 for f in newfiles:
1812 if bbmask and bbmask_compiled.search(f):
1813 collectlog.debug(1, "skipping masked file %s", f)
1814 masked += 1
1815 continue
1816 if f.endswith('.bb'):
1817 bbfiles.append(f)
1818 elif f.endswith('.bbappend'):
1819 bbappend.append(f)
1820 else:
1821 collectlog.debug(1, "skipping %s: unknown file extension", f)
1822
1823 # Build a list of .bbappend files for each .bb file
1824 for f in bbappend:
1825 base = os.path.basename(f).replace('.bbappend', '.bb')
1826 self.bbappends.append((base, f))
1827
1828 # Find overlayed recipes
1829 # bbfiles will be in priority order which makes this easy
1830 bbfile_seen = dict()
1831 self.overlayed = defaultdict(list)
1832 for f in reversed(bbfiles):
1833 base = os.path.basename(f)
1834 if base not in bbfile_seen:
1835 bbfile_seen[base] = f
1836 else:
1837 topfile = bbfile_seen[base]
1838 self.overlayed[topfile].append(f)
1839
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001840 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001841
1842 def get_file_appends(self, fn):
1843 """
1844 Returns a list of .bbappend files to apply to fn
1845 """
1846 filelist = []
1847 f = os.path.basename(fn)
1848 for b in self.bbappends:
1849 (bbappend, filename) = b
1850 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1851 filelist.append(filename)
1852 return filelist
1853
1854 def collection_priorities(self, pkgfns, d):
1855
1856 priorities = {}
1857
1858 # Calculate priorities for each file
1859 matched = set()
1860 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001861 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001862 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1863
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001864 unmatched = set()
1865 for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
1867 unmatched.add(regex)
1868
Brad Bishop316dfdd2018-06-25 12:45:53 -04001869 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1870 def find_bbappend_match(regex):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001871 for b in self.bbappends:
1872 (bbfile, append) = b
1873 if regex.match(append):
                    # If the bbappend is already matched by the "matched" set, return False
1875 for matched_regex in matched:
1876 if matched_regex.match(append):
1877 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001878 return True
1879 return False
1880
1881 for unmatch in unmatched.copy():
Brad Bishop316dfdd2018-06-25 12:45:53 -04001882 if find_bbappend_match(unmatch):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001883 unmatched.remove(unmatch)
1884
1885 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1886 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001887 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001888 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001889
1890 return priorities
1891
1892class ParsingFailure(Exception):
1893 def __init__(self, realexception, recipe):
1894 self.realexception = realexception
1895 self.recipe = recipe
1896 Exception.__init__(self, realexception, recipe)
1897
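# Rough shape of the parsing pipeline implemented below: CookerParser holds the list of
# recipes that need parsing; a single Feeder process drip-feeds them into a bounded jobs
# queue; several Parser processes pull jobs, parse them via the bb.cache machinery and
# push the results onto a shared result queue, which CookerParser.parse_next() drains.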
1898class Feeder(multiprocessing.Process):
1899 def __init__(self, jobs, to_parsers, quit):
1900 self.quit = quit
1901 self.jobs = jobs
1902 self.to_parsers = to_parsers
1903 multiprocessing.Process.__init__(self)
1904
1905 def run(self):
1906 while True:
1907 try:
1908 quit = self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001909 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001910 pass
1911 else:
1912 if quit == 'cancel':
1913 self.to_parsers.cancel_join_thread()
1914 break
1915
1916 try:
1917 job = self.jobs.pop()
1918 except IndexError:
1919 break
1920
1921 try:
1922 self.to_parsers.put(job, timeout=0.5)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001923 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001924 self.jobs.insert(0, job)
1925 continue
1926
1927class Parser(multiprocessing.Process):
1928 def __init__(self, jobs, results, quit, init, profile):
1929 self.jobs = jobs
1930 self.results = results
1931 self.quit = quit
1932 self.init = init
1933 multiprocessing.Process.__init__(self)
1934 self.context = bb.utils.get_context().copy()
1935 self.handlers = bb.event.get_class_handlers().copy()
1936 self.profile = profile
1937
1938 def run(self):
1939
1940 if not self.profile:
1941 self.realrun()
1942 return
1943
1944 try:
1945 import cProfile as profile
1946 except:
1947 import profile
1948 prof = profile.Profile()
1949 try:
1950 profile.Profile.runcall(prof, self.realrun)
1951 finally:
1952 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1953 prof.dump_stats(logfile)
1954
1955 def realrun(self):
1956 if self.init:
1957 self.init()
1958
1959 pending = []
1960 while True:
1961 try:
1962 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001963 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001964 pass
1965 else:
1966 self.results.cancel_join_thread()
1967 break
1968
1969 if pending:
1970 result = pending.pop()
1971 else:
1972 try:
1973 job = self.jobs.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001974 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001975 continue
1976
1977 if job is None:
1978 break
1979 result = self.parse(*job)
1980
1981 try:
1982 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001983 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001984 pending.append(result)
1985
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001986 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001987 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001988 # Record the filename we're parsing into any events generated
1989 def parse_filter(self, record):
1990 record.taskpid = bb.event.worker_pid
1991 record.fn = filename
1992 return True
1993
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001994 # Reset our environment and handlers to the original settings
1995 bb.utils.set_context(self.context.copy())
1996 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001997 bb.event.LogHandler.filter = parse_filter
1998
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001999 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002000 except Exception as exc:
2001 tb = sys.exc_info()[2]
2002 exc.recipe = filename
2003 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2004 return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
        # and, for example, a worker thread doesn't just exit on its own in response
        # to a SystemExit event.
2008 except BaseException as exc:
2009 return True, ParsingFailure(exc, filename)
2010
2011class CookerParser(object):
2012 def __init__(self, cooker, filelist, masked):
2013 self.filelist = filelist
2014 self.cooker = cooker
2015 self.cfgdata = cooker.data
2016 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002017 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002018
2019 # Accounting statistics
2020 self.parsed = 0
2021 self.cached = 0
2022 self.error = 0
2023 self.masked = masked
2024
2025 self.skipped = 0
2026 self.virtuals = 0
2027 self.total = len(filelist)
2028
2029 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002030 self.process_names = []
2031
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002032 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002033 self.fromcache = []
2034 self.willparse = []
2035 for filename in self.filelist:
2036 appends = self.cooker.collection.get_file_appends(filename)
2037 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002038 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002039 else:
2040 self.fromcache.append((filename, appends))
2041 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002042 self.progress_chunk = int(max(self.toparse / 100, 1))
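        # i.e. fire a ParseProgress event roughly every 1% of the recipes that actually
        # need parsing (at least every recipe when fewer than 100 need parsing).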
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002043
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002044 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002045 multiprocessing.cpu_count()), len(self.willparse))
2046
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002047 self.start()
2048 self.haveshutdown = False
2049
2050 def start(self):
2051 self.results = self.load_cached()
2052 self.processes = []
2053 if self.toparse:
2054 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2055 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002056 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002057 bb.utils.set_process_name(multiprocessing.current_process().name)
2058 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2059 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002060
2061 self.feeder_quit = multiprocessing.Queue(maxsize=1)
2062 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2063 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
2064 self.result_queue = multiprocessing.Queue()
2065 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
2066 self.feeder.start()
2067 for i in range(0, self.num_processes):
2068 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2069 parser.start()
2070 self.process_names.append(parser.name)
2071 self.processes.append(parser)
2072
2073 self.results = itertools.chain(self.results, self.parse_generator())
2074
2075 def shutdown(self, clean=True, force=False):
2076 if not self.toparse:
2077 return
2078 if self.haveshutdown:
2079 return
2080 self.haveshutdown = True
2081
2082 if clean:
2083 event = bb.event.ParseCompleted(self.cached, self.parsed,
2084 self.skipped, self.masked,
2085 self.virtuals, self.error,
2086 self.total)
2087
2088 bb.event.fire(event, self.cfgdata)
2089 self.feeder_quit.put(None)
2090 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002091 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002092 else:
2093 self.feeder_quit.put('cancel')
2094
2095 self.parser_quit.cancel_join_thread()
2096 for process in self.processes:
2097 self.parser_quit.put(None)
2098
2099 self.jobs.cancel_join_thread()
2100
2101 for process in self.processes:
2102 if force:
2103 process.join(.1)
2104 process.terminate()
2105 else:
2106 process.join()
2107 self.feeder.join()
2108
2109 sync = threading.Thread(target=self.bb_cache.sync)
2110 sync.start()
2111 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002112 bb.codeparser.parser_cache_savemerge()
2113 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002114 if self.cooker.configuration.profile:
2115 profiles = []
2116 for i in self.process_names:
2117 logfile = "profile-parse-%s.log" % i
2118 if os.path.exists(logfile):
2119 profiles.append(logfile)
2120
2121 pout = "profile-parse.log.processed"
2122 bb.utils.process_profilelog(profiles, pout = pout)
2123 print("Processed parsing statistics saved to %s" % (pout))
2124
2125 def load_cached(self):
2126 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002127 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002128 yield not cached, infos
2129
2130 def parse_generator(self):
2131 while True:
2132 if self.parsed >= self.toparse:
2133 break
2134
2135 try:
2136 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002137 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002138 pass
2139 else:
2140 value = result[1]
2141 if isinstance(value, BaseException):
2142 raise value
2143 else:
2144 yield result
2145
2146 def parse_next(self):
2147 result = []
2148 parsed = None
2149 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002150 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002151 except StopIteration:
2152 self.shutdown()
2153 return False
2154 except bb.BBHandledException as exc:
2155 self.error += 1
2156 logger.error('Failed to parse recipe: %s' % exc.recipe)
2157 self.shutdown(clean=False)
2158 return False
2159 except ParsingFailure as exc:
2160 self.error += 1
2161 logger.error('Unable to parse %s: %s' %
2162 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2163 self.shutdown(clean=False)
2164 return False
2165 except bb.parse.ParseError as exc:
2166 self.error += 1
2167 logger.error(str(exc))
2168 self.shutdown(clean=False)
2169 return False
2170 except bb.data_smart.ExpansionError as exc:
2171 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002172 bbdir = os.path.dirname(__file__) + os.sep
2173 etype, value, _ = sys.exc_info()
2174 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2175 logger.error('ExpansionError during parsing %s', value.recipe,
2176 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002177 self.shutdown(clean=False)
2178 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002179 except Exception as exc:
2180 self.error += 1
2181 etype, value, tb = sys.exc_info()
2182 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002183 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002184 exc_info=(etype, value, exc.traceback))
2185 else:
                # Most likely, an exception occurred while raising an exception
2187 import traceback
2188 logger.error('Exception during parse: %s' % traceback.format_exc())
2189 self.shutdown(clean=False)
2190 return False
2191
2192 self.current += 1
2193 self.virtuals += len(result)
2194 if parsed:
2195 self.parsed += 1
2196 if self.parsed % self.progress_chunk == 0:
2197 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2198 self.cfgdata)
2199 else:
2200 self.cached += 1
2201
2202 for virtualfn, info_array in result:
2203 if info_array[0].skipped:
2204 self.skipped += 1
2205 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002206 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2207 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002208 parsed=parsed, watcher = self.cooker.add_filewatch)
2209 return True
2210
2211 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002212 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002213 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002214 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2215 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)