#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from functools import wraps
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify
import json
import pickle
import codecs

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)

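# For example, state.get_name(state.running) returns the string "running",
# while an unrecognised status code raises ValueError.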

class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

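# The event log produced by EventWriter is line-oriented JSON: the first line
# holds {"allvariables": ...} captured when the build starts, and every later
# line holds {"class": "<module>.<ClassName>", "vars": <base64-encoded pickle
# of the event>}, so a reader can recover each event by reversing the
# base64/pickle encoding used in write_event() above.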
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.__mtime_cache = {}
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqueue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

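        # baseconfig_valid and parsecache_valid track whether the cached base
        # configuration and the recipe parse cache are still current; the
        # inotify handlers (config_notifications/notifications below) clear
        # them when a watched file changes so the stale data can be re-parsed.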
        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.baseconfig_valid = False
            return
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            return
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

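    # Two watchers feed the handlers above: configwatcher/config_notifications
    # covers the base configuration files and invalidates baseconfig_valid,
    # while watcher/notifications covers recipe inputs and invalidates
    # parsecache_valid.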
    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        bb.parse.init_parser(self.data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)


    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def modifyConfigurationVar(self, var, val, default_file, op):
        if op == "append":
            self.appendConfigurationVar(var, val, default_file)
        elif op == "set":
            self.saveConfigurationVar(var, val, default_file, "=")
        elif op == "earlyAssign":
            self.saveConfigurationVar(var, val, default_file, "?=")


    def appendConfigurationVar(self, var, val, default_file):
        #add append var operation to the end of default_file
        default_file = bb.cookerdata.findConfigFile(default_file, self.data)

        total = "#added by hob"
        total += "\n%s += \"%s\"\n" % (var, val)

        with open(default_file, 'a') as f:
            f.write(total)

        #add to history
        loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
        self.data.appendVar(var, val, **loginfo)

    def saveConfigurationVar(self, var, val, default_file, op):

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if true it replace the place where the variable was declared
                    #else it comments it
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)

    def removeConfigurationVar(self, var):
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)

                            #check if the variable was saved before in the same way
                            if contents[begin_line-1]== "#added by hob\n":
                                contents[begin_line-1] = contents[begin_line] = "\n"
                            else:
                                contents[begin_line] = "\n"
                            #remove var from history
                            self.data.varhistory.del_var_history(var, conf_file, line)
                            #remove variable
                            self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

    def createConfigFile(self, name):
        path = os.getcwd()
        confpath = os.path.join(path, "conf", name)
        open(confpath, 'w').close()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                clean = False
                server_val = getattr(self.configuration, "%s_server" % o)
                if not options[o] and server_val:
                    # restore value provided on server start
                    setattr(self.configuration, o, server_val)
                    continue
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))


    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "multiconfig:*:bash"
        # into "multiconfig:A:bash multiconfig:B:bash bash"
        for k in targetlist:
            if k.startswith("multiconfig:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

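    # buildTaskData() returns (taskdata, runlist): taskdata maps each multiconfig
    # name to its TaskData object, and every runlist entry is a
    # [mc, target, taskname, providing_fn] list, which is the shape the
    # tree-generation helpers below pass on to bb.runqueue.RunQueue.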
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "multiconfig:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
                dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

            depend_tree["rdepends-pkg"].update(rdepends)
            depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in depgraph["tdepends"]:
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in depgraph["tdepends"][task]:
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

        with open('recipe-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            pndeps = {}
            for task in depgraph["tdepends"]:
                (pn, taskname) = task.rsplit(".", 1)
                if pn not in pndeps:
                    pndeps[pn] = set()
                for dep in depgraph["tdepends"][task]:
                    (deppn, deptaskname) = dep.rsplit(".", 1)
                    pndeps[pn].add(deppn)
            for pn in pndeps:
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
                for dep in pndeps[pn]:
                    if dep == pn:
                        continue
                    f.write('"%s" -> "%s"\n' % (pn, dep))
            f.write("}\n")
        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = list(self.recipecaches[''].pkg_fn.keys())
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                    False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findCoreBaseFiles(self, subdir, configfile):
        corebase = self.data.getVar('COREBASE') or ""
        paths = []
        for root, dirs, files in os.walk(corebase + '/' + subdir):
            for d in dirs:
                configfilepath = os.path.join(root, d, configfile)
                if os.path.exists(configfilepath):
                    paths.append(os.path.join(root, d))

        if paths:
            bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


Patrick Williamsc0f7c042017-02-23 20:41:17 -06001192 def handleCollections(self, collections):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001193 """Handle collections"""
1194 errors = False
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001195 self.bbfile_config_priorities = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001196 if collections:
1197 collection_priorities = {}
1198 collection_depends = {}
1199 collection_list = collections.split()
1200 min_prio = 0
1201 for c in collection_list:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001202 bb.debug(1,'Processing %s in collection list' % (c))
1203
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001204 # Get collection priority if defined explicitly
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001205 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001206 if priority:
1207 try:
1208 prio = int(priority)
1209 except ValueError:
1210 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1211 errors = True
1212 if min_prio == 0 or prio < min_prio:
1213 min_prio = prio
1214 collection_priorities[c] = prio
1215 else:
1216 collection_priorities[c] = None
1217
1218 # Check dependencies and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001219 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001220 if deps:
1221 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001222 depDict = bb.utils.explode_dep_versions2(deps)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001223 except bb.utils.VersionStringException as vse:
1224 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001225 for dep, oplist in list(depDict.items()):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001226 if dep in collection_list:
1227 for opstr in oplist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001228 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001229 (op, depver) = opstr.split()
1230 if layerver:
1231 try:
1232 res = bb.utils.vercmp_string_op(layerver, depver, op)
1233 except bb.utils.VersionStringException as vse:
1234 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1235 if not res:
1236 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1237 errors = True
1238 else:
1239 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1240 errors = True
1241 else:
1242 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1243 errors = True
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001244 collection_depends[c] = list(depDict.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001245 else:
1246 collection_depends[c] = []
1247
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001248 # Check recommends and store information for priority calculation
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001249 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001250 if recs:
1251 try:
1252 recDict = bb.utils.explode_dep_versions2(recs)
1253 except bb.utils.VersionStringException as vse:
1254 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1255 for rec, oplist in list(recDict.items()):
1256 if rec in collection_list:
1257 if oplist:
1258 opstr = oplist[0]
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001259 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001260 if layerver:
1261 (op, recver) = opstr.split()
1262 try:
1263 res = bb.utils.vercmp_string_op(layerver, recver, op)
1264 except bb.utils.VersionStringException as vse:
1265 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1266 if not res:
1267 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1268 continue
1269 else:
1270 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1271 continue
1272 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1273 collection_depends[c].append(rec)
1274 else:
1275 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1276
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001277 # Recursively work out collection priorities based on dependencies
1278 def calc_layer_priority(collection):
1279 if not collection_priorities[collection]:
1280 max_depprio = min_prio
1281 for dep in collection_depends[collection]:
1282 calc_layer_priority(dep)
1283 depprio = collection_priorities[dep]
1284 if depprio > max_depprio:
1285 max_depprio = depprio
1286 max_depprio += 1
1287 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1288 collection_priorities[collection] = max_depprio
1289
1290 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1291 for c in collection_list:
1292 calc_layer_priority(c)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001293 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001294 if regex == None:
1295 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1296 errors = True
1297 continue
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001298 elif regex == "":
1299 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1301 else:
1302 try:
1303 cre = re.compile(regex)
1304 except re.error:
1305 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1306 errors = True
1307 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001308 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001309 if errors:
1310 # We've already printed the actual error(s)
1311 raise CollectionError("Errors during parsing layer configuration")
1312
1313 def buildSetVars(self):
1314 """
1315 Setup any variables needed before starting a build
1316 """
1317 t = time.gmtime()
1318 if not self.data.getVar("BUILDNAME", False):
1319 self.data.setVar("BUILDNAME", "${DATE}${TIME}")
1320 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1321 self.data.setVar("DATE", time.strftime('%Y%m%d', t))
1322 self.data.setVar("TIME", time.strftime('%H%M%S', t))
1323
1324 def matchFiles(self, bf):
1325 """
1326 Find the .bb files which match the expression in 'buildfile'.
1327 """
1328 if bf.startswith("/") or bf.startswith("../"):
1329 bf = os.path.abspath(bf)
1330
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001331 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001332 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
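        # If 'bf' names an existing file, use it directly; otherwise treat it
        # as a regular expression and search the collected recipe list, so a
        # pattern such as "busybox_.*\.bb" (illustrative) selects by name.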
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001333 try:
1334 os.stat(bf)
1335 bf = os.path.abspath(bf)
1336 return [bf]
1337 except OSError:
1338 regexp = re.compile(bf)
1339 matches = []
1340 for f in filelist:
1341 if regexp.search(f) and os.path.isfile(f):
1342 matches.append(f)
1343 return matches
1344
1345 def matchFile(self, buildfile):
1346 """
1347 Find the .bb file which matches the expression in 'buildfile'.
1348        Raise an error if no file or multiple files match.
1349 """
1350 matches = self.matchFiles(buildfile)
1351 if len(matches) != 1:
1352 if matches:
1353 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1355                for f in matches:
1356                    msg += "\n    %s" % f
1357 parselog.error(msg)
1358 else:
1359 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1360 raise NoSpecificMatch
1361 return matches[0]
1362
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001363 def buildFile(self, buildfile, task, hidewarning=False):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001364 """
1365 Build the file matching regexp buildfile
1366 """
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001367 bb.event.fire(bb.event.BuildInit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001368
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001369 if not hidewarning:
1370 # Too many people use -b because they think it's how you normally
1371 # specify a target to be built, so show a warning
1372 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001373
1374 # Parse the configuration here. We need to do it explicitly here since
1375 # buildFile() doesn't use the cache
1376 self.parseConfiguration()
1377
1378 # If we are told to do the None task then query the default task
1379        if task is None:
1380 task = self.configuration.cmd
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001381 if not task.startswith("do_"):
1382 task = "do_%s" % task
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001383
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001384 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001385 fn = self.matchFile(fn)
1386
1387 self.buildSetVars()
1388
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001389 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
1390
1391 infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001392 infos = dict(infos)
1393
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001394 fn = bb.cache.realfn2virtual(fn, cls, mc)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001395 try:
1396 info_array = infos[fn]
1397 except KeyError:
1398 bb.fatal("%s does not exist" % fn)
1399
1400 if info_array[0].skipped:
1401 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1402
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001403 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001404
1405 # Tweak some variables
1406 item = info_array[0].pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001407 self.recipecaches[mc].ignored_dependencies = set()
1408 self.recipecaches[mc].bbfile_priority[fn] = 1
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001409 self.configuration.limited_deps = True
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001410
1411 # Remove external dependencies
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001412 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1413 self.recipecaches[mc].deps[fn] = []
1414 self.recipecaches[mc].rundeps[fn] = []
1415 self.recipecaches[mc].runrecs[fn] = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001416
1417 # Invalidate task for target if force mode active
1418 if self.configuration.force:
1419 logger.verbose("Invalidate task %s, %s", task, fn)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001420 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001421
1422 # Setup taskdata structure
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001423 taskdata = {}
1424 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
1425 taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001426
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001427 buildname = self.data.getVar("BUILDNAME")
1428 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001429
1430 # Execute the runqueue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001431 runlist = [[mc, item, task, fn]]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001432
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001433 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001434
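        # Idle callback registered with the server below: it is called
        # repeatedly until it returns False (build finished or aborted);
        # True means "call again", any other value is passed back as a delay.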
1435 def buildFileIdle(server, rq, abort):
1436
1437 msg = None
1438 interrupted = 0
1439 if abort or self.state == state.forceshutdown:
1440 rq.finish_runqueue(True)
1441 msg = "Forced shutdown"
1442 interrupted = 2
1443 elif self.state == state.shutdown:
1444 rq.finish_runqueue(False)
1445 msg = "Stopped build"
1446 interrupted = 1
1447 failures = 0
1448 try:
1449 retval = rq.execute_runqueue()
1450 except runqueue.TaskFailure as exc:
1451 failures += len(exc.args)
1452 retval = False
1453 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001454 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001455 return False
1456
1457 if not retval:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001458 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001459 self.command.finishAsyncCommand(msg)
1460 return False
1461 if retval is True:
1462 return True
1463 return retval
1464
1465 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1466
1467 def buildTargets(self, targets, task):
1468 """
1469 Attempt to build the targets specified
1470 """
1471
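        # Same idle-callback contract as buildFileIdle() above.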
1472 def buildTargetsIdle(server, rq, abort):
1473 msg = None
1474 interrupted = 0
1475 if abort or self.state == state.forceshutdown:
1476 rq.finish_runqueue(True)
1477 msg = "Forced shutdown"
1478 interrupted = 2
1479 elif self.state == state.shutdown:
1480 rq.finish_runqueue(False)
1481 msg = "Stopped build"
1482 interrupted = 1
1483 failures = 0
1484 try:
1485 retval = rq.execute_runqueue()
1486 except runqueue.TaskFailure as exc:
1487 failures += len(exc.args)
1488 retval = False
1489 except SystemExit as exc:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001490 self.command.finishAsyncCommand(str(exc))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001491 return False
1492
1493 if not retval:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001494 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001495 self.command.finishAsyncCommand(msg)
1496 return False
1497 if retval is True:
1498 return True
1499 return retval
1500
1501 build.reset_cache()
1502 self.buildSetVars()
1503
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001504 # If we are told to do the None task then query the default task
1505 if (task == None):
1506 task = self.configuration.cmd
1507
1508 if not task.startswith("do_"):
1509 task = "do_%s" % task
1510
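        # Targets may already carry an explicit task ("recipe:do_task"); attach
        # the task chosen above to the rest so every entry is uniform.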
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001511 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1512
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001513 bb.event.fire(bb.event.BuildInit(packages), self.data)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001514
1515 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001516
1517 buildname = self.data.getVar("BUILDNAME", False)
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001518
1519 # make targets to always look as <target>:do_<task>
1520 ntargets = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001521 for target in runlist:
1522 if target[0]:
1523 ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
1524 ntargets.append("%s:%s" % (target[1], target[2]))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001525
1526 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001527
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001528 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529 if 'universe' in targets:
1530 rq.rqdata.warn_multi_bb = True
1531
1532 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1533
1534
1535 def getAllKeysWithFlags(self, flaglist):
1536 dump = {}
1537 for k in self.data.keys():
1538 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001539 expand = True
1540 flags = self.data.getVarFlags(k)
1541 if flags and "func" in flags and "python" in flags:
1542 expand = False
1543 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001544 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1545 dump[k] = {
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001546 'v' : str(v) ,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001547 'history' : self.data.varhistory.variable(k),
1548 }
1549 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001550 if flags and d in flags:
1551 dump[k][d] = flags[d]
1552 else:
1553 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001554 except Exception as e:
1555 print(e)
1556 return dump
1557
1558
1559 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1560 '''
1561 Create a new image with a "require"/"inherit" base_image statement
1562 '''
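        # Rough sketch of the generated recipe (values are illustrative):
        #   require <base_image>            (or "inherit core-image" if none)
        #   IMAGE_INSTALL = "<package_queue contents>"
        #   DESCRIPTION = "<description>"
        #   IMAGE_BASENAME = "${PN}"        (only if the base image overrides it)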
1563 if timestamp:
1564 image_name = os.path.splitext(image)[0]
1565 timestr = time.strftime("-%Y%m%d-%H%M%S")
1566 dest = image_name + str(timestr) + ".bb"
1567 else:
1568 if not image.endswith(".bb"):
1569 dest = image + ".bb"
1570 else:
1571 dest = image
1572
1573 basename = False
1574 if base_image:
1575 with open(base_image, 'r') as f:
1576 require_line = f.readline()
1577 p = re.compile("IMAGE_BASENAME *=")
1578 for line in f:
1579 if p.search(line):
1580 basename = True
1581
1582 with open(dest, "w") as imagefile:
1583 if base_image is None:
1584 imagefile.write("inherit core-image\n")
1585 else:
1586 topdir = self.data.getVar("TOPDIR", False)
1587 if topdir in base_image:
1588 base_image = require_line.split()[1]
1589 imagefile.write("require " + base_image + "\n")
1590 image_install = "IMAGE_INSTALL = \""
1591 for package in package_queue:
1592 image_install += str(package) + " "
1593 image_install += "\"\n"
1594 imagefile.write(image_install)
1595
1596 description_var = "DESCRIPTION = \"" + description + "\"\n"
1597 imagefile.write(description_var)
1598
1599 if basename:
1600            # If this is overwritten in an inherited image, reset it to the default
1601 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1602 imagefile.write(image_basename)
1603
1604 self.state = state.initial
1605 if timestamp:
1606 return timestr
1607
1608 def updateCacheSync(self):
1609 if self.state == state.running:
1610 return
1611
1612 # reload files for which we got notifications
1613 for p in self.inotify_modified_files:
1614 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001615 if p in bb.parse.BBHandler.cached_statements:
1616 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001617 self.inotify_modified_files = []
1618
1619 if not self.baseconfig_valid:
1620 logger.debug(1, "Reloading base configuration data")
1621 self.initConfigurationData()
1622 self.baseconfig_valid = True
1623 self.parsecache_valid = False
1624
1625 # This is called for all async commands when self.state != running
1626 def updateCache(self):
1627 if self.state == state.running:
1628 return
1629
1630 if self.state in (state.shutdown, state.forceshutdown, state.error):
1631 if hasattr(self.parser, 'shutdown'):
1632 self.parser.shutdown(clean=False, force = True)
1633 raise bb.BBHandledException()
1634
1635 if self.state != state.parsing:
1636 self.updateCacheSync()
1637
1638 if self.state != state.parsing and not self.parsecache_valid:
1639 self.parseConfiguration ()
1640 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001641 for mc in self.multiconfigs:
1642 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001643
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001644 for mc in self.multiconfigs:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001645 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001646 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001647
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001648 for dep in self.configuration.extra_assume_provided:
1649 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001650
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001651 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001652 (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
1653
1654 # Add inotify watches for directories searched for bb/bbappend files
1655 for dirent in searchdirs:
1656 self.add_filewatch([[dirent]], dirs=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001657
1658 self.parser = CookerParser(self, filelist, masked)
1659 self.parsecache_valid = True
1660
1661 self.state = state.parsing
1662
1663 if not self.parser.parse_next():
1664 collectlog.debug(1, "parsing complete")
1665 if self.parser.error:
1666 raise bb.BBHandledException()
1667 self.show_appends_with_no_recipes()
1668 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001669 for mc in self.multiconfigs:
1670 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001671 self.state = state.running
1672
1673 # Send an event listing all stamps reachable after parsing
1674 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001675 for mc in self.multiconfigs:
1676 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1677 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001678 return None
1679
1680 return True
1681
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001682 def checkPackages(self, pkgs_to_build, task=None):
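        # Validate the requested targets: warn about anything listed in
        # ASSUME_PROVIDED and expand the 'world' and 'universe' pseudo-targets
        # into concrete recipe targets (per multiconfig).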
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001683
1684 # Return a copy, don't modify the original
1685 pkgs_to_build = pkgs_to_build[:]
1686
1687 if len(pkgs_to_build) == 0:
1688 raise NothingToBuild
1689
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001690 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001691 for pkg in pkgs_to_build:
1692 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001693 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001694
1695 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001696 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001697 for mc in self.multiconfigs:
1698 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1699 for t in self.recipecaches[mc].world_target:
1700 if mc:
1701 t = "multiconfig:" + mc + ":" + t
1702 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001703
1704 if 'universe' in pkgs_to_build:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001705 parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001706 parselog.debug(1, "collating packages for \"universe\"")
1707 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001708 for mc in self.multiconfigs:
1709 for t in self.recipecaches[mc].universe_target:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001710 if task:
1711 foundtask = False
1712 for provider_fn in self.recipecaches[mc].providers[t]:
1713 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1714 foundtask = True
1715 break
1716 if not foundtask:
1717 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1718 continue
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001719 if mc:
1720 t = "multiconfig:" + mc + ":" + t
1721 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001722
1723 return pkgs_to_build
1724
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001725 def pre_serve(self):
1726 # Empty the environment. The environment will be populated as
1727 # necessary from the data store.
1728 #bb.utils.empty_environment()
1729 try:
1730 self.prhost = prserv.serv.auto_start(self.data)
1731 except prserv.serv.PRServiceConfigError:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001732 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001733 self.state = state.error
1734 return
1735
1736 def post_serve(self):
1737 prserv.serv.auto_shutdown(self.data)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001738 bb.event.fire(CookerExit(), self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001739 lockfile = self.lock.name
1740 self.lock.close()
1741 self.lock = None
1742
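        # Try to re-acquire bitbake.lock before completing shutdown; if another
        # process still holds it, report the holders (via lsof, falling back to
        # fuser) and keep retrying rather than exiting underneath them.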
1743 while not self.lock:
1744 with bb.utils.timeout(3):
1745 self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
1746 if not self.lock:
1747 # Some systems may not have lsof available
1748 procs = None
1749 try:
1750 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
1751 except OSError as e:
1752 if e.errno != errno.ENOENT:
1753 raise
1754 if procs is None:
1755 # Fall back to fuser if lsof is unavailable
1756 try:
1757 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
1758 except OSError as e:
1759 if e.errno != errno.ENOENT:
1760 raise
1761
1762 msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
1763 if procs:
1764 msg += ":\n%s" % str(procs)
1765 print(msg)
1766
1767
1768 def shutdown(self, force = False):
1769 if force:
1770 self.state = state.forceshutdown
1771 else:
1772 self.state = state.shutdown
1773
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001774 if self.parser:
1775 self.parser.shutdown(clean=not force, force=force)
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001776 self.notifier.stop()
1777 self.confignotifier.stop()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001778
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001779 def finishcommand(self):
1780 self.state = state.initial
1781
1782 def reset(self):
1783 self.initConfigurationData()
1784
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001785 def clientComplete(self):
1786 """Called when the client is done using the server"""
1787 if self.configuration.server_only:
1788 self.finishcommand()
1789 else:
1790 self.shutdown(True)
1791
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001792 def lockBitbake(self):
1793 if not hasattr(self, 'lock'):
1794 self.lock = None
1795 if self.data:
1796 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
1797 if lockfile:
1798 self.lock = bb.utils.lockfile(lockfile, False, False)
1799 return self.lock
1800
1801 def unlockBitbake(self):
1802 if hasattr(self, 'lock') and self.lock:
1803 bb.utils.unlockfile(self.lock)
1804
1805def server_main(cooker, func, *args):
1806 cooker.pre_serve()
1807
1808 if cooker.configuration.profile:
1809 try:
1810 import cProfile as profile
1811 except:
1812 import profile
1813 prof = profile.Profile()
1814
1815 ret = profile.Profile.runcall(prof, func, *args)
1816
1817 prof.dump_stats("profile.log")
1818 bb.utils.process_profilelog("profile.log")
1819 print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
1820
1821 else:
1822 ret = func(*args)
1823
1824 cooker.post_serve()
1825
1826 return ret
1827
1828class CookerExit(bb.event.Event):
1829 """
1830 Notify clients of the Cooker shutdown
1831 """
1832
1833 def __init__(self):
1834 bb.event.Event.__init__(self)
1835
1836
1837class CookerCollectFiles(object):
1838 def __init__(self, priorities):
1839 self.bbappends = []
1840 self.bbfile_config_priorities = priorities
1841
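    # Return the priority of the first collection whose BBFILE_PATTERN matches
    # the given filename, or 0 if none match. 'matched' collects the regexes
    # that hit so callers can warn about patterns which never matched anything.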
1842 def calc_bbfile_priority( self, filename, matched = None ):
1843 for _, _, regex, pri in self.bbfile_config_priorities:
1844 if regex.match(filename):
1845                if matched is not None:
1846                    if regex not in matched:
1847 matched.add(regex)
1848 return pri
1849 return 0
1850
1851 def get_bbfiles(self):
1852 """Get list of default .bb files by reading out the current directory"""
1853 path = os.getcwd()
1854 contents = os.listdir(path)
1855 bbfiles = []
1856 for f in contents:
1857 if f.endswith(".bb"):
1858 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1859 return bbfiles
1860
1861 def find_bbfiles(self, path):
1862 """Find all the .bb and .bbappend files in a directory"""
1863 found = []
1864 for dir, dirs, files in os.walk(path):
1865 for ignored in ('SCCS', 'CVS', '.svn'):
1866 if ignored in dirs:
1867 dirs.remove(ignored)
1868            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1869
1870 return found
1871
1872 def collect_bbfiles(self, config, eventdata):
1873 """Collect all available .bb build files"""
1874 masked = 0
1875
1876 collectlog.debug(1, "collecting .bb files")
1877
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001878 files = (config.getVar( "BBFILES") or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001879 config.setVar("BBFILES", " ".join(files))
1880
1881 # Sort files by priority
1882 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1883
1884 if not len(files):
1885 files = self.get_bbfiles()
1886
1887 if not len(files):
1888 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1889 bb.event.fire(CookerExit(), eventdata)
1890
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001891 # We need to track where we look so that we can add inotify watches. There
1892 # is no nice way to do this, this is horrid. We intercept the os.listdir()
1893 # calls while we run glob().
1894 origlistdir = os.listdir
1895 searchdirs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001896
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001897 def ourlistdir(d):
1898 searchdirs.append(d)
1899 return origlistdir(d)
1900
1901 os.listdir = ourlistdir
1902 try:
1903 # Can't use set here as order is important
1904 newfiles = []
1905 for f in files:
1906 if os.path.isdir(f):
1907 dirfiles = self.find_bbfiles(f)
1908 for g in dirfiles:
1909 if g not in newfiles:
1910 newfiles.append(g)
1911 else:
1912 globbed = glob.glob(f)
1913 if not globbed and os.path.exists(f):
1914 globbed = [f]
1915 # glob gives files in order on disk. Sort to be deterministic.
1916 for g in sorted(globbed):
1917 if g not in newfiles:
1918 newfiles.append(g)
1919 finally:
1920 os.listdir = origlistdir
1921
1922 bbmask = config.getVar('BBMASK')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001923
1924 if bbmask:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001925 # First validate the individual regular expressions and ignore any
1926 # that do not compile
1927 bbmasks = []
1928 for mask in bbmask.split():
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001929 # When constructing an older style single regex, it's possible for BBMASK
1930 # to end up beginning with '|', which matches and masks _everything_.
1931 if mask.startswith("|"):
1932                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1933 mask = mask[1:]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001934 try:
1935 re.compile(mask)
1936 bbmasks.append(mask)
1937 except sre_constants.error:
1938 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1939
1940 # Then validate the combined regular expressions. This should never
1941 # fail, but better safe than sorry...
1942 bbmask = "|".join(bbmasks)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001943 try:
1944 bbmask_compiled = re.compile(bbmask)
1945 except sre_constants.error:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001946 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1947 bbmask = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001948
1949 bbfiles = []
1950 bbappend = []
1951 for f in newfiles:
1952 if bbmask and bbmask_compiled.search(f):
1953 collectlog.debug(1, "skipping masked file %s", f)
1954 masked += 1
1955 continue
1956 if f.endswith('.bb'):
1957 bbfiles.append(f)
1958 elif f.endswith('.bbappend'):
1959 bbappend.append(f)
1960 else:
1961 collectlog.debug(1, "skipping %s: unknown file extension", f)
1962
1963 # Build a list of .bbappend files for each .bb file
1964 for f in bbappend:
1965 base = os.path.basename(f).replace('.bbappend', '.bb')
1966 self.bbappends.append((base, f))
1967
1968 # Find overlayed recipes
1969 # bbfiles will be in priority order which makes this easy
1970 bbfile_seen = dict()
1971 self.overlayed = defaultdict(list)
1972 for f in reversed(bbfiles):
1973 base = os.path.basename(f)
1974 if base not in bbfile_seen:
1975 bbfile_seen[base] = f
1976 else:
1977 topfile = bbfile_seen[base]
1978 self.overlayed[topfile].append(f)
1979
Brad Bishop6e60e8b2018-02-01 10:27:11 -05001980 return (bbfiles, masked, searchdirs)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001981
1982 def get_file_appends(self, fn):
1983 """
1984 Returns a list of .bbappend files to apply to fn
1985 """
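        # A '%' in a .bbappend name wildcards the rest of the version, e.g. a
        # busybox_1.%.bbappend (illustrative) would apply to busybox_1.24.1.bb.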
1986 filelist = []
1987 f = os.path.basename(fn)
1988 for b in self.bbappends:
1989 (bbappend, filename) = b
1990 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1991 filelist.append(filename)
1992 return filelist
1993
1994 def collection_priorities(self, pkgfns, d):
1995
1996 priorities = {}
1997
1998 # Calculate priorities for each file
1999 matched = set()
2000 for p in pkgfns:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002001 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002002 priorities[p] = self.calc_bbfile_priority(realfn, matched)
2003
2004 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
2005 unmatched = set()
2006 for _, _, regex, pri in self.bbfile_config_priorities:
2007            if regex not in matched:
2008 unmatched.add(regex)
2009
2010 def findmatch(regex):
2011 for b in self.bbappends:
2012 (bbfile, append) = b
2013 if regex.match(append):
2014 return True
2015 return False
2016
2017 for unmatch in unmatched.copy():
2018 if findmatch(unmatch):
2019 unmatched.remove(unmatch)
2020
2021 for collection, pattern, regex, _ in self.bbfile_config_priorities:
2022 if regex in unmatched:
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002023 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002024 collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002025
2026 return priorities
2027
2028class ParsingFailure(Exception):
2029 def __init__(self, realexception, recipe):
2030 self.realexception = realexception
2031 self.recipe = recipe
2032 Exception.__init__(self, realexception, recipe)
2033
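# Recipe parsing is fanned out across processes: a single Feeder pushes
# (filename, appends) jobs onto a queue, a pool of Parser processes parse
# them, and CookerParser collects the results back in the main process.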
2034class Feeder(multiprocessing.Process):
2035 def __init__(self, jobs, to_parsers, quit):
2036 self.quit = quit
2037 self.jobs = jobs
2038 self.to_parsers = to_parsers
2039 multiprocessing.Process.__init__(self)
2040
2041 def run(self):
2042 while True:
2043 try:
2044 quit = self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002045 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002046 pass
2047 else:
2048 if quit == 'cancel':
2049 self.to_parsers.cancel_join_thread()
2050 break
2051
2052 try:
2053 job = self.jobs.pop()
2054 except IndexError:
2055 break
2056
2057 try:
2058 self.to_parsers.put(job, timeout=0.5)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002059 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002060 self.jobs.insert(0, job)
2061 continue
2062
2063class Parser(multiprocessing.Process):
2064 def __init__(self, jobs, results, quit, init, profile):
2065 self.jobs = jobs
2066 self.results = results
2067 self.quit = quit
2068 self.init = init
2069 multiprocessing.Process.__init__(self)
2070 self.context = bb.utils.get_context().copy()
2071 self.handlers = bb.event.get_class_handlers().copy()
2072 self.profile = profile
2073
2074 def run(self):
2075
2076 if not self.profile:
2077 self.realrun()
2078 return
2079
2080 try:
2081 import cProfile as profile
2082 except:
2083 import profile
2084 prof = profile.Profile()
2085 try:
2086 profile.Profile.runcall(prof, self.realrun)
2087 finally:
2088 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2089 prof.dump_stats(logfile)
2090
2091 def realrun(self):
2092 if self.init:
2093 self.init()
2094
2095 pending = []
2096 while True:
2097 try:
2098 self.quit.get_nowait()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002099 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002100 pass
2101 else:
2102 self.results.cancel_join_thread()
2103 break
2104
2105 if pending:
2106 result = pending.pop()
2107 else:
2108 try:
2109 job = self.jobs.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002110 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002111 continue
2112
2113 if job is None:
2114 break
2115 result = self.parse(*job)
2116
2117 try:
2118 self.results.put(result, timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002119 except queue.Full:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002120 pending.append(result)
2121
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002122 def parse(self, filename, appends):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002123 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002124 # Record the filename we're parsing into any events generated
2125 def parse_filter(self, record):
2126 record.taskpid = bb.event.worker_pid
2127 record.fn = filename
2128 return True
2129
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002130 # Reset our environment and handlers to the original settings
2131 bb.utils.set_context(self.context.copy())
2132 bb.event.set_class_handlers(self.handlers.copy())
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002133 bb.event.LogHandler.filter = parse_filter
2134
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002135 return True, self.bb_cache.parse(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002136 except Exception as exc:
2137 tb = sys.exc_info()[2]
2138 exc.recipe = filename
2139 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2140 return True, exc
2141        # Need to turn BaseExceptions into Exceptions here so we shut down
2142        # gracefully and, for example, a worker thread doesn't just exit on
2143        # its own in response to a SystemExit event.
2144 except BaseException as exc:
2145 return True, ParsingFailure(exc, filename)
2146
2147class CookerParser(object):
2148 def __init__(self, cooker, filelist, masked):
2149 self.filelist = filelist
2150 self.cooker = cooker
2151 self.cfgdata = cooker.data
2152 self.cfghash = cooker.data_hash
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002153 self.cfgbuilder = cooker.databuilder
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002154
2155 # Accounting statistics
2156 self.parsed = 0
2157 self.cached = 0
2158 self.error = 0
2159 self.masked = masked
2160
2161 self.skipped = 0
2162 self.virtuals = 0
2163 self.total = len(filelist)
2164
2165 self.current = 0
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002166 self.process_names = []
2167
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002168 self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002169 self.fromcache = []
2170 self.willparse = []
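        # Split the recipes into those whose cache entry is still valid (served
        # from load_cached()) and those that must be re-parsed by the workers.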
2171 for filename in self.filelist:
2172 appends = self.cooker.collection.get_file_appends(filename)
2173 if not self.bb_cache.cacheValid(filename, appends):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002174 self.willparse.append((filename, appends))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002175 else:
2176 self.fromcache.append((filename, appends))
2177 self.toparse = self.total - len(self.fromcache)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002178 self.progress_chunk = int(max(self.toparse / 100, 1))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002179
Brad Bishop6e60e8b2018-02-01 10:27:11 -05002180 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002181 multiprocessing.cpu_count()), len(self.willparse))
2182
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002183 self.start()
2184 self.haveshutdown = False
2185
2186 def start(self):
2187 self.results = self.load_cached()
2188 self.processes = []
2189 if self.toparse:
2190 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2191 def init():
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002192 Parser.bb_cache = self.bb_cache
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002193 bb.utils.set_process_name(multiprocessing.current_process().name)
2194 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2195 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002196
2197 self.feeder_quit = multiprocessing.Queue(maxsize=1)
2198 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2199 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
2200 self.result_queue = multiprocessing.Queue()
2201 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
2202 self.feeder.start()
2203 for i in range(0, self.num_processes):
2204 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2205 parser.start()
2206 self.process_names.append(parser.name)
2207 self.processes.append(parser)
2208
2209 self.results = itertools.chain(self.results, self.parse_generator())
2210
2211 def shutdown(self, clean=True, force=False):
2212 if not self.toparse:
2213 return
2214 if self.haveshutdown:
2215 return
2216 self.haveshutdown = True
2217
2218 if clean:
2219 event = bb.event.ParseCompleted(self.cached, self.parsed,
2220 self.skipped, self.masked,
2221 self.virtuals, self.error,
2222 self.total)
2223
2224 bb.event.fire(event, self.cfgdata)
2225 self.feeder_quit.put(None)
2226 for process in self.processes:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002227 self.parser_quit.put(None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002228 else:
2229 self.feeder_quit.put('cancel')
2230
2231 self.parser_quit.cancel_join_thread()
2232 for process in self.processes:
2233 self.parser_quit.put(None)
2234
2235 self.jobs.cancel_join_thread()
2236
2237 for process in self.processes:
2238 if force:
2239 process.join(.1)
2240 process.terminate()
2241 else:
2242 process.join()
2243 self.feeder.join()
2244
2245 sync = threading.Thread(target=self.bb_cache.sync)
2246 sync.start()
2247 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002248 bb.codeparser.parser_cache_savemerge()
2249 bb.fetch.fetcher_parse_done()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002250 if self.cooker.configuration.profile:
2251 profiles = []
2252 for i in self.process_names:
2253 logfile = "profile-parse-%s.log" % i
2254 if os.path.exists(logfile):
2255 profiles.append(logfile)
2256
2257 pout = "profile-parse.log.processed"
2258 bb.utils.process_profilelog(profiles, pout = pout)
2259 print("Processed parsing statistics saved to %s" % (pout))
2260
2261 def load_cached(self):
2262 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002263 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002264 yield not cached, infos
2265
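    # Pull results from the worker result queue until the number of parsed
    # recipes reaches self.toparse, re-raising any exception a worker sent back.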
2266 def parse_generator(self):
2267 while True:
2268 if self.parsed >= self.toparse:
2269 break
2270
2271 try:
2272 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002273 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002274 pass
2275 else:
2276 value = result[1]
2277 if isinstance(value, BaseException):
2278 raise value
2279 else:
2280 yield result
2281
2282 def parse_next(self):
2283 result = []
2284 parsed = None
2285 try:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002286 parsed, result = next(self.results)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002287 except StopIteration:
2288 self.shutdown()
2289 return False
2290 except bb.BBHandledException as exc:
2291 self.error += 1
2292 logger.error('Failed to parse recipe: %s' % exc.recipe)
2293 self.shutdown(clean=False)
2294 return False
2295 except ParsingFailure as exc:
2296 self.error += 1
2297 logger.error('Unable to parse %s: %s' %
2298 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2299 self.shutdown(clean=False)
2300 return False
2301 except bb.parse.ParseError as exc:
2302 self.error += 1
2303 logger.error(str(exc))
2304 self.shutdown(clean=False)
2305 return False
2306 except bb.data_smart.ExpansionError as exc:
2307 self.error += 1
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002308 bbdir = os.path.dirname(__file__) + os.sep
2309 etype, value, _ = sys.exc_info()
2310 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2311 logger.error('ExpansionError during parsing %s', value.recipe,
2312 exc_info=(etype, value, tb))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002313 self.shutdown(clean=False)
2314 return False
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002315 except Exception as exc:
2316 self.error += 1
2317 etype, value, tb = sys.exc_info()
2318 if hasattr(value, "recipe"):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002319 logger.error('Unable to parse %s' % value.recipe,
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002320 exc_info=(etype, value, exc.traceback))
2321 else:
2322 # Most likely, an exception occurred during raising an exception
2323 import traceback
2324 logger.error('Exception during parse: %s' % traceback.format_exc())
2325 self.shutdown(clean=False)
2326 return False
2327
2328 self.current += 1
2329 self.virtuals += len(result)
2330 if parsed:
2331 self.parsed += 1
2332 if self.parsed % self.progress_chunk == 0:
2333 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2334 self.cfgdata)
2335 else:
2336 self.cached += 1
2337
2338 for virtualfn, info_array in result:
2339 if info_array[0].skipped:
2340 self.skipped += 1
2341 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002342 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
2343 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002344 parsed=parsed, watcher = self.cooker.add_filewatch)
2345 return True
2346
2347 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002348 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002349 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002350 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2351 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)