blob: 07897be2796e1d1b2b17e3fff7bae4597ede31fe [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
Patrick Williamsc0f7c042017-02-23 20:41:17 -060025
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
Patrick Williamsc0f7c042017-02-23 20:41:17 -060033from io import StringIO, UnsupportedOperation
Patrick Williamsc124f4f2015-09-15 14:41:29 -050034from contextlib import closing
35from functools import wraps
Patrick Williamsc0f7c042017-02-23 20:41:17 -060036from collections import defaultdict, namedtuple
Patrick Williamsc124f4f2015-09-15 14:41:29 -050037import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
Patrick Williamsc0f7c042017-02-23 20:41:17 -060039import queue
Patrick Williamsc124f4f2015-09-15 14:41:29 -050040import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
Patrick Williamsc0f7c042017-02-23 20:41:17 -060045import json
46import pickle
47import codecs
Patrick Williamsc124f4f2015-09-15 14:41:29 -050048
# Module-level loggers: the root "BitBake" logger plus per-subsystem child
# loggers used throughout the cooker for collection, build, parse and
# provider diagnostics.
logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
54
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    for a requested target (handled centrally, hence BBHandledException).
    """
59
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    (the requested target list resolved to no buildable work).
    """
64
class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    (handled centrally, hence BBHandledException).
    """
69
class state:
    """Symbolic constants for the cooker server's lifecycle state machine."""
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Map a numeric state *code* back to its attribute name.

        Raises ValueError if *code* does not correspond to any state.
        """
        for attr in dir(cls):
            candidate = getattr(cls, attr)
            if type(candidate) == type(cls.initial) and candidate == code:
                return attr
        raise ValueError("Invalid status code: %s" % code)
80
Patrick Williamsc124f4f2015-09-15 14:41:29 -050081
class SkippedPackage:
    """Lightweight record describing a recipe that was skipped during parsing.

    Built either from a recipe info object (copying pn/provides data) or
    from a bare textual *reason*.
    """

    def __init__(self, info = None, reason = None):
        self.pn = info.pn if info else None
        self.skipreason = info.skipreason if info else (reason or None)
        self.provides = info.provides if info else None
        self.rprovides = info.rprovides if info else None
97
class CookerFeatures(object):
    """Set of optional cooker features a UI may request at startup."""
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature *f*; unsupported feature codes are silently ignored."""
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return self._features.__contains__(f)

    def __iter__(self):
        return iter(self._features)

    def __next__(self):
        # NOTE(review): self._features is a set, not an iterator, so calling
        # this directly raises TypeError -- confirm this method is unused.
        return next(self._features)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500118
119
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600120class EventWriter:
121 def __init__(self, cooker, eventfile):
122 self.file_inited = None
123 self.cooker = cooker
124 self.eventfile = eventfile
125 self.event_queue = []
126
127 def write_event(self, event):
128 with open(self.eventfile, "a") as f:
129 try:
130 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
131 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
132 "vars": str_event}))
133 except Exception as err:
134 import traceback
135 print(err, traceback.format_exc())
136
137 def send(self, event):
138 if self.file_inited:
139 # we have the file, just write the event
140 self.write_event(event)
141 else:
142 # init on bb.event.BuildStarted
143 name = "%s.%s" % (event.__module__, event.__class__.__name__)
144 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
145 with open(self.eventfile, "w") as f:
146 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
147
148 self.file_inited = True
149
150 # write pending events
151 for evt in self.event_queue:
152 self.write_event(evt)
153
154 # also write the current event
155 self.write_event(event)
156 else:
157 # queue all events until the file is inited
158 self.event_queue.append(event)
159
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500160#============================================================================#
161# BBCooker
162#============================================================================#
163class BBCooker:
164 """
165 Manages one bitbake build run
166 """
167
    def __init__(self, configuration, featureSet=None):
        """
        Set up a cooker: inotify watchers, base configuration data, the
        event log writer, the bitbake lock, terminal state and signal
        handlers.

        configuration: server configuration object (provides writeeventlog,
            interface, server_register_idlecallback, ... -- assumed, TODO
            confirm against the server implementation).
        featureSet: optional iterable of CookerFeatures codes to enable.
        """
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        # Two independent inotify watchers: one for the base configuration
        # files, one for recipe/parse inputs. bbseen tracks directories we
        # already watch; bbwatchedfiles tracks the individual files.
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.__mtime_cache = {}
        bb.parse.BBHandler.cached_statements = {}

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            # Idle callback: drain both notifiers so the config/parse
            # invalidation callbacks above get a chance to run.
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqueue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            # Record the server interface in the lock file (best effort only).
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout has no underlying file descriptor (e.g. redirected).
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
253
254 def config_notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500255 if event.maskname == "IN_Q_OVERFLOW":
256 bb.warn("inotify event queue overflowed, invalidating caches.")
257 self.baseconfig_valid = False
258 return
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500259 if not event.pathname in self.configwatcher.bbwatchedfiles:
260 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500261 if not event.pathname in self.inotify_modified_files:
262 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500263 self.baseconfig_valid = False
264
265 def notifications(self, event):
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500266 if event.maskname == "IN_Q_OVERFLOW":
267 bb.warn("inotify event queue overflowed, invalidating caches.")
268 self.parsecache_valid = False
269 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500270 if not event.pathname in self.inotify_modified_files:
271 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500272 self.parsecache_valid = False
273
    def add_filewatch(self, deps, watcher=None):
        """
        Add inotify watches for every dependency in *deps*.

        deps: iterable of (filename, ...) tuples -- only element [0] is used.
        watcher: the pyinotify.WatchManager to add to; defaults to the
            recipe watcher (self.watcher).

        For files whose parent directory does not exist, we walk up the
        directory tree and watch the nearest existing ancestor instead, so
        that the file's later creation is still noticed.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: remember the deepest missing
                        # path and retry one level up.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
307
    def sigterm_exception(self, signum, stackframe):
        """
        Signal handler for SIGTERM/SIGHUP: log the signal and request a
        forced shutdown of the cooker (picked up by the main loop).
        """
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown
314
315 def setFeatures(self, features):
316 # we only accept a new feature set if we're in state initial, so we can reset without problems
317 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
318 raise Exception("Illegal state for feature set change")
319 original_featureset = list(self.featureset)
320 for feature in features:
321 self.featureset.setFeature(feature)
322 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
323 if (original_featureset != list(self.featureset)) and self.state != state.error:
324 self.reset()
325
    def initConfigurationData(self):
        """
        (Re)build the base configuration datastore: select cache classes,
        parse the base configuration, create the expanded copy used for
        event firing, and watch the files the configuration depends on.
        """
        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG", True)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        # Restore the console log path preserved above (if any).
        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        self.expanded_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.expanded_data)
        bb.parse.init_parser(self.expanded_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Rename the dependency list so recipe parsing can reuse __depends,
        # then watch every file the base configuration was built from.
        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
378
379
380 def enableDataTracking(self):
381 self.configuration.tracking = True
382 if hasattr(self, "data"):
383 self.data.enableTracking()
384
385 def disableDataTracking(self):
386 self.configuration.tracking = False
387 if hasattr(self, "data"):
388 self.data.disableTracking()
389
390 def modifyConfigurationVar(self, var, val, default_file, op):
391 if op == "append":
392 self.appendConfigurationVar(var, val, default_file)
393 elif op == "set":
394 self.saveConfigurationVar(var, val, default_file, "=")
395 elif op == "earlyAssign":
396 self.saveConfigurationVar(var, val, default_file, "?=")
397
398
399 def appendConfigurationVar(self, var, val, default_file):
400 #add append var operation to the end of default_file
401 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
402
403 total = "#added by hob"
404 total += "\n%s += \"%s\"\n" % (var, val)
405
406 with open(default_file, 'a') as f:
407 f.write(total)
408
409 #add to history
410 loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
411 self.data.appendVar(var, val, **loginfo)
412
    def saveConfigurationVar(self, var, val, default_file, op):
        """
        Persist `var op "val"` into the build's config files.

        Existing assignments under TOPDIR are either replaced in place
        (when they were previously written by this code, marked with the
        "#added by hob" sentinel) or commented out; if nothing was
        replaced, a fresh assignment is appended to *default_file*.
        """
        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate the file text up to the recorded line to
                    # locate the last occurrence of the variable name.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i == int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if true it replace the place where the variable was declared
                    #else it comments it
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)
481
    def removeConfigurationVar(self, var):
        """
        Remove *var* from every config file under TOPDIR that assigned it,
        blanking the assignment lines (and the "#added by hob" sentinel
        when present), and delete it from the datastore and its history.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate the file text up to the recorded line to
                    # locate the last occurrence of the variable name.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)

                    #check if the variable was saved before in the same way
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"
                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)
                    #remove variable
                    self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
516
517 def createConfigFile(self, name):
518 path = os.getcwd()
519 confpath = os.path.join(path, "conf", name)
520 open(confpath, 'w').close()
521
522 def parseConfiguration(self):
523 # Set log file verbosity
524 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
525 if verboselogs:
526 bb.msg.loggerVerboseLogs = True
527
528 # Change nice level if we're asked to
529 nice = self.data.getVar("BB_NICE_LEVEL", True)
530 if nice:
531 curnice = os.nice(0)
532 nice = int(nice) - curnice
533 buildlog.verbose("Renice to %s " % os.nice(nice))
534
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600535 if self.recipecaches:
536 del self.recipecaches
537 self.multiconfigs = self.databuilder.mcdata.keys()
538 self.recipecaches = {}
539 for mc in self.multiconfigs:
540 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500541
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600542 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500543
    def updateConfigOpts(self, options, environment):
        """
        Merge client-supplied *options* and *environment* into the server
        configuration; trigger a reset/reparse when anything relevant
        changed.

        options: dict of configuration attribute overrides; prefile/postfile
            fall back to the values recorded at server start when empty.
        environment: the client's environment; only variables approved by
            bb.utils.approved_variables() are synchronised.
        """
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Changing pre/post files always forces a reparse.
                clean = False
                server_val = getattr(self.configuration, "%s_server" % o)
                if not options[o] and server_val:
                    # restore value provided on server start
                    setattr(self.configuration, o, server_val)
                    continue
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            # Three cases per variable: newly added, deleted, or changed.
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()
574
575 def runCommands(self, server, data, abort):
576 """
577 Run any queued asynchronous command
578 This is done by the idle handler so it runs in true context rather than
579 tied to any UI.
580 """
581
582 return self.command.runAsyncCommand()
583
584 def showVersions(self):
585
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600586 pkg_pn = self.recipecaches[''].pkg_pn
587 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecaches[''], pkg_pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500588
589 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
590 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
591
592 for p in sorted(pkg_pn):
593 pref = preferred_versions[p]
594 latest = latest_versions[p]
595
596 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
597 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
598
599 if pref == latest:
600 prefstr = ""
601
602 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
603
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment.

        buildfile: optional recipe path -- its fully parsed datastore is
            shown. Otherwise, with exactly one target in pkgs_to_build, the
            provider's datastore is shown; with neither, the global one.
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            # runlist entries are [mc, target, taskname, fn].
            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500657
658
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build

        task: task name, or None to use the configured default; normalised
            to a "do_" prefix.
        abort: whether unbuildable targets should abort (passed to TaskData).
        Returns (taskdata, runlist) where taskdata maps multiconfig name to
        its TaskData and runlist entries are [mc, target, taskname, fn].
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "multiconfig:*:bash"
        # into "multiconfig:A:bash multiconfig:B:bash bash"
        for k in targetlist:
            if k.startswith("multiconfig:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        # One TaskData and one expanded datastore copy per multiconfig.
        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.update_data(localdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            # Peel off any "multiconfig:<mc>:" prefix and ":do_<task>" suffix.
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500739
740 def prepareTreeData(self, pkgs_to_build, task):
741 """
742 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
743 """
744
745 # We set abort to False here to prevent unbuildable targets raising
746 # an exception when we're just generating data
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600747 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500748
749 return runlist, taskdata
750
751 ######## WARNING : this function requires cache_extra to be enabled ########
752
753 def generateTaskDepTreeData(self, pkgs_to_build, task):
754 """
755 Create a dependency graph of pkgs_to_build including reverse dependency
756 information.
757 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500758 if not task.startswith("do_"):
759 task = "do_%s" % task
760
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500761 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600762 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500763 rq.rqdata.prepare()
764 return self.buildDependTree(rq, taskdata)
765
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600766 @staticmethod
767 def add_mc_prefix(mc, pn):
768 if mc:
769 return "multiconfig:%s.%s" % (mc, pn)
770 return pn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500771
772 def buildDependTree(self, rq, taskdata):
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600773 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500774 depend_tree = {}
775 depend_tree["depends"] = {}
776 depend_tree["tdepends"] = {}
777 depend_tree["pn"] = {}
778 depend_tree["rdepends-pn"] = {}
779 depend_tree["packages"] = {}
780 depend_tree["rdepends-pkg"] = {}
781 depend_tree["rrecs-pkg"] = {}
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500782 depend_tree['providermap'] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600783 depend_tree["layer-priorities"] = self.bbfile_config_priorities
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500784
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600785 for mc in taskdata:
786 for name, fn in list(taskdata[mc].get_providermap().items()):
787 pn = self.recipecaches[mc].pkg_fn[fn]
788 pn = self.add_mc_prefix(mc, pn)
789 if name != pn:
790 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
791 depend_tree['providermap'][name] = (pn, version)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500792
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600793 for tid in rq.rqdata.runtaskentries:
794 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
795 pn = self.recipecaches[mc].pkg_fn[taskfn]
796 pn = self.add_mc_prefix(mc, pn)
797 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500798 if pn not in depend_tree["pn"]:
799 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600800 depend_tree["pn"][pn]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500801 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600802 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500803
804 # if we have extra caches, list all attributes they bring in
805 extra_info = []
806 for cache_class in self.caches_array:
807 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
808 cachefields = getattr(cache_class, 'cachefields', [])
809 extra_info = extra_info + cachefields
810
811 # for all attributes stored, add them to the dependency tree
812 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600813 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500814
815
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600816 for dep in rq.rqdata.runtaskentries[tid].depends:
817 (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
818 deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
819 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500820 if not dotname in depend_tree["tdepends"]:
821 depend_tree["tdepends"][dotname] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600822 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
823 if taskfn not in seen_fns:
824 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500825 packages = []
826
827 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600828 for dep in taskdata[mc].depids[taskfn]:
829 depend_tree["depends"][pn].append(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500830
831 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600832 for rdep in taskdata[mc].rdepids[taskfn]:
833 depend_tree["rdepends-pn"][pn].append(rdep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500834
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600835 rdepends = self.recipecaches[mc].rundeps[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500836 for package in rdepends:
837 depend_tree["rdepends-pkg"][package] = []
838 for rdepend in rdepends[package]:
839 depend_tree["rdepends-pkg"][package].append(rdepend)
840 packages.append(package)
841
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600842 rrecs = self.recipecaches[mc].runrecs[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500843 for package in rrecs:
844 depend_tree["rrecs-pkg"][package] = []
845 for rdepend in rrecs[package]:
846 depend_tree["rrecs-pkg"][package].append(rdepend)
847 if not package in packages:
848 packages.append(package)
849
850 for package in packages:
851 if package not in depend_tree["packages"]:
852 depend_tree["packages"][package] = {}
853 depend_tree["packages"][package]["pn"] = pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600854 depend_tree["packages"][package]["filename"] = taskfn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500855 depend_tree["packages"][package]["version"] = version
856
857 return depend_tree
858
859 ######## WARNING : this function requires cache_extra to be enabled ########
860 def generatePkgDepTreeData(self, pkgs_to_build, task):
861 """
862 Create a dependency tree of pkgs_to_build, returning the data.
863 """
Brad Bishop37a0e4d2017-12-04 01:01:44 -0500864 if not task.startswith("do_"):
865 task = "do_%s" % task
866
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500867 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500868
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600869 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500870 depend_tree = {}
871 depend_tree["depends"] = {}
872 depend_tree["pn"] = {}
873 depend_tree["rdepends-pn"] = {}
874 depend_tree["rdepends-pkg"] = {}
875 depend_tree["rrecs-pkg"] = {}
876
877 # if we have extra caches, list all attributes they bring in
878 extra_info = []
879 for cache_class in self.caches_array:
880 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
881 cachefields = getattr(cache_class, 'cachefields', [])
882 extra_info = extra_info + cachefields
883
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600884 tids = []
885 for mc in taskdata:
886 for tid in taskdata[mc].taskentries:
887 tids.append(tid)
888
889 for tid in tids:
890 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
891
892 pn = self.recipecaches[mc].pkg_fn[taskfn]
893 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500894
895 if pn not in depend_tree["pn"]:
896 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600897 depend_tree["pn"][pn]["filename"] = taskfn
898 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500899 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600900 rdepends = self.recipecaches[mc].rundeps[taskfn]
901 rrecs = self.recipecaches[mc].runrecs[taskfn]
902 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500903
904 # for all extra attributes stored, add them to the dependency tree
905 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600906 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500907
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600908 if taskfn not in seen_fns:
909 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500910
911 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600912 for item in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500913 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600914 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
915 fn_provider = taskdata[mc].build_targets[dep][0]
916 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500917 else:
918 pn_provider = item
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600919 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500920 depend_tree["depends"][pn].append(pn_provider)
921
922 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600923 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500924 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600925 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
926 fn_rprovider = taskdata[mc].run_targets[rdep][0]
927 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500928 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600929 pn_rprovider = rdep
930 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500931 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
932
933 depend_tree["rdepends-pkg"].update(rdepends)
934 depend_tree["rrecs-pkg"].update(rrecs)
935
936 return depend_tree
937
938 def generateDepTreeEvent(self, pkgs_to_build, task):
939 """
940 Create a task dependency graph of pkgs_to_build.
941 Generate an event with the result
942 """
943 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
944 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
945
946 def generateDotGraphFiles(self, pkgs_to_build, task):
947 """
948 Create a task dependency graph of pkgs_to_build.
949 Save the result to a set of .dot files.
950 """
951
952 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
953
954 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600955 depends_file = open('pn-depends.dot', 'w' )
956 buildlist_file = open('pn-buildlist', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500957 print("digraph depends {", file=depends_file)
958 for pn in depgraph["pn"]:
959 fn = depgraph["pn"][pn]["filename"]
960 version = depgraph["pn"][pn]["version"]
961 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
962 print("%s" % pn, file=buildlist_file)
963 buildlist_file.close()
964 logger.info("PN build list saved to 'pn-buildlist'")
965 for pn in depgraph["depends"]:
966 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500967 print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500968 for pn in depgraph["rdepends-pn"]:
969 for rdepend in depgraph["rdepends-pn"][pn]:
970 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
971 print("}", file=depends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600972 depends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500973 logger.info("PN dependencies saved to 'pn-depends.dot'")
974
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600975 depends_file = open('package-depends.dot', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500976 print("digraph depends {", file=depends_file)
977 for package in depgraph["packages"]:
978 pn = depgraph["packages"][package]["pn"]
979 fn = depgraph["packages"][package]["filename"]
980 version = depgraph["packages"][package]["version"]
981 if package == pn:
982 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
983 else:
984 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
985 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500986 print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500987 for package in depgraph["rdepends-pkg"]:
988 for rdepend in depgraph["rdepends-pkg"][package]:
989 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
990 for package in depgraph["rrecs-pkg"]:
991 for rdepend in depgraph["rrecs-pkg"][package]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500992 print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500993 print("}", file=depends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600994 depends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500995 logger.info("Package dependencies saved to 'package-depends.dot'")
996
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600997 tdepends_file = open('task-depends.dot', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500998 print("digraph depends {", file=tdepends_file)
999 for task in depgraph["tdepends"]:
1000 (pn, taskname) = task.rsplit(".", 1)
1001 fn = depgraph["pn"][pn]["filename"]
1002 version = depgraph["pn"][pn]["version"]
1003 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
1004 for dep in depgraph["tdepends"][task]:
1005 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
1006 print("}", file=tdepends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001007 tdepends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001008 logger.info("Task dependencies saved to 'task-depends.dot'")
1009
1010 def show_appends_with_no_recipes(self):
1011 # Determine which bbappends haven't been applied
1012
1013 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001014 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001015 recipefns.extend(self.skiplist.keys())
1016
1017 # Work out list of bbappends that have been applied
1018 applied_appends = []
1019 for fn in recipefns:
1020 applied_appends.extend(self.collection.get_file_appends(fn))
1021
1022 appends_without_recipes = []
1023 for _, appendfn in self.collection.bbappends:
1024 if not appendfn in applied_appends:
1025 appends_without_recipes.append(appendfn)
1026
1027 if appends_without_recipes:
1028 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
1029 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
1030 False) or "no"
1031 if warn_only.lower() in ("1", "yes", "true"):
1032 bb.warn(msg)
1033 else:
1034 bb.fatal(msg)
1035
1036 def handlePrefProviders(self):
1037
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001038 for mc in self.multiconfigs:
1039 localdata = data.createCopy(self.databuilder.mcdata[mc])
1040 bb.data.update_data(localdata)
1041 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001042
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001043 # Handle PREFERRED_PROVIDERS
1044 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
1045 try:
1046 (providee, provider) = p.split(':')
1047 except:
1048 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1049 continue
1050 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1051 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1052 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001053
1054 def findCoreBaseFiles(self, subdir, configfile):
1055 corebase = self.data.getVar('COREBASE', True) or ""
1056 paths = []
1057 for root, dirs, files in os.walk(corebase + '/' + subdir):
1058 for d in dirs:
1059 configfilepath = os.path.join(root, d, configfile)
1060 if os.path.exists(configfilepath):
1061 paths.append(os.path.join(root, d))
1062
1063 if paths:
1064 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
1065
1066 def findConfigFilePath(self, configfile):
1067 """
1068 Find the location on disk of configfile and if it exists and was parsed by BitBake
1069 emit the ConfigFilePathFound event with the path to the file.
1070 """
1071 path = bb.cookerdata.findConfigFile(configfile, self.data)
1072 if not path:
1073 return
1074
1075 # Generate a list of parsed configuration files by searching the files
1076 # listed in the __depends and __base_depends variables with a .conf suffix.
1077 conffiles = []
1078 dep_files = self.data.getVar('__base_depends', False) or []
1079 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1080
1081 for f in dep_files:
1082 if f[0].endswith(".conf"):
1083 conffiles.append(f[0])
1084
1085 _, conf, conffile = path.rpartition("conf/")
1086 match = os.path.join(conf, conffile)
1087 # Try and find matches for conf/conffilename.conf as we don't always
1088 # have the full path to the file.
1089 for cfg in conffiles:
1090 if cfg.endswith(match):
1091 bb.event.fire(bb.event.ConfigFilePathFound(path),
1092 self.data)
1093 break
1094
1095 def findFilesMatchingInDir(self, filepattern, directory):
1096 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001097 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001098 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1099 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1100 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001101 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001102 """
1103
1104 matches = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001105 bbpaths = self.data.getVar('BBPATH', True).split(':')
1106 for path in bbpaths:
1107 dirpath = os.path.join(path, directory)
1108 if os.path.exists(dirpath):
1109 for root, dirs, files in os.walk(dirpath):
1110 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001111 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001112 matches.append(f)
1113
1114 if matches:
1115 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1116
1117 def findConfigFiles(self, varname):
1118 """
1119 Find config files which are appropriate values for varname.
1120 i.e. MACHINE, DISTRO
1121 """
1122 possible = []
1123 var = varname.lower()
1124
1125 data = self.data
1126 # iterate configs
1127 bbpaths = data.getVar('BBPATH', True).split(':')
1128 for path in bbpaths:
1129 confpath = os.path.join(path, "conf", var)
1130 if os.path.exists(confpath):
1131 for root, dirs, files in os.walk(confpath):
1132 # get all child files, these are appropriate values
1133 for f in files:
1134 val, sep, end = f.rpartition('.')
1135 if end == 'conf':
1136 possible.append(val)
1137
1138 if possible:
1139 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1140
1141 def findInheritsClass(self, klass):
1142 """
1143 Find all recipes which inherit the specified class
1144 """
1145 pkg_list = []
1146
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001147 for pfn in self.recipecaches[''].pkg_fn:
1148 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001149 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001150 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001151
1152 return pkg_list
1153
1154 def generateTargetsTree(self, klass=None, pkgs=None):
1155 """
1156 Generate a dependency tree of buildable targets
1157 Generate an event with the result
1158 """
1159 # if the caller hasn't specified a pkgs list default to universe
1160 if not pkgs:
1161 pkgs = ['universe']
1162 # if inherited_class passed ensure all recipes which inherit the
1163 # specified class are included in pkgs
1164 if klass:
1165 extra_pkgs = self.findInheritsClass(klass)
1166 pkgs = pkgs + extra_pkgs
1167
1168 # generate a dependency tree for all our packages
1169 tree = self.generatePkgDepTreeData(pkgs, 'build')
1170 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1171
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001172 def interactiveMode( self ):
1173 """Drop off into a shell"""
1174 try:
1175 from bb import shell
1176 except ImportError:
1177 parselog.exception("Interactive mode not available")
1178 sys.exit(1)
1179 else:
1180 shell.start( self )
1181
1182
    def handleCollections(self, collections):
        """Handle collections

        *collections* is the space-separated BBFILE_COLLECTIONS value.  For
        each layer this checks LAYERDEPENDS/LAYERRECOMMENDS constraints,
        computes a priority (explicit BBFILE_PRIORITY_<c> or derived from
        dependencies) and appends (collection, pattern, compiled-regex,
        priority) tuples to self.bbfile_config_priorities.  Raises
        CollectionError after logging if any layer data was invalid.
        """
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            # min_prio tracks the smallest explicit priority seen; it becomes
            # the floor for priorities derived from dependencies below.
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    # NOTE(review): if int() raised above, "prio" is undefined
                    # here and this would NameError — confirm intended flow.
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # None marks "derive from dependencies" in calc_layer_priority.
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            # Recommends are non-fatal: a satisfied recommend is
                            # simply treated as an extra dependency edge.
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # A layer with no explicit priority gets (max priority of its
                # dependencies) + 1, with min_prio as the floor.
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1299
1300 def buildSetVars(self):
1301 """
1302 Setup any variables needed before starting a build
1303 """
1304 t = time.gmtime()
1305 if not self.data.getVar("BUILDNAME", False):
1306 self.data.setVar("BUILDNAME", "${DATE}${TIME}")
1307 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1308 self.data.setVar("DATE", time.strftime('%Y%m%d', t))
1309 self.data.setVar("TIME", time.strftime('%H%M%S', t))
1310
1311 def matchFiles(self, bf):
1312 """
1313 Find the .bb files which match the expression in 'buildfile'.
1314 """
1315 if bf.startswith("/") or bf.startswith("../"):
1316 bf = os.path.abspath(bf)
1317
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001318 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001319 filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
1320 try:
1321 os.stat(bf)
1322 bf = os.path.abspath(bf)
1323 return [bf]
1324 except OSError:
1325 regexp = re.compile(bf)
1326 matches = []
1327 for f in filelist:
1328 if regexp.search(f) and os.path.isfile(f):
1329 matches.append(f)
1330 return matches
1331
1332 def matchFile(self, buildfile):
1333 """
1334 Find the .bb file which matches the expression in 'buildfile'.
1335 Raise an error if multiple files
1336 """
1337 matches = self.matchFiles(buildfile)
1338 if len(matches) != 1:
1339 if matches:
1340 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1341 if matches:
1342 for f in matches:
1343 msg += "\n %s" % f
1344 parselog.error(msg)
1345 else:
1346 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1347 raise NoSpecificMatch
1348 return matches[0]
1349
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile

        Parses the single recipe outside the normal cache, strips its
        external dependencies, then registers an idle callback that drives
        a one-recipe RunQueue for *task* (default: configuration.cmd).
        """
        bb.event.fire(bb.event.BuildInit(), self.expanded_data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        # Resolve any "virtual:<cls>:" / multiconfig prefix, then match the
        # real file on disk.
        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        # Parse the recipe (plus its bbappends) directly, bypassing the
        # persistent cache.
        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)

        infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = []
        self.recipecaches[mc].runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
        taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            # Idle-loop driver: pumps the runqueue; returns False when the
            # build is finished (successfully or not), True/retval to be
            # called again.

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1451
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Builds the task dependency data for *targets*, fires BuildInit and
        BuildStarted, then registers an idle callback that drives the
        RunQueue until completion.
        """

        def buildTargetsIdle(server, rq, abort):
            # Idle-loop driver: pumps the runqueue; returns False once the
            # build has finished, True/retval to be called again.
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.expanded_data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            # target is [mc, item, task, fn]; non-default multiconfigs get
            # an additional "multiconfig:" qualified entry.
            if target[0]:
                ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1518
1519
1520 def getAllKeysWithFlags(self, flaglist):
1521 dump = {}
1522 for k in self.data.keys():
1523 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001524 expand = True
1525 flags = self.data.getVarFlags(k)
1526 if flags and "func" in flags and "python" in flags:
1527 expand = False
1528 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001529 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1530 dump[k] = {
1531 'v' : v ,
1532 'history' : self.data.varhistory.variable(k),
1533 }
1534 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001535 if flags and d in flags:
1536 dump[k][d] = flags[d]
1537 else:
1538 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001539 except Exception as e:
1540 print(e)
1541 return dump
1542
1543
    def generateNewImage(self, image, base_image, package_queue, timestamp, description):
        '''
        Create a new image with a "require"/"inherit" base_image statement

        image         -- path of the image recipe to write (".bb" appended
                         if missing)
        base_image    -- existing image recipe to require, or None to
                         inherit core-image instead
        package_queue -- iterable of package names written to IMAGE_INSTALL
        timestamp     -- if truthy, append "-%Y%m%d-%H%M%S" to the image
                         name and return that suffix
        description   -- value written to DESCRIPTION
        '''
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
            dest = image_name + str(timestr) + ".bb"
        else:
            if not image.endswith(".bb"):
                dest = image + ".bb"
            else:
                dest = image

        # Detect whether the base image overrides IMAGE_BASENAME so we can
        # reset it back to the default in the generated recipe.
        basename = False
        if base_image:
            with open(base_image, 'r') as f:
                require_line = f.readline()
                p = re.compile("IMAGE_BASENAME *=")
                for line in f:
                    if p.search(line):
                        basename = True

        with open(dest, "w") as imagefile:
            if base_image is None:
                imagefile.write("inherit core-image\n")
            else:
                # If the base image lives inside TOPDIR it is itself a
                # generated image; require what *it* requires (its first
                # line) rather than the generated file.
                topdir = self.data.getVar("TOPDIR", False)
                if topdir in base_image:
                    base_image = require_line.split()[1]
                imagefile.write("require " + base_image + "\n")
            image_install = "IMAGE_INSTALL = \""
            for package in package_queue:
                image_install += str(package) + " "
            image_install += "\"\n"
            imagefile.write(image_install)

            description_var = "DESCRIPTION = \"" + description + "\"\n"
            imagefile.write(description_var)

            if basename:
                # If this is overwritten in a inherited image, reset it to default
                image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
                imagefile.write(image_basename)

        self.state = state.initial
        if timestamp:
            return timestr
1592
    def updateCacheSync(self):
        """Synchronously refresh configuration state.

        Drops parse-cache entries for files that inotify reported as
        modified, and reloads the base configuration if it was invalidated
        (which in turn invalidates the recipe parse cache).
        """
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.baseconfig_valid = True
            # Base config changed, so any parsed recipe data is stale too.
            self.parsecache_valid = False
1609
1610 # This is called for all async commands when self.state != running
1611 def updateCache(self):
1612 if self.state == state.running:
1613 return
1614
1615 if self.state in (state.shutdown, state.forceshutdown, state.error):
1616 if hasattr(self.parser, 'shutdown'):
1617 self.parser.shutdown(clean=False, force = True)
1618 raise bb.BBHandledException()
1619
1620 if self.state != state.parsing:
1621 self.updateCacheSync()
1622
1623 if self.state != state.parsing and not self.parsecache_valid:
1624 self.parseConfiguration ()
1625 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
Brad Bishop37a0e4d2017-12-04 01:01:44 -05001626 for mc in self.multiconfigs:
1627 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001628
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001629 for mc in self.multiconfigs:
1630 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
1631 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001632
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001633 for dep in self.configuration.extra_assume_provided:
1634 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001635
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001636 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001637 (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
1638
1639 self.parser = CookerParser(self, filelist, masked)
1640 self.parsecache_valid = True
1641
1642 self.state = state.parsing
1643
1644 if not self.parser.parse_next():
1645 collectlog.debug(1, "parsing complete")
1646 if self.parser.error:
1647 raise bb.BBHandledException()
1648 self.show_appends_with_no_recipes()
1649 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001650 for mc in self.multiconfigs:
1651 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001652 self.state = state.running
1653
1654 # Send an event listing all stamps reachable after parsing
1655 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001656 for mc in self.multiconfigs:
1657 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1658 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001659 return None
1660
1661 return True
1662
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001663 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001664
1665 # Return a copy, don't modify the original
1666 pkgs_to_build = pkgs_to_build[:]
1667
1668 if len(pkgs_to_build) == 0:
1669 raise NothingToBuild
1670
1671 ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
1672 for pkg in pkgs_to_build:
1673 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001674 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001675
1676 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001677 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001678 for mc in self.multiconfigs:
1679 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1680 for t in self.recipecaches[mc].world_target:
1681 if mc:
1682 t = "multiconfig:" + mc + ":" + t
1683 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001684
1685 if 'universe' in pkgs_to_build:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001686 parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001687 parselog.debug(1, "collating packages for \"universe\"")
1688 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001689 for mc in self.multiconfigs:
1690 for t in self.recipecaches[mc].universe_target:
1691 if mc:
1692 t = "multiconfig:" + mc + ":" + t
1693 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001694
1695 return pkgs_to_build
1696
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001697 def pre_serve(self):
1698 # Empty the environment. The environment will be populated as
1699 # necessary from the data store.
1700 #bb.utils.empty_environment()
1701 try:
1702 self.prhost = prserv.serv.auto_start(self.data)
1703 except prserv.serv.PRServiceConfigError:
1704 bb.event.fire(CookerExit(), self.expanded_data)
1705 self.state = state.error
1706 return
1707
    def post_serve(self):
        """Tear down after the server loop ends.

        Shuts down the PR service, notifies clients via CookerExit, then
        releases bitbake.lock and waits until it can be re-acquired
        exclusively — i.e. until any child processes that inherited the
        lock file descriptor have exited. While waiting, it reports which
        processes appear to be holding the lock.
        """
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.expanded_data)
        lockfile = self.lock.name
        self.lock.close()
        self.lock = None

        while not self.lock:
            # Retry in 3-second windows; each failed attempt triggers a
            # diagnostic listing of the lock holders.
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)
1738
1739
1740 def shutdown(self, force = False):
1741 if force:
1742 self.state = state.forceshutdown
1743 else:
1744 self.state = state.shutdown
1745
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001746 if self.parser:
1747 self.parser.shutdown(clean=not force, force=force)
1748
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001749 def finishcommand(self):
1750 self.state = state.initial
1751
    def reset(self):
        # Rebuild the base configuration data from scratch.
        self.initConfigurationData()
1754
1755 def lockBitbake(self):
1756 if not hasattr(self, 'lock'):
1757 self.lock = None
1758 if self.data:
1759 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
1760 if lockfile:
1761 self.lock = bb.utils.lockfile(lockfile, False, False)
1762 return self.lock
1763
1764 def unlockBitbake(self):
1765 if hasattr(self, 'lock') and self.lock:
1766 bb.utils.unlockfile(self.lock)
1767
def server_main(cooker, func, *args):
    """Run *func(*args)* inside the cooker's serve lifecycle.

    Calls cooker.pre_serve() first and cooker.post_serve() afterwards,
    returning func's result. When cooker.configuration.profile is set, the
    call is run under cProfile (falling back to the pure-Python profiler)
    and the statistics are written to profile.log.
    """
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            # Fix: only a missing cProfile should trigger the fallback; the
            # previous bare 'except:' hid unrelated import-time errors.
            import profile
        prof = profile.Profile()

        ret = prof.runcall(func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1790
class CookerExit(bb.event.Event):
    """Event fired to tell connected clients the cooker is shutting down."""

    def __init__(self):
        super().__init__()
1798
1799
class CookerCollectFiles(object):
    """Discovers the .bb and .bbappend files named by BBFILES/BBMASK and
    assigns each recipe file a collection (layer) priority."""

    def __init__(self, priorities):
        # (recipe basename, bbappend path) pairs, filled by collect_bbfiles().
        self.bbappends = []
        # (collection, pattern, compiled regex, priority) tuples.
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority(self, filename, matched=None):
        """Return the priority of the first collection regex matching
        *filename*, or 0 if none matches. When a set is supplied as
        *matched*, every regex that matches at least one file is recorded
        in it (used later to warn about empty collections)."""
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # Bug fix: str.endswith() accepts a str or a *tuple* of
            # suffixes; the previous list argument raised TypeError as soon
            # as a directory was scanned.
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files.

        Expands BBFILES (directories are walked, globs expanded), applies
        BBMASK, and records .bbappend and overlayed recipes. Returns a
        (bbfiles, masked_count) tuple.
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                # glob gives files in order on disk. Sort to be deterministic.
                for g in sorted(globbed):
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        # Split the remaining files into recipes and appends.
        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # Exact basename match, or wildcard match: a '%' in the append's
            # base matches any recipe whose name shares the prefix before it.
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):
        """Map each virtual filename in *pkgfns* to its collection priority
        and warn about collections whose BBFILE_PATTERN matched nothing."""
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        # A pattern that matched only .bbappend files is still considered used.
        def findmatch(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
                    collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1971
class ParsingFailure(Exception):
    """Wraps a BaseException raised while parsing a recipe so it can be
    shipped safely across the parser result queue."""

    def __init__(self, realexception, recipe):
        super().__init__(realexception, recipe)
        # The original exception and the recipe file that triggered it.
        self.realexception = realexception
        self.recipe = recipe
1977
class Feeder(multiprocessing.Process):
    """Process that feeds parse jobs from a plain list into the bounded
    to_parsers queue, so the Parser workers can consume them."""

    def __init__(self, jobs, to_parsers, quit):
        # quit: control queue; receiving any item stops the feeder, the
        # value 'cancel' additionally abandons unsent jobs.
        self.quit = quit
        # jobs: list of (filename, appends) tuples still to be handed out.
        self.jobs = jobs
        # to_parsers: bounded multiprocessing.Queue shared with the parsers.
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    # Don't block process exit on undelivered queue data.
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                # All jobs handed out; exit normally.
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except queue.Full:
                # Queue is full: put the job back and retry, which also
                # gives the quit check above a chance to run.
                self.jobs.insert(0, job)
                continue
2006
class Parser(multiprocessing.Process):
    """Worker process that parses recipes from the jobs queue and puts
    (parsed, result-or-exception) tuples on the results queue."""

    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        # init: callable run once inside the child (sets Parser.bb_cache
        # and registers finalizers); may be None.
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot context/handlers from the parent so each parse can be
        # reset to a pristine state.
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):

        if not self.profile:
            self.realrun()
            return

        # Profile this worker, dumping per-process stats for later merging.
        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        if self.init:
            self.init()

        # Results that could not be delivered because the queue was full.
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                # Quit requested: don't block process exit on queued results.
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except queue.Empty:
                    continue

                # None is the sentinel telling this worker to stop.
                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, filename, appends):
        """Parse one recipe; returns (True, infos) on success or
        (True, exception) on failure so the consumer can re-raise."""
        try:
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            # bb_cache is a class attribute installed by CookerParser's
            # init() callback inside this worker process.
            return True, self.bb_cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
2090
class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        """Split filelist into cache hits and files needing a real parse,
        then kick off the parser worker processes via start().

        cooker -- the owning cooker (provides data, databuilder, collection)
        filelist -- recipe files to process
        masked -- count of files already excluded by BBMASK (statistics only)
        """
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.process_names = []

        # Decide per file whether the existing cache entry is still valid.
        self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a ParseProgress event roughly every 1% of files parsed.
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Never start more workers than there are files to parse.
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count()), len(self.willparse))

        self.start()
        self.haveshutdown = False
2129
    def start(self):
        """Start the Feeder and Parser worker processes (if anything needs
        parsing) and build self.results: cached entries chained with
        results arriving from the workers."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Runs once inside each child process before parsing begins.
            def init():
                Parser.bb_cache = self.bb_cache
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())
2154
    def shutdown(self, clean=True, force=False):
        """Stop the feeder/parser processes and flush caches.

        clean -- True when parsing completed normally (fires ParseCompleted);
                 False cancels outstanding work instead.
        force -- terminate worker processes rather than waiting for them.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One stop sentinel per worker.
            for process in self.processes:
                self.parser_quit.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the recipe cache back to disk in the background, keeping a
        # finalizer so interpreter exit waits for it.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge per-worker profiling logs into one processed report.
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2204
2205 def load_cached(self):
2206 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002207 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002208 yield not cached, infos
2209
2210 def parse_generator(self):
2211 while True:
2212 if self.parsed >= self.toparse:
2213 break
2214
2215 try:
2216 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002217 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002218 pass
2219 else:
2220 value = result[1]
2221 if isinstance(value, BaseException):
2222 raise value
2223 else:
2224 yield result
2225
    def parse_next(self):
        """Consume one result from self.results and record it.

        Returns True while more results are expected and False once parsing
        has finished or an error caused a shutdown. Raises
        bb.BBHandledException indirectly only via callers; parse errors are
        logged here and reflected in self.error.
        """
        result = []
        parsed = None
        try:
            parsed, result = next(self.results)
        except StopIteration:
            # All cached and freshly-parsed results consumed: clean finish.
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            # Trim bitbake-internal frames from the reported traceback.
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        # Update the bookkeeping used by ParseProgress/ParseCompleted.
        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Register each (virtual) recipe's info in the per-multiconfig cache.
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                   parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2290
2291 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002292 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002293 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002294 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2295 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)