blob: 42831e2771765cae1bee544026bd5c36230d335b [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
Patrick Williamsc0f7c042017-02-23 20:41:17 -060025
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
Patrick Williamsc0f7c042017-02-23 20:41:17 -060033from io import StringIO, UnsupportedOperation
Patrick Williamsc124f4f2015-09-15 14:41:29 -050034from contextlib import closing
35from functools import wraps
Patrick Williamsc0f7c042017-02-23 20:41:17 -060036from collections import defaultdict, namedtuple
Patrick Williamsc124f4f2015-09-15 14:41:29 -050037import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
Patrick Williamsc0f7c042017-02-23 20:41:17 -060039import queue
Patrick Williamsc124f4f2015-09-15 14:41:29 -050040import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
Patrick Williamsc0f7c042017-02-23 20:41:17 -060045import json
46import pickle
47import codecs
Patrick Williamsc124f4f2015-09-15 14:41:29 -050048
# Module-level loggers, one per functional area of the cooker.  All are
# children of the root "BitBake" logger, so handlers and levels configured
# on it propagate to these.
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
54
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file, or more than one file, matches a given
    recipe specification.  Derives from BBHandledException: it has already
    been reported to the user when raised.
    """
59
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build.
    """
64
class CollectionError(bb.BBHandledException):
    """
    Exception raised when the layer configuration (BBFILE_COLLECTIONS etc.)
    is incorrect.  Already reported to the user when raised.
    """
69
class state:
    """Enumeration of the cooker's lifecycle states.

    The state codes are small consecutive integers so they can be passed
    cheaply over the server/UI boundary.
    """
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for numeric status *code*.

        Raises:
            ValueError: if *code* does not correspond to any known state.
        """
        for name in dir(cls):
            value = getattr(cls, name)
            # Only plain-int class attributes are state codes; this skips
            # methods and dunder attributes that dir() also returns.  bool is
            # excluded to match the original exact-type comparison.
            if isinstance(value, int) and not isinstance(value, bool) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
80
Patrick Williamsc124f4f2015-09-15 14:41:29 -050081
class SkippedPackage:
    """Lightweight record of a recipe that was skipped during parsing.

    Either copies the relevant fields from a full recipe *info* object, or
    records just a textual *reason* for the skip.
    """
    def __init__(self, info=None, reason=None):
        if info:
            # Pull the interesting fields out of the full recipe info.
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        else:
            self.pn = None
            self.skipreason = reason if reason else None
            self.provides = None
            self.rprovides = None
97
class CookerFeatures(object):
    """Set of optional cooker capabilities requested by a UI front-end."""

    # All features the cooker knows about; requests outside this list are
    # silently ignored by setFeature().
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature *f* if it is one we support; otherwise do nothing."""
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return self._features.__contains__(f)

    def __iter__(self):
        return iter(self._features)

    def __next__(self):
        # NOTE(review): a set is not an iterator, so calling next() on this
        # object raises TypeError; kept verbatim for compatibility.
        return next(self._features)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500118
119
class EventWriter:
    """Serializes BitBake events to a JSON-lines log file.

    The file is created lazily on the first build-start style event; any
    events arriving earlier are buffered in memory and flushed once the file
    header has been written.
    """
    def __init__(self, cooker, eventfile):
        # Stays falsy until the log file header has been written.
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        """Append one pickled, base64-encoded event as a JSON line."""
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        """Log *event*, initialising the file on the first start event."""
        # Fast path: the log file already exists, append directly.
        if self.file_inited:
            self.write_event(event)
            return

        name = "%s.%s" % (event.__module__, event.__class__.__name__)
        if name not in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
            # File not initialised yet: buffer until it is.
            self.event_queue.append(event)
            return

        # First start/exit event: write the variable-dump header line.
        with open(self.eventfile, "w") as f:
            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

        self.file_inited = True

        # Replay everything buffered before the file existed, then this event.
        for queued in self.event_queue:
            self.write_event(queued)
        self.write_event(event)
159
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500160#============================================================================#
161# BBCooker
162#============================================================================#
163class BBCooker:
164 """
165 Manages one bitbake build run
166 """
167
    def __init__(self, configuration, featureSet=None):
        """Set up a cooker for one build run.

        configuration: the server's CookerConfiguration object.
        featureSet: optional iterable of CookerFeatures codes to enable.

        Sets up inotify watchers for config and recipe files, parses the
        base configuration, takes the per-build-directory lock, clears the
        terminal TOSTOP flag and installs SIGTERM/SIGHUP handlers.
        """
        self.recipecaches = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        # Two separate watchers: one for configuration files (invalidates the
        # base config), one for recipe/parse inputs (invalidates parse cache).
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.__mtime_cache = {}
        bb.parse.BBHandler.cached_statements = {}

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            # Idle callback: drain any pending inotify events for both
            # watchers; returning 1.0 asks to be called again in ~1s.
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqeue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            # Record our server interface in the lock file so other bitbake
            # invocations can find the running server; best-effort only.
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout has no fileno() (e.g. replaced by a StringIO-like object).
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
253
254 def config_notifications(self, event):
255 if not event.pathname in self.configwatcher.bbwatchedfiles:
256 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500257 if not event.pathname in self.inotify_modified_files:
258 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500259 self.baseconfig_valid = False
260
261 def notifications(self, event):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500262 if not event.pathname in self.inotify_modified_files:
263 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500264 self.parsecache_valid = False
265
    def add_filewatch(self, deps, watcher=None):
        """Register inotify watches for a list of file dependencies.

        deps: list of tuples whose first element is a filename (the format
        of __depends/__base_depends entries).  The *directory* containing
        each file is watched.  If that directory does not exist yet, the
        nearest existing ancestor is watched instead so we notice when the
        missing path appears.  Defaults to the recipe watcher.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            # watchtarget remembers the deepest missing path so that, once an
            # existing ancestor is found, events there map back to this file.
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: walk up one level and retry,
                        # stopping if an ancestor is already watched.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
299
300 def sigterm_exception(self, signum, stackframe):
301 if signum == signal.SIGTERM:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500302 bb.warn("Cooker received SIGTERM, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500303 elif signum == signal.SIGHUP:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500304 bb.warn("Cooker received SIGHUP, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500305 self.state = state.forceshutdown
306
307 def setFeatures(self, features):
308 # we only accept a new feature set if we're in state initial, so we can reset without problems
309 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
310 raise Exception("Illegal state for feature set change")
311 original_featureset = list(self.featureset)
312 for feature in features:
313 self.featureset.setFeature(feature)
314 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
315 if (original_featureset != list(self.featureset)) and self.state != state.error:
316 self.reset()
317
    def initConfigurationData(self):
        """(Re)build the base datastore from the configuration files.

        Resets cooker state, loads the cache classes implied by the feature
        set, reparses the base configuration and recreates the expanded copy
        of the datastore used for event firing.  Safe to call repeatedly
        (used on reset).
        """

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG", True)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Each entry is "module:ClassName"; import the module and
                # collect the class object itself.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        self.expanded_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.expanded_data)
        bb.parse.init_parser(self.expanded_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Watch the base configuration files themselves so that edits to them
        # trigger a configuration reload.
        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
370
371
372 def enableDataTracking(self):
373 self.configuration.tracking = True
374 if hasattr(self, "data"):
375 self.data.enableTracking()
376
377 def disableDataTracking(self):
378 self.configuration.tracking = False
379 if hasattr(self, "data"):
380 self.data.disableTracking()
381
382 def modifyConfigurationVar(self, var, val, default_file, op):
383 if op == "append":
384 self.appendConfigurationVar(var, val, default_file)
385 elif op == "set":
386 self.saveConfigurationVar(var, val, default_file, "=")
387 elif op == "earlyAssign":
388 self.saveConfigurationVar(var, val, default_file, "?=")
389
390
391 def appendConfigurationVar(self, var, val, default_file):
392 #add append var operation to the end of default_file
393 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
394
395 total = "#added by hob"
396 total += "\n%s += \"%s\"\n" % (var, val)
397
398 with open(default_file, 'a') as f:
399 f.write(total)
400
401 #add to history
402 loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
403 self.data.appendVar(var, val, **loginfo)
404
    def saveConfigurationVar(self, var, val, default_file, op):
        """Persist ``var op val`` to disk, rewriting previous assignments.

        Walks every config file (under TOPDIR) where *var* was assigned: a
        previous hob-written assignment is replaced in place, any other
        assignment is commented out.  If nothing was replaced the variable
        is appended to *default_file*.  *op* is the assignment operator
        ("=", "?=", ...).
        """

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate the file text up to the recorded line number
                    # so we can locate the last occurrence of `var` before it.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    # begin_line/end_line delimit the assignment in `contents`
                    # (0-based start, 1-based recorded end).
                    begin_line = total.count("\n",0,index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if true it replace the place where the variable was declared
                    #else it comments it
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)
473
    def removeConfigurationVar(self, var):
        """Remove every assignment of *var* from config files under TOPDIR.

        Hob-written assignments (preceded by "#added by hob") are blanked
        together with their marker line; other assignments are blanked in
        place.  The variable is also deleted from the datastore and its
        history.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate text up to the recorded line number to find
                    # where the assignment starts (same scheme as
                    # saveConfigurationVar above).
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)

                            #check if the variable was saved before in the same way
                            if contents[begin_line-1]== "#added by hob\n":
                                contents[begin_line-1] = contents[begin_line] = "\n"
                            else:
                                contents[begin_line] = "\n"
                            #remove var from history
                            self.data.varhistory.del_var_history(var, conf_file, line)
                            #remove variable
                            self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
508
509 def createConfigFile(self, name):
510 path = os.getcwd()
511 confpath = os.path.join(path, "conf", name)
512 open(confpath, 'w').close()
513
    def parseConfiguration(self):
        """Apply post-parse configuration: verbosity, niceness, recipe caches.

        Rebuilds the per-multiconfig recipe cache objects and registers the
        layer collections from BBFILE_COLLECTIONS.
        """
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Recreate the recipe caches, one CacheData per multiconfig.
        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500535
    def updateConfigOpts(self, options, environment):
        """Merge new command-line *options* and process *environment* in.

        Any change to prefile/postfile or to an approved environment
        variable marks the base configuration dirty and triggers a reset.
        """
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Changing pre/post files always forces a reparse.
                clean = False
                server_val = getattr(self.configuration, "%s_server" % o)
                if not options[o] and server_val:
                    # restore value provided on server start
                    setattr(self.configuration, o, server_val)
                    continue
            setattr(self.configuration, o, options[o])
        # Reconcile the approved environment variables: add new ones, drop
        # removed ones, update changed ones.
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()
566
567 def runCommands(self, server, data, abort):
568 """
569 Run any queued asynchronous command
570 This is done by the idle handler so it runs in true context rather than
571 tied to any UI.
572 """
573
574 return self.command.runAsyncCommand()
575
576 def showVersions(self):
577
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600578 pkg_pn = self.recipecaches[''].pkg_pn
579 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecaches[''], pkg_pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500580
581 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
582 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
583
584 for p in sorted(pkg_pn):
585 pref = preferred_versions[p]
586 latest = latest_versions[p]
587
588 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
589 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
590
591 if pref == latest:
592 prefstr = ""
593
594 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
595
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment.

        With *buildfile* the environment of that recipe file is shown; with a
        single entry in *pkgs_to_build* the providing recipe is resolved
        first; otherwise the global configuration datastore is dumped.
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            # runlist entries are [mc, target, task, fn].
            mc = runlist[0][0]
            fn = runlist[0][3]
        else:
            envdata = self.data

        if fn:
            try:
                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500649
650
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build.

        Returns (taskdata, runlist) where taskdata is a dict indexed by
        multiconfig name and runlist is a list of [mc, target, taskname, fn]
        entries.  A *task* of None means use the configured default task.
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build, task)
        taskdata = {}
        localdata = {}

        # One TaskData and one expanded datastore copy per multiconfig.
        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.update_data(localdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            # Targets may be qualified as "multiconfig:<mc>:<target>".
            mc = ""
            if k.startswith("multiconfig:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            # Targets may also carry an explicit task: "<target>:do_<task>".
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500699
700 def prepareTreeData(self, pkgs_to_build, task):
701 """
702 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
703 """
704
705 # We set abort to False here to prevent unbuildable targets raising
706 # an exception when we're just generating data
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600707 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500708
709 return runlist, taskdata
710
711 ######## WARNING : this function requires cache_extra to be enabled ########
712
713 def generateTaskDepTreeData(self, pkgs_to_build, task):
714 """
715 Create a dependency graph of pkgs_to_build including reverse dependency
716 information.
717 """
718 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600719 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500720 rq.rqdata.prepare()
721 return self.buildDependTree(rq, taskdata)
722
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600723 @staticmethod
724 def add_mc_prefix(mc, pn):
725 if mc:
726 return "multiconfig:%s.%s" % (mc, pn)
727 return pn
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500728
    def buildDependTree(self, rq, taskdata):
        """Flatten a prepared runqueue into a dependency-tree dictionary.

        rq: a RunQueue whose rqdata has been prepare()d.
        taskdata: dict of TaskData objects indexed by multiconfig.

        Returns a dict with keys "depends", "tdepends", "pn", "rdepends-pn",
        "packages", "rdepends-pkg", "rrecs-pkg", "providermap" and
        "layer-priorities", as consumed by the -g / depexp style UIs.
        """
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        # Map each provided name to its actual provider (pn, version) where
        # the provider name differs from the target name.
        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            # Per-recipe metadata is recorded once, on first sight of the pn.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            # Inter-task edges, rendered as "pn.taskname" nodes.
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, deptaskname, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[mc].pkg_fn[deptaskfn]
                dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            # Per-recipe-file data (build/runtime deps, packages) once per fn.
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
815
816 ######## WARNING : this function requires cache_extra to be enabled ########
817 def generatePkgDepTreeData(self, pkgs_to_build, task):
818 """
819 Create a dependency tree of pkgs_to_build, returning the data.
820 """
821 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500822
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600823 seen_fns = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500824 depend_tree = {}
825 depend_tree["depends"] = {}
826 depend_tree["pn"] = {}
827 depend_tree["rdepends-pn"] = {}
828 depend_tree["rdepends-pkg"] = {}
829 depend_tree["rrecs-pkg"] = {}
830
831 # if we have extra caches, list all attributes they bring in
832 extra_info = []
833 for cache_class in self.caches_array:
834 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
835 cachefields = getattr(cache_class, 'cachefields', [])
836 extra_info = extra_info + cachefields
837
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600838 tids = []
839 for mc in taskdata:
840 for tid in taskdata[mc].taskentries:
841 tids.append(tid)
842
843 for tid in tids:
844 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
845
846 pn = self.recipecaches[mc].pkg_fn[taskfn]
847 pn = self.add_mc_prefix(mc, pn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500848
849 if pn not in depend_tree["pn"]:
850 depend_tree["pn"][pn] = {}
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600851 depend_tree["pn"][pn]["filename"] = taskfn
852 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500853 depend_tree["pn"][pn]["version"] = version
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600854 rdepends = self.recipecaches[mc].rundeps[taskfn]
855 rrecs = self.recipecaches[mc].runrecs[taskfn]
856 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500857
858 # for all extra attributes stored, add them to the dependency tree
859 for ei in extra_info:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600860 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500861
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600862 if taskfn not in seen_fns:
863 seen_fns.append(taskfn)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500864
865 depend_tree["depends"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600866 for item in taskdata[mc].depids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500867 pn_provider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600868 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
869 fn_provider = taskdata[mc].build_targets[dep][0]
870 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500871 else:
872 pn_provider = item
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600873 pn_provider = self.add_mc_prefix(mc, pn_provider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500874 depend_tree["depends"][pn].append(pn_provider)
875
876 depend_tree["rdepends-pn"][pn] = []
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600877 for rdep in taskdata[mc].rdepids[taskfn]:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500878 pn_rprovider = ""
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600879 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
880 fn_rprovider = taskdata[mc].run_targets[rdep][0]
881 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500882 else:
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600883 pn_rprovider = rdep
884 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500885 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
886
887 depend_tree["rdepends-pkg"].update(rdepends)
888 depend_tree["rrecs-pkg"].update(rrecs)
889
890 return depend_tree
891
892 def generateDepTreeEvent(self, pkgs_to_build, task):
893 """
894 Create a task dependency graph of pkgs_to_build.
895 Generate an event with the result
896 """
897 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
898 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
899
900 def generateDotGraphFiles(self, pkgs_to_build, task):
901 """
902 Create a task dependency graph of pkgs_to_build.
903 Save the result to a set of .dot files.
904 """
905
906 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
907
908 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600909 depends_file = open('pn-depends.dot', 'w' )
910 buildlist_file = open('pn-buildlist', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500911 print("digraph depends {", file=depends_file)
912 for pn in depgraph["pn"]:
913 fn = depgraph["pn"][pn]["filename"]
914 version = depgraph["pn"][pn]["version"]
915 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
916 print("%s" % pn, file=buildlist_file)
917 buildlist_file.close()
918 logger.info("PN build list saved to 'pn-buildlist'")
919 for pn in depgraph["depends"]:
920 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500921 print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500922 for pn in depgraph["rdepends-pn"]:
923 for rdepend in depgraph["rdepends-pn"][pn]:
924 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
925 print("}", file=depends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600926 depends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500927 logger.info("PN dependencies saved to 'pn-depends.dot'")
928
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600929 depends_file = open('package-depends.dot', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500930 print("digraph depends {", file=depends_file)
931 for package in depgraph["packages"]:
932 pn = depgraph["packages"][package]["pn"]
933 fn = depgraph["packages"][package]["filename"]
934 version = depgraph["packages"][package]["version"]
935 if package == pn:
936 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
937 else:
938 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
939 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500940 print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500941 for package in depgraph["rdepends-pkg"]:
942 for rdepend in depgraph["rdepends-pkg"][package]:
943 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
944 for package in depgraph["rrecs-pkg"]:
945 for rdepend in depgraph["rrecs-pkg"][package]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500946 print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500947 print("}", file=depends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600948 depends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500949 logger.info("Package dependencies saved to 'package-depends.dot'")
950
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600951 tdepends_file = open('task-depends.dot', 'w' )
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500952 print("digraph depends {", file=tdepends_file)
953 for task in depgraph["tdepends"]:
954 (pn, taskname) = task.rsplit(".", 1)
955 fn = depgraph["pn"][pn]["filename"]
956 version = depgraph["pn"][pn]["version"]
957 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
958 for dep in depgraph["tdepends"][task]:
959 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
960 print("}", file=tdepends_file)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600961 tdepends_file.close()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500962 logger.info("Task dependencies saved to 'task-depends.dot'")
963
964 def show_appends_with_no_recipes(self):
965 # Determine which bbappends haven't been applied
966
967 # First get list of recipes, including skipped
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600968 recipefns = list(self.recipecaches[''].pkg_fn.keys())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500969 recipefns.extend(self.skiplist.keys())
970
971 # Work out list of bbappends that have been applied
972 applied_appends = []
973 for fn in recipefns:
974 applied_appends.extend(self.collection.get_file_appends(fn))
975
976 appends_without_recipes = []
977 for _, appendfn in self.collection.bbappends:
978 if not appendfn in applied_appends:
979 appends_without_recipes.append(appendfn)
980
981 if appends_without_recipes:
982 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
983 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
984 False) or "no"
985 if warn_only.lower() in ("1", "yes", "true"):
986 bb.warn(msg)
987 else:
988 bb.fatal(msg)
989
990 def handlePrefProviders(self):
991
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600992 for mc in self.multiconfigs:
993 localdata = data.createCopy(self.databuilder.mcdata[mc])
994 bb.data.update_data(localdata)
995 bb.data.expandKeys(localdata)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500996
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600997 # Handle PREFERRED_PROVIDERS
998 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
999 try:
1000 (providee, provider) = p.split(':')
1001 except:
1002 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1003 continue
1004 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1005 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1006 self.recipecaches[mc].preferred[providee] = provider
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001007
1008 def findCoreBaseFiles(self, subdir, configfile):
1009 corebase = self.data.getVar('COREBASE', True) or ""
1010 paths = []
1011 for root, dirs, files in os.walk(corebase + '/' + subdir):
1012 for d in dirs:
1013 configfilepath = os.path.join(root, d, configfile)
1014 if os.path.exists(configfilepath):
1015 paths.append(os.path.join(root, d))
1016
1017 if paths:
1018 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
1019
1020 def findConfigFilePath(self, configfile):
1021 """
1022 Find the location on disk of configfile and if it exists and was parsed by BitBake
1023 emit the ConfigFilePathFound event with the path to the file.
1024 """
1025 path = bb.cookerdata.findConfigFile(configfile, self.data)
1026 if not path:
1027 return
1028
1029 # Generate a list of parsed configuration files by searching the files
1030 # listed in the __depends and __base_depends variables with a .conf suffix.
1031 conffiles = []
1032 dep_files = self.data.getVar('__base_depends', False) or []
1033 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1034
1035 for f in dep_files:
1036 if f[0].endswith(".conf"):
1037 conffiles.append(f[0])
1038
1039 _, conf, conffile = path.rpartition("conf/")
1040 match = os.path.join(conf, conffile)
1041 # Try and find matches for conf/conffilename.conf as we don't always
1042 # have the full path to the file.
1043 for cfg in conffiles:
1044 if cfg.endswith(match):
1045 bb.event.fire(bb.event.ConfigFilePathFound(path),
1046 self.data)
1047 break
1048
1049 def findFilesMatchingInDir(self, filepattern, directory):
1050 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001051 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001052 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1053 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1054 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001055 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001056 """
1057
1058 matches = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001059 bbpaths = self.data.getVar('BBPATH', True).split(':')
1060 for path in bbpaths:
1061 dirpath = os.path.join(path, directory)
1062 if os.path.exists(dirpath):
1063 for root, dirs, files in os.walk(dirpath):
1064 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001065 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001066 matches.append(f)
1067
1068 if matches:
1069 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1070
1071 def findConfigFiles(self, varname):
1072 """
1073 Find config files which are appropriate values for varname.
1074 i.e. MACHINE, DISTRO
1075 """
1076 possible = []
1077 var = varname.lower()
1078
1079 data = self.data
1080 # iterate configs
1081 bbpaths = data.getVar('BBPATH', True).split(':')
1082 for path in bbpaths:
1083 confpath = os.path.join(path, "conf", var)
1084 if os.path.exists(confpath):
1085 for root, dirs, files in os.walk(confpath):
1086 # get all child files, these are appropriate values
1087 for f in files:
1088 val, sep, end = f.rpartition('.')
1089 if end == 'conf':
1090 possible.append(val)
1091
1092 if possible:
1093 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1094
1095 def findInheritsClass(self, klass):
1096 """
1097 Find all recipes which inherit the specified class
1098 """
1099 pkg_list = []
1100
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001101 for pfn in self.recipecaches[''].pkg_fn:
1102 inherits = self.recipecaches[''].inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001103 if inherits and klass in inherits:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001104 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001105
1106 return pkg_list
1107
1108 def generateTargetsTree(self, klass=None, pkgs=None):
1109 """
1110 Generate a dependency tree of buildable targets
1111 Generate an event with the result
1112 """
1113 # if the caller hasn't specified a pkgs list default to universe
1114 if not pkgs:
1115 pkgs = ['universe']
1116 # if inherited_class passed ensure all recipes which inherit the
1117 # specified class are included in pkgs
1118 if klass:
1119 extra_pkgs = self.findInheritsClass(klass)
1120 pkgs = pkgs + extra_pkgs
1121
1122 # generate a dependency tree for all our packages
1123 tree = self.generatePkgDepTreeData(pkgs, 'build')
1124 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1125
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001126 def interactiveMode( self ):
1127 """Drop off into a shell"""
1128 try:
1129 from bb import shell
1130 except ImportError:
1131 parselog.exception("Interactive mode not available")
1132 sys.exit(1)
1133 else:
1134 shell.start( self )
1135
1136
    def handleCollections(self, collections):
        """
        Handle collections.

        'collections' is the space-separated list of configured layer
        names (BBFILE_COLLECTIONS). For each layer this validates its
        priority, LAYERDEPENDS and LAYERRECOMMENDS settings, computes an
        effective priority, and populates self.bbfile_config_priorities
        with (name, pattern, compiled-pattern, priority) tuples.
        Raises CollectionError after logging if any errors were found.
        """
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    # Track the smallest explicit priority; it becomes the
                    # floor used when calculating implicit priorities below.
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # None marks this layer's priority as "to be derived
                    # from its dependencies" in calc_layer_priority().
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            # Verify any version constraint against the
                            # depended-on layer's declared LAYERVERSION.
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        # Unlike LAYERDEPENDS, a failed
                                        # recommends is only a debug note.
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # Implicit priority: one higher than the highest priority
                # among the layer's dependencies (floor is min_prio).
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1253
1254 def buildSetVars(self):
1255 """
1256 Setup any variables needed before starting a build
1257 """
1258 t = time.gmtime()
1259 if not self.data.getVar("BUILDNAME", False):
1260 self.data.setVar("BUILDNAME", "${DATE}${TIME}")
1261 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1262 self.data.setVar("DATE", time.strftime('%Y%m%d', t))
1263 self.data.setVar("TIME", time.strftime('%H%M%S', t))
1264
1265 def matchFiles(self, bf):
1266 """
1267 Find the .bb files which match the expression in 'buildfile'.
1268 """
1269 if bf.startswith("/") or bf.startswith("../"):
1270 bf = os.path.abspath(bf)
1271
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001272 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001273 filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
1274 try:
1275 os.stat(bf)
1276 bf = os.path.abspath(bf)
1277 return [bf]
1278 except OSError:
1279 regexp = re.compile(bf)
1280 matches = []
1281 for f in filelist:
1282 if regexp.search(f) and os.path.isfile(f):
1283 matches.append(f)
1284 return matches
1285
1286 def matchFile(self, buildfile):
1287 """
1288 Find the .bb file which matches the expression in 'buildfile'.
1289 Raise an error if multiple files
1290 """
1291 matches = self.matchFiles(buildfile)
1292 if len(matches) != 1:
1293 if matches:
1294 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1295 if matches:
1296 for f in matches:
1297 msg += "\n %s" % f
1298 parselog.error(msg)
1299 else:
1300 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1301 raise NoSpecificMatch
1302 return matches[0]
1303
    def buildFile(self, buildfile, task):
        """
        Parse and build the single recipe file matching 'buildfile',
        running 'task' (or the configured default task when task is
        None). Dependencies are deliberately not handled: the recipe's
        external depends/runtime depends are stripped before the
        runqueue is built. The runqueue is driven asynchronously via a
        server idle callback registered at the end of this function.
        """
        bb.event.fire(bb.event.BuildInit(), self.expanded_data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        # Split off any virtual class extension / multiconfig prefix, then
        # resolve the remaining expression to exactly one recipe file.
        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        # Parse the recipe (with its bbappends) directly rather than going
        # through the normal cache population path.
        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)

        infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = []
        self.recipecaches[mc].runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            if not task.startswith("do_"):
                task = "do_%s" % task
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
        taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)

        # Execute the runqueue
        if not task.startswith("do_"):
            task = "do_%s" % task
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            # Idle handler driving the runqueue. Returns False once the
            # build is finished (success or failure - completion is
            # reported via finishAsyncCommand), True/retval while there
            # is still work to do.

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1407
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified.

        Fires BuildInit/BuildStarted events, builds the taskdata and
        runqueue for 'targets' running 'task' (the configured default
        task when None), and registers an idle callback that drives the
        runqueue to completion asynchronously.
        """

        def buildTargetsIdle(server, rq, abort):
            # Idle handler driving the runqueue. Returns False once the
            # build has finished (reported via finishAsyncCommand),
            # True/retval while work remains. NOTE: 'buildname' is
            # captured from the enclosing scope; it is assigned below,
            # before this callback can run.
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        # Normalize each bare target to "<target>:do_<task>" form.
        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.expanded_data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            # target is (multiconfig, target name, task, fn); a non-empty
            # multiconfig gets an additional "multiconfig:..." entry.
            if target[0]:
                ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1474
1475
1476 def getAllKeysWithFlags(self, flaglist):
1477 dump = {}
1478 for k in self.data.keys():
1479 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001480 expand = True
1481 flags = self.data.getVarFlags(k)
1482 if flags and "func" in flags and "python" in flags:
1483 expand = False
1484 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001485 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1486 dump[k] = {
1487 'v' : v ,
1488 'history' : self.data.varhistory.variable(k),
1489 }
1490 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001491 if flags and d in flags:
1492 dump[k][d] = flags[d]
1493 else:
1494 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001495 except Exception as e:
1496 print(e)
1497 return dump
1498
1499
1500 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1501 '''
1502 Create a new image with a "require"/"inherit" base_image statement
1503 '''
1504 if timestamp:
1505 image_name = os.path.splitext(image)[0]
1506 timestr = time.strftime("-%Y%m%d-%H%M%S")
1507 dest = image_name + str(timestr) + ".bb"
1508 else:
1509 if not image.endswith(".bb"):
1510 dest = image + ".bb"
1511 else:
1512 dest = image
1513
1514 basename = False
1515 if base_image:
1516 with open(base_image, 'r') as f:
1517 require_line = f.readline()
1518 p = re.compile("IMAGE_BASENAME *=")
1519 for line in f:
1520 if p.search(line):
1521 basename = True
1522
1523 with open(dest, "w") as imagefile:
1524 if base_image is None:
1525 imagefile.write("inherit core-image\n")
1526 else:
1527 topdir = self.data.getVar("TOPDIR", False)
1528 if topdir in base_image:
1529 base_image = require_line.split()[1]
1530 imagefile.write("require " + base_image + "\n")
1531 image_install = "IMAGE_INSTALL = \""
1532 for package in package_queue:
1533 image_install += str(package) + " "
1534 image_install += "\"\n"
1535 imagefile.write(image_install)
1536
1537 description_var = "DESCRIPTION = \"" + description + "\"\n"
1538 imagefile.write(description_var)
1539
1540 if basename:
1541 # If this is overwritten in a inherited image, reset it to default
1542 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1543 imagefile.write(image_basename)
1544
1545 self.state = state.initial
1546 if timestamp:
1547 return timestr
1548
1549 def updateCacheSync(self):
1550 if self.state == state.running:
1551 return
1552
1553 # reload files for which we got notifications
1554 for p in self.inotify_modified_files:
1555 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001556 if p in bb.parse.BBHandler.cached_statements:
1557 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001558 self.inotify_modified_files = []
1559
1560 if not self.baseconfig_valid:
1561 logger.debug(1, "Reloading base configuration data")
1562 self.initConfigurationData()
1563 self.baseconfig_valid = True
1564 self.parsecache_valid = False
1565
1566 # This is called for all async commands when self.state != running
1567 def updateCache(self):
1568 if self.state == state.running:
1569 return
1570
1571 if self.state in (state.shutdown, state.forceshutdown, state.error):
1572 if hasattr(self.parser, 'shutdown'):
1573 self.parser.shutdown(clean=False, force = True)
1574 raise bb.BBHandledException()
1575
1576 if self.state != state.parsing:
1577 self.updateCacheSync()
1578
1579 if self.state != state.parsing and not self.parsecache_valid:
1580 self.parseConfiguration ()
1581 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1582 bb.event.fire(bb.event.SanityCheck(False), self.data)
1583
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001584 for mc in self.multiconfigs:
1585 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
1586 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001587
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001588 for dep in self.configuration.extra_assume_provided:
1589 self.recipecaches[mc].ignored_dependencies.add(dep)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001590
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001591 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001592 (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
1593
1594 self.parser = CookerParser(self, filelist, masked)
1595 self.parsecache_valid = True
1596
1597 self.state = state.parsing
1598
1599 if not self.parser.parse_next():
1600 collectlog.debug(1, "parsing complete")
1601 if self.parser.error:
1602 raise bb.BBHandledException()
1603 self.show_appends_with_no_recipes()
1604 self.handlePrefProviders()
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001605 for mc in self.multiconfigs:
1606 self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001607 self.state = state.running
1608
1609 # Send an event listing all stamps reachable after parsing
1610 # which the metadata may use to clean up stale data
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001611 for mc in self.multiconfigs:
1612 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1613 bb.event.fire(event, self.databuilder.mcdata[mc])
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001614 return None
1615
1616 return True
1617
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001618 def checkPackages(self, pkgs_to_build, task=None):
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001619
1620 # Return a copy, don't modify the original
1621 pkgs_to_build = pkgs_to_build[:]
1622
1623 if len(pkgs_to_build) == 0:
1624 raise NothingToBuild
1625
1626 ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
1627 for pkg in pkgs_to_build:
1628 if pkg in ignore:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001629 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001630
1631 if 'world' in pkgs_to_build:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001632 pkgs_to_build.remove('world')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001633 for mc in self.multiconfigs:
1634 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1635 for t in self.recipecaches[mc].world_target:
1636 if mc:
1637 t = "multiconfig:" + mc + ":" + t
1638 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001639
1640 if 'universe' in pkgs_to_build:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001641 parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001642 parselog.debug(1, "collating packages for \"universe\"")
1643 pkgs_to_build.remove('universe')
Patrick Williamsc0f7c042017-02-23 20:41:17 -06001644 for mc in self.multiconfigs:
1645 for t in self.recipecaches[mc].universe_target:
1646 if mc:
1647 t = "multiconfig:" + mc + ":" + t
1648 pkgs_to_build.append(t)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001649
1650 return pkgs_to_build
1651
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001652 def pre_serve(self):
1653 # Empty the environment. The environment will be populated as
1654 # necessary from the data store.
1655 #bb.utils.empty_environment()
1656 try:
1657 self.prhost = prserv.serv.auto_start(self.data)
1658 except prserv.serv.PRServiceConfigError:
1659 bb.event.fire(CookerExit(), self.expanded_data)
1660 self.state = state.error
1661 return
1662
    def post_serve(self):
        """
        Tear down after serving: stop the PR service, notify clients of
        the exit, then release bitbake.lock and block until no other
        process is still holding it.
        """
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.expanded_data)
        lockfile = self.lock.name
        self.lock.close()
        self.lock = None

        # Try to re-acquire the lock exclusively; while another process
        # still holds it, report what appears to be holding it and retry
        # every 3 seconds.
        while not self.lock:
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        # ENOENT means lsof itself is missing; anything else
                        # is a real failure
                        if e.errno != errno.ENOENT:
                            raise
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)
1693
1694
1695 def shutdown(self, force = False):
1696 if force:
1697 self.state = state.forceshutdown
1698 else:
1699 self.state = state.shutdown
1700
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001701 if self.parser:
1702 self.parser.shutdown(clean=not force, force=force)
1703
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001704 def finishcommand(self):
1705 self.state = state.initial
1706
    def reset(self):
        # Discard and rebuild all configuration data from scratch.
        self.initConfigurationData()
1709
1710 def lockBitbake(self):
1711 if not hasattr(self, 'lock'):
1712 self.lock = None
1713 if self.data:
1714 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
1715 if lockfile:
1716 self.lock = bb.utils.lockfile(lockfile, False, False)
1717 return self.lock
1718
1719 def unlockBitbake(self):
1720 if hasattr(self, 'lock') and self.lock:
1721 bb.utils.unlockfile(self.lock)
1722
def server_main(cooker, func, *args):
    """
    Run func(*args) between cooker.pre_serve() and cooker.post_serve(),
    optionally under the profiler when cooker.configuration.profile is
    set, and return func's result.

    Note: post_serve() is intentionally not run when func raises, matching
    the original control flow.
    """
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            # Bugfix: was a bare except, which would also swallow
            # KeyboardInterrupt/SystemExit; only a missing cProfile should
            # trigger the pure-Python fallback.
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1745
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        super().__init__()
1753
1754
class CookerCollectFiles(object):
    """
    Locates the .bb and .bbappend files for a build, applies BBMASK
    filtering and BBFILE_* collection priorities, and records
    bbappend/overlay relationships between recipes.
    """
    def __init__(self, priorities):
        # List of (recipe basename, bbappend path) pairs, filled in by
        # collect_bbfiles()
        self.bbappends = []
        # (collection, pattern, compiled regex, priority) tuples
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority(self, filename, matched=None):
        """
        Return the priority of the first collection whose regex matches
        filename, or 0 when none match. When a set is passed as matched,
        the matching regex is recorded in it (set.add is idempotent, so
        no membership pre-check is needed).
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        return [os.path.abspath(os.path.join(path, f))
                for f in os.listdir(path) if f.endswith(".bb")]

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # Bugfix: str.endswith() requires a str or a tuple of suffixes;
            # the previous list argument raised TypeError at runtime.
            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not files:
            files = self.get_bbfiles()

        if not files:
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                # glob gives files in order on disk. Sort to be deterministic.
                for g in sorted(globbed):
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # A '%' in the bbappend name acts as a wildcard suffix match
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):
        """
        Map each virtual filename in pkgfns to its collection priority,
        warning about any BBFILE_PATTERN that matched nothing (unless
        suppressed via BBFILE_PATTERN_IGNORE_EMPTY_<collection>).
        """
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        # True when the regex matches at least one collected bbappend
        def findmatch(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
                    collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1926
class ParsingFailure(Exception):
    """Wraps a non-Exception failure (e.g. a BaseException raised while
    parsing a recipe) together with the recipe filename involved."""

    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        super().__init__(realexception, recipe)
1932
class Feeder(multiprocessing.Process):
    """
    Child process that drains a list of parse jobs into the to_parsers
    queue until the jobs run out or a message arrives on the quit queue.
    """
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            # Any message on the quit queue stops the feeder; 'cancel'
            # additionally abandons whatever is still buffered in the pipe.
            try:
                token = self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                if token == 'cancel':
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                # All jobs have been handed out
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except queue.Full:
                # Consumers are busy: put the job back and retry next pass
                self.jobs.insert(0, job)
1961
class Parser(multiprocessing.Process):
    """
    Worker process that pulls recipe filenames from the jobs queue,
    parses them via the shared bb_cache (installed by the init callback),
    and pushes (parsed, result) tuples onto the results queue.
    """
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot context/handlers so each parse starts from a clean copy
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        """Process entry point; optionally wraps realrun() in a profiler."""
        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            # Bugfix: was a bare except, which would also swallow
            # KeyboardInterrupt/SystemExit; only ImportError should select
            # the pure-Python profile fallback.
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        """Main worker loop: consume jobs, produce results, honour quit."""
        if self.init:
            self.init()

        # A result that could not be delivered yet (results queue full)
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                # Quit requested: abandon undelivered results and exit
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except queue.Empty:
                    continue

                # None is the sentinel for "no more work"
                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, filename, appends):
        """Parse one recipe (with its bbappends); always returns a
        (True, payload) tuple where payload is the parse result or the
        exception describing the failure."""
        try:
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, self.bb_cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
2045
2046class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        """
        Partition filelist into entries servable from the existing cache
        and entries needing a fresh parse, then start the parser
        processes (see start()).
        """
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                # Stale or missing cache entry: needs a real parse
                self.willparse.append((filename, appends))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a ParseProgress event roughly every 1% of the work
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Capped at the number of files that actually need parsing
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count()), len(self.willparse))

        self.start()
        self.haveshutdown = False
2084
    def start(self):
        """
        Begin parsing: results for still-valid cache entries are yielded
        directly, everything else is farmed out to one Feeder process and
        num_processes Parser processes. self.results becomes a single
        iterator over both sources.
        """
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Runs once inside each child Parser process before it starts
            # consuming jobs; installs the shared cache and save hooks.
            def init():
                Parser.bb_cache = self.bb_cache
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            # Cached results first, then live parse results as they arrive
            self.results = itertools.chain(self.results, self.parse_generator())
2109
    def shutdown(self, clean=True, force=False):
        """
        Stop the feeder/parser processes. A clean shutdown fires a
        ParseCompleted event and lets queues drain; an unclean one cancels
        the queues. With force, child processes are terminated after a
        short join. Idempotent via self.haveshutdown.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One quit token per parser process
            for process in self.processes:
                self.parser_quit.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the parse cache back to disk in the background, but make
        # sure it completes before interpreter exit.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2159
2160 def load_cached(self):
2161 for filename, appends in self.fromcache:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002162 cached, infos = self.bb_cache.load(filename, appends)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002163 yield not cached, infos
2164
2165 def parse_generator(self):
2166 while True:
2167 if self.parsed >= self.toparse:
2168 break
2169
2170 try:
2171 result = self.result_queue.get(timeout=0.25)
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002172 except queue.Empty:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002173 pass
2174 else:
2175 value = result[1]
2176 if isinstance(value, BaseException):
2177 raise value
2178 else:
2179 yield result
2180
    def parse_next(self):
        """
        Pull one result from self.results and fold it into the cooker's
        recipe caches. Returns False when parsing has finished or a parse
        error occurred (shutting the parser down as a side effect), True
        while there is more to process.
        """
        result = []
        parsed = None
        try:
            parsed, result = next(self.results)
        except StopIteration:
            # All results consumed: clean shutdown
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            # Drop leading traceback frames that are inside bitbake itself
            # so the user sees the metadata frames
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            # Emit progress roughly every progress_chunk results
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2245
2246 def reparse(self, filename):
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002247 infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002248 for vfn, info_array in infos:
Patrick Williamsc0f7c042017-02-23 20:41:17 -06002249 (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
2250 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)