#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
39import Queue
40import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
45
# Module-level loggers. "BitBake" is the parent logger; the dotted child
# loggers let output be filtered/tagged per cooker subsystem.
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")   # bbfile/layer collection
buildlog = logging.getLogger("BitBake.Build")          # build execution
parselog = logging.getLogger("BitBake.Parsing")        # recipe parsing
providerlog = logging.getLogger("BitBake.Provider")    # provider resolution
51
class NoSpecificMatch(bb.BBHandledException):
    """
    Raised when a file pattern matches either no files or more than one,
    so no single recipe file can be selected.
    """
56
class NothingToBuild(Exception):
    """
    Raised when the requested targets resolve to an empty build set.
    """
61
class CollectionError(bb.BBHandledException):
    """
    Raised when the layer/collection configuration is incorrect.
    """
66
class state:
    """Enumeration of cooker lifecycle states (plain ints, 0..6)."""
    (initial, parsing, running, shutdown,
     forceshutdown, stopped, error) = range(7)
69
70
class SkippedPackage:
    """Lightweight record of a recipe skipped during parsing.

    Built either from a recipe info object (copying its pn, skip reason
    and provides lists) or from a bare reason string.
    """
    def __init__(self, info = None, reason = None):
        # Defaults: nothing known about the skipped recipe.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the interesting fields from the parsed recipe info.
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            # No recipe info available, just an explanation string.
            self.skipreason = reason
85
86
class CookerFeatures(object):
    """Set of optional cooker behaviours a UI can request.

    Feature identifiers are the integer constants declared in
    _feature_list; requests for unknown identifiers are silently ignored.
    """
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # Silently drop requests for features we don't know about.
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return iter(self._features)

    def next(self):
        # NOTE(review): Python 2 iterator hook, but sets expose no .next();
        # this only works if _features were itself an iterator — confirm
        # whether this method is ever actually called.
        return self._features.next()
107
108
109#============================================================================#
110# BBCooker
111#============================================================================#
112class BBCooker:
113 """
114 Manages one bitbake build run
115 """
116
    def __init__(self, configuration, featureSet=None):
        """Set up one cooker instance for a build run.

        configuration: the cooker configuration object (provides
            server_register_idlecallback, interface, writeeventlog, ...).
        featureSet: optional iterable of CookerFeatures constants to enable.

        Side effects: creates inotify watchers, parses the base
        configuration, takes the per-build-directory lock, clears the
        terminal TOSTOP bit and installs SIGTERM/SIGHUP handlers.
        """
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        # Watcher for base configuration files; bbseen/bbwatchedfiles are
        # bookkeeping lists attached to the WatchManager by add_filewatch().
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        # Separate watcher for recipe/parse-time files.
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)


        self.initConfigurationData()

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            # Idle callback: drain both notifiers without blocking.
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqueue them
                    n.read_events()
                    n.process_events()
            # 1.0 = ask the server to call us again in one second.
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            # Record the server interface in the lock file so other
            # bitbake invocations can report who holds the lock.
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
            self.lock.flush()
        except:
            # Best effort only — failure to annotate the lock is harmless.
            pass

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
188
189 def config_notifications(self, event):
190 if not event.pathname in self.configwatcher.bbwatchedfiles:
191 return
192 if not event.path in self.inotify_modified_files:
193 self.inotify_modified_files.append(event.path)
194 self.baseconfig_valid = False
195
196 def notifications(self, event):
197 if not event.path in self.inotify_modified_files:
198 self.inotify_modified_files.append(event.path)
199 self.parsecache_valid = False
200
    def add_filewatch(self, deps, watcher=None):
        """Add inotify watches for a list of dependency entries.

        deps: iterable of tuples whose first element is a file path (as
            stored in the datastore's __depends/__base_depends lists).
        watcher: WatchManager to use; defaults to self.watcher (recipe
            files). self.configwatcher is passed for configuration files.

        Watches are placed on each file's parent directory. If that
        directory does not exist yet, the nearest existing ancestor is
        watched instead so creating the missing path later still notifies.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            f = os.path.dirname(i[0])
            # bbseen records directories we already attempted, so each is
            # only processed once per watcher.
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        # Remember the missing directory so events on the
                        # ancestor can be matched back to it.
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: climb to its parent and retry.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
234
235 def sigterm_exception(self, signum, stackframe):
236 if signum == signal.SIGTERM:
237 bb.warn("Cooker recieved SIGTERM, shutting down...")
238 elif signum == signal.SIGHUP:
239 bb.warn("Cooker recieved SIGHUP, shutting down...")
240 self.state = state.forceshutdown
241
242 def setFeatures(self, features):
243 # we only accept a new feature set if we're in state initial, so we can reset without problems
244 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
245 raise Exception("Illegal state for feature set change")
246 original_featureset = list(self.featureset)
247 for feature in features:
248 self.featureset.setFeature(feature)
249 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
250 if (original_featureset != list(self.featureset)) and self.state != state.error:
251 self.reset()
252
    def initConfigurationData(self):
        """(Re)build all state derived from the base configuration.

        Resets the cooker state, selects the cache classes to use, parses
        the base configuration into self.data, optionally installs a
        JSON event-log writer UI handler, creates the expanded datastore
        copy and registers file watches on every configuration dependency.
        """

        self.state = state.initial
        self.caches_array = []

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Each entry is "module:classname"; resolve it dynamically.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        # Parse the base configuration (bitbake.conf etc.) into a datastore.
        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash


        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            import json, pickle
            DEFAULT_EVENTFILE = self.configuration.writeeventlog
            class EventLogWriteHandler():

                class EventWriter():
                    # Serializes every fired event to DEFAULT_EVENTFILE as
                    # one JSON object per line. Events fired before
                    # BuildStarted are queued and flushed once the file is
                    # initialized.
                    def __init__(self, cooker):
                        self.file_inited = None
                        self.cooker = cooker
                        self.event_queue = []

                    def init_file(self):
                        try:
                            # delete the old log
                            os.remove(DEFAULT_EVENTFILE)
                        except:
                            pass

                        # write current configuration data
                        with open(DEFAULT_EVENTFILE, "w") as f:
                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                    def write_event(self, event):
                        # Append one event; errors are printed but never
                        # allowed to break the build.
                        with open(DEFAULT_EVENTFILE, "a") as f:
                            try:
                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
                            except Exception as e:
                                import traceback
                                print(e, traceback.format_exc(e))


                    def send(self, event):
                        event_class = event.__module__ + "." + event.__class__.__name__

                        # init on bb.event.BuildStarted
                        if self.file_inited is None:
                            if event_class == "bb.event.BuildStarted":
                                self.init_file()
                                self.file_inited = True

                                # write pending events
                                for e in self.event_queue:
                                    self.write_event(e)

                                # also write the current event
                                self.write_event(event)

                            else:
                                # queue all events until the file is inited
                                self.event_queue.append(event)

                        else:
                            # we have the file, just write the event
                            self.write_event(event)

                # set our handler's event processor
                # (class-body closure: "self" here is the enclosing cooker)
                event = EventWriter(self) # self is the cooker here


            # set up cooker features for this mock UI handler

            # we need to write the dependency tree in the log
            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
            # register the log file writer as UI Handler
            bb.event.register_UIHhandler(EventLogWriteHandler())


        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        self.expanded_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.expanded_data)
        bb.parse.init_parser(self.expanded_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Preserve the configuration-time dependency list separately from
        # recipe-time __depends, and watch all of those files for changes.
        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
366
367
368 def enableDataTracking(self):
369 self.configuration.tracking = True
370 if hasattr(self, "data"):
371 self.data.enableTracking()
372
373 def disableDataTracking(self):
374 self.configuration.tracking = False
375 if hasattr(self, "data"):
376 self.data.disableTracking()
377
378 def modifyConfigurationVar(self, var, val, default_file, op):
379 if op == "append":
380 self.appendConfigurationVar(var, val, default_file)
381 elif op == "set":
382 self.saveConfigurationVar(var, val, default_file, "=")
383 elif op == "earlyAssign":
384 self.saveConfigurationVar(var, val, default_file, "?=")
385
386
387 def appendConfigurationVar(self, var, val, default_file):
388 #add append var operation to the end of default_file
389 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
390
391 total = "#added by hob"
392 total += "\n%s += \"%s\"\n" % (var, val)
393
394 with open(default_file, 'a') as f:
395 f.write(total)
396
397 #add to history
398 loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
399 self.data.appendVar(var, val, **loginfo)
400
    def saveConfigurationVar(self, var, val, default_file, op):
        """Persist `var op "val"` into the configuration files.

        Existing assignments of var under TOPDIR are either replaced in
        place (when they were previously written by this code, marked with
        "#added by hob") or commented out; if nothing was replaced, a new
        assignment is appended to default_file. The in-memory datastore
        and variable history are updated to match.
        """

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            # Only touch files inside the build directory.
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                # varhistory gives 1-based line numbers of each assignment.
                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    # Accumulate the file text up to the recorded line so we
                    # can locate the character offset of the assignment.
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    # Newline count before the match = 0-based line index.
                    begin_line = total.count("\n",0,index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if true it replace the place where the variable was declared
                    #else it comments it
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)
469
    def removeConfigurationVar(self, var):
        """Remove every assignment of var from configuration files under
        TOPDIR, blanking the lines (including the "#added by hob" marker
        when present), and delete the variable from the datastore and its
        history."""
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            # Only touch files inside the build directory.
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                # varhistory gives 1-based line numbers of each assignment.
                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    # Accumulate file text up to the recorded line to find
                    # the character offset of the assignment.
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)

                    #check if the variable was saved before in the same way
                    if contents[begin_line-1]== "#added by hob\n":
                        # Blank both the marker comment and the assignment.
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"
                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)
                    #remove variable
                    self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
504
505 def createConfigFile(self, name):
506 path = os.getcwd()
507 confpath = os.path.join(path, "conf", name)
508 open(confpath, 'w').close()
509
510 def parseConfiguration(self):
511 # Set log file verbosity
512 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
513 if verboselogs:
514 bb.msg.loggerVerboseLogs = True
515
516 # Change nice level if we're asked to
517 nice = self.data.getVar("BB_NICE_LEVEL", True)
518 if nice:
519 curnice = os.nice(0)
520 nice = int(nice) - curnice
521 buildlog.verbose("Renice to %s " % os.nice(nice))
522
523 if self.recipecache:
524 del self.recipecache
525 self.recipecache = bb.cache.CacheData(self.caches_array)
526
527 self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
528
    def updateConfigOpts(self, options, environment):
        """Absorb updated command-line options and environment from a UI.

        options: dict of configuration attribute names to new values.
        environment: the UI process's environment dict; only variables in
            bb.utils.approved_variables() are synchronized.

        If anything relevant changed, the base configuration is marked
        invalid and the cooker is reset so it gets reparsed.
        """
        clean = True
        for o in options:
            # A changed pre/post file always forces a reparse.
            if o in ['prefile', 'postfile']:
                clean = False
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            # Variable newly present in the UI environment: adopt it.
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            # Variable disappeared from the UI environment: drop it.
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            # Present in neither place: nothing to compare.
            if k not in self.configuration.env and k not in environment:
                continue
            # Present in both: update on value change.
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()
554
555 def runCommands(self, server, data, abort):
556 """
557 Run any queued asynchronous command
558 This is done by the idle handler so it runs in true context rather than
559 tied to any UI.
560 """
561
562 return self.command.runAsyncCommand()
563
564 def showVersions(self):
565
566 pkg_pn = self.recipecache.pkg_pn
567 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
568
569 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
570 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
571
572 for p in sorted(pkg_pn):
573 pref = preferred_versions[p]
574 latest = latest_versions[p]
575
576 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
577 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
578
579 if pref == latest:
580 prefstr = ""
581
582 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
583
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment.

        buildfile: show the environment of this specific recipe file.
        pkgs_to_build: alternatively, a single target name whose provider
            recipe is resolved and shown. With neither (or multiple
            targets), the global configuration environment is shown.
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            # Resolve possible virtual:... prefixes and wildcards to one
            # concrete recipe file.
            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            # Resolve the target to the recipe file providing it.
            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.data

        if fn:
            try:
                # Full parse of the single recipe, appends included.
                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, True))
637
638
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build.

        pkgs_to_build: list of target names; a "target:do_task" form
            overrides the task for that target.
        task: default task name (None means self.configuration.cmd).
        abort: passed to TaskData — abort on unbuildable targets when True.
        allowincomplete: allow an incomplete task graph.

        Returns (taskdata, runlist, fulltargetlist) and fires
        TreeDataPreparation* progress events along the way.
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        # Work on an expanded copy so the main datastore stays pristine.
        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            # "target:do_task" syntax selects a specific task per target.
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            runlist.append([k, ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
672 return taskdata, runlist, fulltargetlist
673
674 def prepareTreeData(self, pkgs_to_build, task):
675 """
676 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
677 """
678
679 # We set abort to False here to prevent unbuildable targets raising
680 # an exception when we're just generating data
681 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
682
683 return runlist, taskdata
684
685 ######## WARNING : this function requires cache_extra to be enabled ########
686
687 def generateTaskDepTreeData(self, pkgs_to_build, task):
688 """
689 Create a dependency graph of pkgs_to_build including reverse dependency
690 information.
691 """
692 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
693 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
694 rq.rqdata.prepare()
695 return self.buildDependTree(rq, taskdata)
696
697
    def buildDependTree(self, rq, taskdata):
        """Build the dependency-tree dict from a prepared runqueue.

        Returns a dict with keys: "depends" (build deps per pn),
        "tdepends" (inter-task deps, "pn.task" strings), "pn" (per-recipe
        metadata), "rdepends-pn", "packages", "rdepends-pkg", "rrecs-pkg"
        and "layer-priorities".
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        # Walk every task in the runqueue (indices into the runq_* arrays).
        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            # First time we see this recipe: record its metadata.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            # Record inter-task dependencies as "pn.taskname" edges.
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            # First time we see this recipe file: record its package-level
            # build/runtime dependency information.
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
776
777 ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.

        Like buildDependTree() but package-oriented and without task
        edges: build/runtime dependencies are resolved to the providing
        recipe's pn where a provider is known. Requires cache_extra.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            # First sight of this recipe: record its metadata. Note that
            # rdepends/rrecs assigned here are consumed in the fnid branch
            # below on the same (first) iteration for this file.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    # Map the dependency to the pn of its provider when one
                    # has been resolved; fall back to the raw item name.
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    # Same provider resolution for runtime dependencies.
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
854
855 def generateDepTreeEvent(self, pkgs_to_build, task):
856 """
857 Create a task dependency graph of pkgs_to_build.
858 Generate an event with the result
859 """
860 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
861 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
862
863 def generateDotGraphFiles(self, pkgs_to_build, task):
864 """
865 Create a task dependency graph of pkgs_to_build.
866 Save the result to a set of .dot files.
867 """
868
869 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
870
871 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
872 depends_file = file('pn-depends.dot', 'w' )
873 buildlist_file = file('pn-buildlist', 'w' )
874 print("digraph depends {", file=depends_file)
875 for pn in depgraph["pn"]:
876 fn = depgraph["pn"][pn]["filename"]
877 version = depgraph["pn"][pn]["version"]
878 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
879 print("%s" % pn, file=buildlist_file)
880 buildlist_file.close()
881 logger.info("PN build list saved to 'pn-buildlist'")
882 for pn in depgraph["depends"]:
883 for depend in depgraph["depends"][pn]:
884 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
885 for pn in depgraph["rdepends-pn"]:
886 for rdepend in depgraph["rdepends-pn"][pn]:
887 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
888 print("}", file=depends_file)
889 logger.info("PN dependencies saved to 'pn-depends.dot'")
890
891 depends_file = file('package-depends.dot', 'w' )
892 print("digraph depends {", file=depends_file)
893 for package in depgraph["packages"]:
894 pn = depgraph["packages"][package]["pn"]
895 fn = depgraph["packages"][package]["filename"]
896 version = depgraph["packages"][package]["version"]
897 if package == pn:
898 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
899 else:
900 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
901 for depend in depgraph["depends"][pn]:
902 print('"%s" -> "%s"' % (package, depend), file=depends_file)
903 for package in depgraph["rdepends-pkg"]:
904 for rdepend in depgraph["rdepends-pkg"][package]:
905 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
906 for package in depgraph["rrecs-pkg"]:
907 for rdepend in depgraph["rrecs-pkg"][package]:
908 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
909 print("}", file=depends_file)
910 logger.info("Package dependencies saved to 'package-depends.dot'")
911
912 tdepends_file = file('task-depends.dot', 'w' )
913 print("digraph depends {", file=tdepends_file)
914 for task in depgraph["tdepends"]:
915 (pn, taskname) = task.rsplit(".", 1)
916 fn = depgraph["pn"][pn]["filename"]
917 version = depgraph["pn"][pn]["version"]
918 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
919 for dep in depgraph["tdepends"][task]:
920 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
921 print("}", file=tdepends_file)
922 logger.info("Task dependencies saved to 'task-depends.dot'")
923
924 def show_appends_with_no_recipes(self):
925 # Determine which bbappends haven't been applied
926
927 # First get list of recipes, including skipped
928 recipefns = self.recipecache.pkg_fn.keys()
929 recipefns.extend(self.skiplist.keys())
930
931 # Work out list of bbappends that have been applied
932 applied_appends = []
933 for fn in recipefns:
934 applied_appends.extend(self.collection.get_file_appends(fn))
935
936 appends_without_recipes = []
937 for _, appendfn in self.collection.bbappends:
938 if not appendfn in applied_appends:
939 appends_without_recipes.append(appendfn)
940
941 if appends_without_recipes:
942 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
943 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
944 False) or "no"
945 if warn_only.lower() in ("1", "yes", "true"):
946 bb.warn(msg)
947 else:
948 bb.fatal(msg)
949
950 def handlePrefProviders(self):
951
952 localdata = data.createCopy(self.data)
953 bb.data.update_data(localdata)
954 bb.data.expandKeys(localdata)
955
956 # Handle PREFERRED_PROVIDERS
957 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
958 try:
959 (providee, provider) = p.split(':')
960 except:
961 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
962 continue
963 if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
964 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
965 self.recipecache.preferred[providee] = provider
966
967 def findCoreBaseFiles(self, subdir, configfile):
968 corebase = self.data.getVar('COREBASE', True) or ""
969 paths = []
970 for root, dirs, files in os.walk(corebase + '/' + subdir):
971 for d in dirs:
972 configfilepath = os.path.join(root, d, configfile)
973 if os.path.exists(configfilepath):
974 paths.append(os.path.join(root, d))
975
976 if paths:
977 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
978
979 def findConfigFilePath(self, configfile):
980 """
981 Find the location on disk of configfile and if it exists and was parsed by BitBake
982 emit the ConfigFilePathFound event with the path to the file.
983 """
984 path = bb.cookerdata.findConfigFile(configfile, self.data)
985 if not path:
986 return
987
988 # Generate a list of parsed configuration files by searching the files
989 # listed in the __depends and __base_depends variables with a .conf suffix.
990 conffiles = []
991 dep_files = self.data.getVar('__base_depends', False) or []
992 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
993
994 for f in dep_files:
995 if f[0].endswith(".conf"):
996 conffiles.append(f[0])
997
998 _, conf, conffile = path.rpartition("conf/")
999 match = os.path.join(conf, conffile)
1000 # Try and find matches for conf/conffilename.conf as we don't always
1001 # have the full path to the file.
1002 for cfg in conffiles:
1003 if cfg.endswith(match):
1004 bb.event.fire(bb.event.ConfigFilePathFound(path),
1005 self.data)
1006 break
1007
    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Search for files whose names contain 'filepattern' — matched literally,
        since the pattern is re.escape()d before compilation — under
        'directory' in each BBPATH entry, and fire a FilesMatchingFound event
        with the bare file names found (not their full paths).

        i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """

        matches = []
        # re.escape() means filepattern is treated as a literal substring,
        # not as a regular expression.
        p = re.compile(re.escape(filepattern))
        bbpaths = self.data.getVar('BBPATH', True).split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if p.search(f):
                            # Only the file name is recorded, not its path.
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1030
1031 def findConfigFiles(self, varname):
1032 """
1033 Find config files which are appropriate values for varname.
1034 i.e. MACHINE, DISTRO
1035 """
1036 possible = []
1037 var = varname.lower()
1038
1039 data = self.data
1040 # iterate configs
1041 bbpaths = data.getVar('BBPATH', True).split(':')
1042 for path in bbpaths:
1043 confpath = os.path.join(path, "conf", var)
1044 if os.path.exists(confpath):
1045 for root, dirs, files in os.walk(confpath):
1046 # get all child files, these are appropriate values
1047 for f in files:
1048 val, sep, end = f.rpartition('.')
1049 if end == 'conf':
1050 possible.append(val)
1051
1052 if possible:
1053 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1054
1055 def findInheritsClass(self, klass):
1056 """
1057 Find all recipes which inherit the specified class
1058 """
1059 pkg_list = []
1060
1061 for pfn in self.recipecache.pkg_fn:
1062 inherits = self.recipecache.inherits.get(pfn, None)
1063 if inherits and inherits.count(klass) > 0:
1064 pkg_list.append(self.recipecache.pkg_fn[pfn])
1065
1066 return pkg_list
1067
1068 def generateTargetsTree(self, klass=None, pkgs=None):
1069 """
1070 Generate a dependency tree of buildable targets
1071 Generate an event with the result
1072 """
1073 # if the caller hasn't specified a pkgs list default to universe
1074 if not pkgs:
1075 pkgs = ['universe']
1076 # if inherited_class passed ensure all recipes which inherit the
1077 # specified class are included in pkgs
1078 if klass:
1079 extra_pkgs = self.findInheritsClass(klass)
1080 pkgs = pkgs + extra_pkgs
1081
1082 # generate a dependency tree for all our packages
1083 tree = self.generatePkgDepTreeData(pkgs, 'build')
1084 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1085
    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"

        A recipe joins world_target unless it provides a virtual/* target or
        one of its provides is also provided by a different recipe.
        """
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.recipecache.possible_world:
            terminal = True
            pn = self.recipecache.pkg_fn[f]

            for p in self.recipecache.pn_provides[pn]:
                # Recipes providing virtual/* targets are excluded from world.
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                for pf in self.recipecache.providers[p]:
                    # Excluded if another recipe also provides 'p'.
                    if self.recipecache.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
                # NOTE(review): the inner break only exits the providers loop;
                # remaining provides of pn are still examined (harmless, since
                # terminal stays False once cleared).
            if terminal:
                self.recipecache.world_target.add(pn)
1107
1108 def interactiveMode( self ):
1109 """Drop off into a shell"""
1110 try:
1111 from bb import shell
1112 except ImportError:
1113 parselog.exception("Interactive mode not available")
1114 sys.exit(1)
1115 else:
1116 shell.start( self )
1117
1118
1119 def handleCollections( self, collections ):
1120 """Handle collections"""
1121 errors = False
1122 self.recipecache.bbfile_config_priorities = []
1123 if collections:
1124 collection_priorities = {}
1125 collection_depends = {}
1126 collection_list = collections.split()
1127 min_prio = 0
1128 for c in collection_list:
1129 # Get collection priority if defined explicitly
1130 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
1131 if priority:
1132 try:
1133 prio = int(priority)
1134 except ValueError:
1135 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1136 errors = True
1137 if min_prio == 0 or prio < min_prio:
1138 min_prio = prio
1139 collection_priorities[c] = prio
1140 else:
1141 collection_priorities[c] = None
1142
1143 # Check dependencies and store information for priority calculation
1144 deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
1145 if deps:
1146 try:
1147 deplist = bb.utils.explode_dep_versions2(deps)
1148 except bb.utils.VersionStringException as vse:
1149 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1150 for dep, oplist in deplist.iteritems():
1151 if dep in collection_list:
1152 for opstr in oplist:
1153 layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
1154 (op, depver) = opstr.split()
1155 if layerver:
1156 try:
1157 res = bb.utils.vercmp_string_op(layerver, depver, op)
1158 except bb.utils.VersionStringException as vse:
1159 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1160 if not res:
1161 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1162 errors = True
1163 else:
1164 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1165 errors = True
1166 else:
1167 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1168 errors = True
1169 collection_depends[c] = deplist.keys()
1170 else:
1171 collection_depends[c] = []
1172
1173 # Recursively work out collection priorities based on dependencies
1174 def calc_layer_priority(collection):
1175 if not collection_priorities[collection]:
1176 max_depprio = min_prio
1177 for dep in collection_depends[collection]:
1178 calc_layer_priority(dep)
1179 depprio = collection_priorities[dep]
1180 if depprio > max_depprio:
1181 max_depprio = depprio
1182 max_depprio += 1
1183 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1184 collection_priorities[collection] = max_depprio
1185
1186 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1187 for c in collection_list:
1188 calc_layer_priority(c)
1189 regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
1190 if regex == None:
1191 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1192 errors = True
1193 continue
1194 try:
1195 cre = re.compile(regex)
1196 except re.error:
1197 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1198 errors = True
1199 continue
1200 self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1201 if errors:
1202 # We've already printed the actual error(s)
1203 raise CollectionError("Errors during parsing layer configuration")
1204
1205 def buildSetVars(self):
1206 """
1207 Setup any variables needed before starting a build
1208 """
1209 t = time.gmtime()
1210 if not self.data.getVar("BUILDNAME", False):
1211 self.data.setVar("BUILDNAME", "${DATE}${TIME}")
1212 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1213 self.data.setVar("DATE", time.strftime('%Y%m%d', t))
1214 self.data.setVar("TIME", time.strftime('%H%M%S', t))
1215
1216 def matchFiles(self, bf):
1217 """
1218 Find the .bb files which match the expression in 'buildfile'.
1219 """
1220 if bf.startswith("/") or bf.startswith("../"):
1221 bf = os.path.abspath(bf)
1222
1223 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1224 filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
1225 try:
1226 os.stat(bf)
1227 bf = os.path.abspath(bf)
1228 return [bf]
1229 except OSError:
1230 regexp = re.compile(bf)
1231 matches = []
1232 for f in filelist:
1233 if regexp.search(f) and os.path.isfile(f):
1234 matches.append(f)
1235 return matches
1236
1237 def matchFile(self, buildfile):
1238 """
1239 Find the .bb file which matches the expression in 'buildfile'.
1240 Raise an error if multiple files
1241 """
1242 matches = self.matchFiles(buildfile)
1243 if len(matches) != 1:
1244 if matches:
1245 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1246 if matches:
1247 for f in matches:
1248 msg += "\n %s" % f
1249 parselog.error(msg)
1250 else:
1251 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1252 raise NoSpecificMatch
1253 return matches[0]
1254
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile.

        Parses the recipe directly (bypassing the recipe cache), strips its
        external dependencies out of the recipe data, and runs 'task' (or the
        configured default when task is None) through a runqueue driven by an
        idle callback.
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                     self.data,
                                     self.caches_array)
        infos = dict(infos)

        # Look up the (possibly virtual) filename in the parse results.
        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            if not task.startswith("do_"):
                task = "do_%s" % task
            bb.parse.siggen.invalidate_task(task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)

        # Execute the runqueue
        if not task.startswith("do_"):
            task = "do_%s" % task
        runlist = [[item, task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            """Idle-loop driver: steps the runqueue until finished or aborted."""

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                # Runqueue drained: report completion and stop the callback.
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1356
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Expands the target list into task data and a run list, fires
        BuildStarted and drives the resulting runqueue via an idle callback.
        """

        def buildTargetsIdle(server, rq, abort):
            """Idle-loop driver: steps the runqueue until finished or aborted."""
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                # Runqueue drained: fire BuildCompleted and stop the callback.
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        # 'buildname' is captured by the buildTargetsIdle closure above.
        buildname = self.data.getVar("BUILDNAME", False)
        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
            # universe builds legitimately hit multiple providers per target.
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1405
1406 def getAllKeysWithFlags(self, flaglist):
1407 dump = {}
1408 for k in self.data.keys():
1409 try:
1410 v = self.data.getVar(k, True)
1411 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1412 dump[k] = {
1413 'v' : v ,
1414 'history' : self.data.varhistory.variable(k),
1415 }
1416 for d in flaglist:
1417 dump[k][d] = self.data.getVarFlag(k, d)
1418 except Exception as e:
1419 print(e)
1420 return dump
1421
1422
    def generateNewImage(self, image, base_image, package_queue, timestamp, description):
        '''
        Create a new image with a "require"/"inherit" base_image statement

        image: output recipe path (".bb" is appended if missing)
        base_image: existing image recipe to require, or None to inherit
                    core-image instead
        package_queue: values written into IMAGE_INSTALL
        timestamp: if true, append "-%Y%m%d-%H%M%S" to the name and return
                   that suffix
        description: written into DESCRIPTION
        '''
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
            dest = image_name + str(timestr) + ".bb"
        else:
            if not image.endswith(".bb"):
                dest = image + ".bb"
            else:
                dest = image

        # Does the base image override IMAGE_BASENAME?  If so we reset it below.
        basename = False
        if base_image:
            with open(base_image, 'r') as f:
                require_line = f.readline()
                p = re.compile("IMAGE_BASENAME *=")
                for line in f:
                    if p.search(line):
                        basename = True

        with open(dest, "w") as imagefile:
            if base_image is None:
                imagefile.write("inherit core-image\n")
            else:
                topdir = self.data.getVar("TOPDIR", False)
                if topdir in base_image:
                    # The base image lives inside the build dir; require what
                    # it itself requires (its first line) instead.
                    base_image = require_line.split()[1]
                imagefile.write("require " + base_image + "\n")
            image_install = "IMAGE_INSTALL = \""
            for package in package_queue:
                image_install += str(package) + " "
            image_install += "\"\n"
            imagefile.write(image_install)

            description_var = "DESCRIPTION = \"" + description + "\"\n"
            imagefile.write(description_var)

            if basename:
                # If this is overwritten in a inherited image, reset it to default
                image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
                imagefile.write(image_basename)

        self.state = state.initial
        if timestamp:
            return timestr
1471
1472 def updateCacheSync(self):
1473 if self.state == state.running:
1474 return
1475
1476 # reload files for which we got notifications
1477 for p in self.inotify_modified_files:
1478 bb.parse.update_cache(p)
1479 self.inotify_modified_files = []
1480
1481 if not self.baseconfig_valid:
1482 logger.debug(1, "Reloading base configuration data")
1483 self.initConfigurationData()
1484 self.baseconfig_valid = True
1485 self.parsecache_valid = False
1486
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """Incrementally (re)parse all recipes.

        Returns True while the idle loop should call again, None once parsing
        has completed and the cooker is back in the running state.  Raises
        bb.BBHandledException on shutdown or parse errors.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            # Abort requested: stop the parser processes before bailing out.
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            # (Re)start a full parse: rebuild config, file list and parser.
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            # Parsing finished: wire up providers/priorities and start running.
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            # (the local name shadows the imported 'event' module here).
            event = bb.event.ReachableStamps(self.recipecache.stamp)
            bb.event.fire(event, self.expanded_data)
            return None

        return True
1535
1536 def checkPackages(self, pkgs_to_build):
1537
1538 # Return a copy, don't modify the original
1539 pkgs_to_build = pkgs_to_build[:]
1540
1541 if len(pkgs_to_build) == 0:
1542 raise NothingToBuild
1543
1544 ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
1545 for pkg in pkgs_to_build:
1546 if pkg in ignore:
1547 parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1548
1549 if 'world' in pkgs_to_build:
1550 self.buildWorldTargetList()
1551 pkgs_to_build.remove('world')
1552 for t in self.recipecache.world_target:
1553 pkgs_to_build.append(t)
1554
1555 if 'universe' in pkgs_to_build:
1556 parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
1557 parselog.debug(1, "collating packages for \"universe\"")
1558 pkgs_to_build.remove('universe')
1559 for t in self.recipecache.universe_target:
1560 pkgs_to_build.append(t)
1561
1562 return pkgs_to_build
1563
1564
1565
1566
    def pre_serve(self):
        """Start the PR (package revision) service before serving; on a
        configuration error the cooker goes into the error state instead of
        raising."""
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.expanded_data)
            self.state = state.error
        return
1577
    def post_serve(self):
        """Shut down the PR service, notify clients, and re-acquire
        bitbake.lock (blocking until any straggling holders release it)."""
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.expanded_data)
        lockfile = self.lock.name
        self.lock.close()
        self.lock = None

        while not self.lock:
            # Retry the lock with a 3s timeout; if we still can't get it,
            # report which processes appear to be holding it.
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                    # NOTE(review): check_output also raises CalledProcessError
                    # when the tool exits non-zero (e.g. lsof with no matches);
                    # that case is not handled here -- confirm this is intended.
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)
1608
1609
1610 def shutdown(self, force = False):
1611 if force:
1612 self.state = state.forceshutdown
1613 else:
1614 self.state = state.shutdown
1615
    def finishcommand(self):
        # An async command finished; return the cooker to its idle state.
        self.state = state.initial
1618
    def reset(self):
        # Throw away and rebuild the base configuration data.
        self.initConfigurationData()
1621
1622 def lockBitbake(self):
1623 if not hasattr(self, 'lock'):
1624 self.lock = None
1625 if self.data:
1626 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
1627 if lockfile:
1628 self.lock = bb.utils.lockfile(lockfile, False, False)
1629 return self.lock
1630
1631 def unlockBitbake(self):
1632 if hasattr(self, 'lock') and self.lock:
1633 bb.utils.unlockfile(self.lock)
1634
def server_main(cooker, func, *args):
    """Run func(*args) between cooker.pre_serve() and cooker.post_serve(),
    optionally under the profiler (cooker.configuration.profile), and return
    func's result."""
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            # Narrowed from a bare except: cProfile is the optional C
            # accelerator; fall back to the pure-python profiler.
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1657
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown

    Fired e.g. when the PR service fails to start, during post_serve(), or
    when no recipe files could be found.
    """

    def __init__(self):
        bb.event.Event.__init__(self)
1666
class CookerCollectFiles(object):
    """Collects the set of .bb/.bbappend files visible to the build and
    computes per-file priorities from the layer configuration."""

    def __init__(self, priorities):
        # (base recipe name, bbappend path) pairs, filled by collect_bbfiles().
        self.bbappends = []
        # (collection, pattern, compiled regex, priority) tuples as produced
        # by handleCollections().
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority( self, filename, matched = None ):
        """Return the priority of the first collection whose BBFILE_PATTERN
        matches 'filename' (0 if none claims it).  When 'matched' is a set,
        record each regex that matched something."""
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched != None:
                    if not regex in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() requires a tuple of suffixes; the previous list
            # argument raised TypeError for every directory entry in BBFILES.
            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files.

        Returns (bbfiles, masked) where 'masked' counts files skipped due to
        BBMASK.  Also populates self.bbappends and self.overlayed."""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # Exact basename match, or wildcard match up to the '%' marker.
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):
        """Map each (virtual) recipe filename to its layer priority, warning
        about collections whose BBFILE_PATTERN matched nothing."""
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

        def findmatch(regex):
            # Did this collection's pattern match any bbappend instead?
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
                    collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1824
class ParsingFailure(Exception):
    """Wraps an exception raised while parsing a recipe, remembering which
    recipe triggered it."""
    def __init__(self, realexception, recipe):
        Exception.__init__(self, realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1830
class Feeder(multiprocessing.Process):
    """Child process that feeds parse jobs from a shared list into the
    to_parsers queue until cancelled or out of jobs."""
    def __init__(self, jobs, to_parsers, quit):
        # jobs: list of pending parse jobs (consumed from the tail)
        # to_parsers: queue the Parser processes read from
        # quit: control queue; receiving any message stops the feeder
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    # Don't block process exit on unread queue data.
                    self.to_parsers.cancel_join_thread()
                # Any message on the quit queue stops the feeder.
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                # No jobs left; we are done.
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                # Queue saturated: put the job back (at the head) and retry.
                self.jobs.insert(0, job)
                continue
1859
class Parser(multiprocessing.Process):
    """Worker process that parses recipe files from a job queue and posts the
    results back on a results queue."""
    def __init__(self, jobs, results, quit, init, profile):
        # jobs: queue of (filename, appends, caches_array) work items
        # results: queue the (True, payload) results are posted to
        # quit: control queue used to request shutdown
        # init: optional callable run once inside the child process
        # profile: when true, run under the profiler and dump per-process stats
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the parent's context/handlers so each parse starts clean.
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # Child-process entry point; optionally wraps realrun() in a profiler
        # whose stats are dumped per process name.

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        # Main worker loop: drain the job queue, parse each file and post the
        # result, retrying results that hit a full queue.
        if self.init:
            self.init()

        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                # Shutdown requested; don't block exit on unread results.
                self.results.cancel_join_thread()
                break

            if pending:
                # Retry a result that previously failed to post.
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                if job is None:
                    # Sentinel: no more jobs.
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        # Parse one recipe; always returns (True, payload) where payload is
        # either the parsed cache data or the exception that occurred.
        # NOTE(review): self.cfg is not assigned in __init__; it appears to be
        # set on the instance externally before parsing starts -- confirm.
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
1935
class CookerParser(object):
    """Drives parsing of all recipe files using a pool of Parser worker
    processes fed by a Feeder process.

    Results are consumed incrementally via parse_next(); statistics are
    accumulated as parsing progresses and reported through the
    ParseStarted/ParseProgress/ParseCompleted events.
    """
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        # Number of worker processes: user override or one per CPU.
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        # Split the file list into entries which can be loaded from the
        # cache and those which need a full parse.
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a progress event roughly every 1% of parsed files.
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        """Launch the feeder and parser processes (if anything to parse)."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Runs inside each worker (via Parser.realrun) to hand it the
            # configuration and register cache-save handlers on exit.
            def init():
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            # Cached entries are yielded first, then worker results.
            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        """Stop the feeder and parser processes and write out the caches.

        With clean=True a ParseCompleted event is fired and the workers
        drain normally; otherwise they are told to cancel.  With
        force=True lingering processes are terminated after a short join.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One None per parser acts as an end-of-work sentinel
            # (see Parser.realrun).
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the recipe cache out on a background thread while the
        # codeparser/fetcher caches are merged here.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)
        if self.cooker.configuration.profile:
            # Merge the per-process profiling logs written by Parser.run().
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def load_cached(self):
        """Generator yielding (parsed, infos) for each cache-valid file."""
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        """Generator yielding results posted by the worker processes."""
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                # Workers return (True, infos-or-exception); re-raise any
                # exception here so parse_next() handles it in this process.
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        """Consume one parse result, updating statistics and the cache.

        Returns True while there is more to do; returns False once
        parsing is complete (clean shutdown) or has failed (unclean
        shutdown after logging the error).
        """
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s', value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
        """Force a re-parse of one file, adding it to the recipe cache."""
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)