#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import print_function
import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from cStringIO import StringIO
from contextlib import closing
from functools import wraps
from collections import defaultdict
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import Queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)


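# Lightweight record of why a recipe was skipped during parsing, built either
# from a cached recipe info object or from an explicit reason string.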
class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        return self._features.next()


#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)


        self.initConfigurationData()

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqueue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]))
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

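    # inotify callback for the configuration file watcher: a change to any
    # watched configuration file invalidates the parsed base configuration.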
    def config_notifications(self, event):
        if event.pathname not in self.configwatcher.bbwatchedfiles:
            return
        if event.path not in self.inotify_modified_files:
            self.inotify_modified_files.append(event.path)
        self.baseconfig_valid = False

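    # inotify callback for the recipe/metadata watcher: any change invalidates
    # the parse cache so the affected files are reparsed on the next update.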
    def notifications(self, event):
        if event.path not in self.inotify_modified_files:
            self.inotify_modified_files.append(event.path)
        self.parsecache_valid = False

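    # Register inotify watches for every dependency file in 'deps'. For files
    # that do not exist yet, the nearest existing parent directory is watched
    # instead so that their later creation is still noticed.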
    def add_filewatch(self, deps, watcher=None):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if self.state not in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG", True)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            import json, pickle
            DEFAULT_EVENTFILE = self.configuration.writeeventlog
            class EventLogWriteHandler():

                class EventWriter():
                    def __init__(self, cooker):
                        self.file_inited = None
                        self.cooker = cooker
                        self.event_queue = []

                    def init_file(self):
                        try:
                            # delete the old log
                            os.remove(DEFAULT_EVENTFILE)
                        except:
                            pass

                        # write current configuration data
                        with open(DEFAULT_EVENTFILE, "w") as f:
                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                    def write_event(self, event):
                        with open(DEFAULT_EVENTFILE, "a") as f:
                            try:
                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
                            except Exception as e:
                                import traceback
                                print(e, traceback.format_exc(e))


                    def send(self, event):
                        event_class = event.__module__ + "." + event.__class__.__name__

                        # init on bb.event.BuildStarted
                        if self.file_inited is None:
                            if event_class == "bb.event.BuildStarted":
                                self.init_file()
                                self.file_inited = True

                                # write pending events
                                for e in self.event_queue:
                                    self.write_event(e)

                                # also write the current event
                                self.write_event(event)

                            else:
                                # queue all events until the file is inited
                                self.event_queue.append(event)

                        else:
                            # we have the file, just write the event
                            self.write_event(event)

                # set our handler's event processor
                event = EventWriter(self)       # self is the cooker here


            # set up cooker features for this mock UI handler

            # we need to write the dependency tree in the log
            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
            # register the log file writer as UI Handler
            bb.event.register_UIHhandler(EventLogWriteHandler())


        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        self.expanded_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.expanded_data)
        bb.parse.init_parser(self.expanded_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)


    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

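    # Apply a configuration change requested by a UI such as Hob; 'op' selects
    # an append ('append'), an assignment ('set') or a weak default ('earlyAssign'),
    # e.g. modifyConfigurationVar("IMAGE_INSTALL", "dropbear", "local.conf", "append")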
    def modifyConfigurationVar(self, var, val, default_file, op):
        if op == "append":
            self.appendConfigurationVar(var, val, default_file)
        elif op == "set":
            self.saveConfigurationVar(var, val, default_file, "=")
        elif op == "earlyAssign":
            self.saveConfigurationVar(var, val, default_file, "?=")


    def appendConfigurationVar(self, var, val, default_file):
        #add append var operation to the end of default_file
        default_file = bb.cookerdata.findConfigFile(default_file, self.data)

        total = "#added by hob"
        total += "\n%s += \"%s\"\n" % (var, val)

        with open(default_file, 'a') as f:
            f.write(total)

        #add to history
        loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
        self.data.appendVar(var, val, **loginfo)

    def saveConfigurationVar(self, var, val, default_file, op):

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i == int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n", 0, index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if so, replace the line where the variable was declared
                    #otherwise comment out the old lines
                    if contents[begin_line-1] == "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if not replaced:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)

    def removeConfigurationVar(self, var):
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i == int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n", 0, index)

                    #check if the variable was saved before in the same way
                    if contents[begin_line-1] == "#added by hob\n":
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"
                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)
                    #remove variable
                    self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

    def createConfigFile(self, name):
        path = os.getcwd()
        confpath = os.path.join(path, "conf", name)
        open(confpath, 'w').close()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))

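    # Reconcile options and environment variables supplied by the UI with the
    # server's configuration; any effective change invalidates the base
    # configuration and triggers a reset/reparse.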
    def updateConfigOpts(self, options, environment):
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                clean = False
                server_val = getattr(self.configuration, "%s_server" % o)
                if not options[o] and server_val:
                    # restore value provided on server start
                    setattr(self.configuration, o, server_val)
                    continue
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        pkg_pn = self.recipecache.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag(e, 'python', envdata):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, True))


    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            runlist.append([k, ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)


    def buildDependTree(self, rq, taskdata):
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
        depends_file = file('pn-depends.dot', 'w')
        buildlist_file = file('pn-buildlist', 'w')
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            print("%s" % pn, file=buildlist_file)
        buildlist_file.close()
        logger.info("PN build list saved to 'pn-buildlist'")
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
        for pn in depgraph["rdepends-pn"]:
            for rdepend in depgraph["rdepends-pn"][pn]:
                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("PN dependencies saved to 'pn-depends.dot'")

        depends_file = file('package-depends.dot', 'w')
        print("digraph depends {", file=depends_file)
        for package in depgraph["packages"]:
            pn = depgraph["packages"][package]["pn"]
            fn = depgraph["packages"][package]["filename"]
            version = depgraph["packages"][package]["version"]
            if package == pn:
                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            else:
                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (package, depend), file=depends_file)
        for package in depgraph["rdepends-pkg"]:
            for rdepend in depgraph["rdepends-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        for package in depgraph["rrecs-pkg"]:
            for rdepend in depgraph["rrecs-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("Package dependencies saved to 'package-depends.dot'")

        tdepends_file = file('task-depends.dot', 'w')
        print("digraph depends {", file=tdepends_file)
        for task in depgraph["tdepends"]:
            (pn, taskname) = task.rsplit(".", 1)
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
            for dep in depgraph["tdepends"][task]:
                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
        print("}", file=tdepends_file)
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = self.recipecache.pkg_fn.keys()
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        # Handle PREFERRED_PROVIDERS
        for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
            try:
                (providee, provider) = p.split(':')
            except:
                providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                continue
            if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
                providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
            self.recipecache.preferred[providee] = provider

    def findCoreBaseFiles(self, subdir, configfile):
        corebase = self.data.getVar('COREBASE', True) or ""
        paths = []
        for root, dirs, files in os.walk(corebase + '/' + subdir):
            for d in dirs:
                configfilepath = os.path.join(root, d, configfile)
                if os.path.exists(configfilepath):
                    paths.append(os.path.join(root, d))

        if paths:
            bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files matching the regex 'pattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """

        matches = []
        p = re.compile(re.escape(filepattern))
        bbpaths = self.data.getVar('BBPATH', True).split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if p.search(f):
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH', True).split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecache.pkg_fn:
            inherits = self.recipecache.inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecache.pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"
        """
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.recipecache.possible_world:
            terminal = True
            pn = self.recipecache.pkg_fn[f]

            for p in self.recipecache.pn_provides[pn]:
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                for pf in self.recipecache.providers[p]:
                    if self.recipecache.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
            if terminal:
                self.recipecache.world_target.add(pn)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start(self)


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.recipecache.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    try:
                        deplist = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in deplist.iteritems():
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = deplist.keys()
                else:
                    collection_depends[c] = []

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        t = time.gmtime()
        if not self.data.getVar("BUILDNAME", False):
            self.data.setVar("BUILDNAME", "${DATE}${TIME}")
        self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
        self.data.setVar("DATE", time.strftime('%Y%m%d', t))
        self.data.setVar("TIME", time.strftime('%H%M%S', t))

    def matchFiles(self, bf):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)

        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
        filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                     self.data,
                                     self.caches_array)
        infos = dict(infos)

        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            if not task.startswith("do_"):
                task = "do_%s" % task
            bb.parse.siggen.invalidate_task(task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)

        # Execute the runqueue
        if not task.startswith("do_"):
            task = "do_%s" % task
        runlist = [[item, task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets always look like <target>:do_<task>
        ntargets = []
        for target in fulltargetlist:
            if ":" in target:
                if ":do_" not in target:
                    target = "%s:do_%s" % tuple(target.split(":", 1))
            else:
                target = "%s:%s" % (target, task)
            ntargets.append(target)

        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)


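    # Dump the value, history and requested flags of every variable in the
    # data store; used, for example, when writing the event log file header.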
    def getAllKeysWithFlags(self, flaglist):
        dump = {}
        for k in self.data.keys():
            try:
                v = self.data.getVar(k, True)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v' : v ,
                        'history' : self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        dump[k][d] = self.data.getVarFlag(k, d)
            except Exception as e:
                print(e)
        return dump

1452
1453 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1454 '''
1455 Create a new image with a "require"/"inherit" base_image statement
1456 '''
1457 if timestamp:
1458 image_name = os.path.splitext(image)[0]
1459 timestr = time.strftime("-%Y%m%d-%H%M%S")
1460 dest = image_name + str(timestr) + ".bb"
1461 else:
1462 if not image.endswith(".bb"):
1463 dest = image + ".bb"
1464 else:
1465 dest = image
1466
1467 basename = False
1468 if base_image:
1469 with open(base_image, 'r') as f:
1470 require_line = f.readline()
1471 p = re.compile("IMAGE_BASENAME *=")
1472 for line in f:
1473 if p.search(line):
1474 basename = True
1475
1476 with open(dest, "w") as imagefile:
1477 if base_image is None:
1478 imagefile.write("inherit core-image\n")
1479 else:
1480 topdir = self.data.getVar("TOPDIR", False)
1481 if topdir in base_image:
1482 base_image = require_line.split()[1]
1483 imagefile.write("require " + base_image + "\n")
1484 image_install = "IMAGE_INSTALL = \""
1485 for package in package_queue:
1486 image_install += str(package) + " "
1487 image_install += "\"\n"
1488 imagefile.write(image_install)
1489
1490 description_var = "DESCRIPTION = \"" + description + "\"\n"
1491 imagefile.write(description_var)
1492
1493 if basename:
1494 # If this is overwritten in a inherited image, reset it to default
1495 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1496 imagefile.write(image_basename)
1497
1498 self.state = state.initial
1499 if timestamp:
1500 return timestr
1501
    def updateCacheSync(self):
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.baseconfig_valid = True
            self.parsecache_valid = False

    # This is called for all async commands when self.state != running
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force=True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            self.parseConfiguration()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            event = bb.event.ReachableStamps(self.recipecache.stamp)
            bb.event.fire(event, self.expanded_data)
            return None

        return True

    def checkPackages(self, pkgs_to_build):

        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
        for pkg in pkgs_to_build:
            if pkg in ignore:
                parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)

        if 'world' in pkgs_to_build:
            self.buildWorldTargetList()
            pkgs_to_build.remove('world')
            for t in self.recipecache.world_target:
                pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for t in self.recipecache.universe_target:
                pkgs_to_build.append(t)

        return pkgs_to_build




    def pre_serve(self):
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.expanded_data)
            self.state = state.error
        return

1608 def post_serve(self):
1609 prserv.serv.auto_shutdown(self.data)
1610 bb.event.fire(CookerExit(), self.expanded_data)
1611 lockfile = self.lock.name
1612 self.lock.close()
1613 self.lock = None
1614
        while not self.lock:
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)

    def shutdown(self, force=False):
        if force:
            self.state = state.forceshutdown
        else:
            self.state = state.shutdown

    def finishcommand(self):
        self.state = state.initial

    def reset(self):
        self.initConfigurationData()

    def lockBitbake(self):
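        """Take the ${TOPDIR}/bitbake.lock file lock, storing the handle on self.lock."""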
        if not hasattr(self, 'lock'):
            self.lock = None
            if self.data:
                lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
                if lockfile:
                    self.lock = bb.utils.lockfile(lockfile, False, False)
        return self.lock

    def unlockBitbake(self):
        if hasattr(self, 'lock') and self.lock:
            bb.utils.unlockfile(self.lock)

def server_main(cooker, func, *args):
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()

        ret = prof.runcall(func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret

class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)


class CookerCollectFiles(object):
    def __init__(self, priorities):
        self.bbappends = []
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority(self, filename, matched=None):
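        """
        Return the priority of the first BBFILE_PATTERN regex matching
        filename (0 if none matches), recording the matching regex in the
        'matched' set when one is supplied.
        """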
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get a list of default .bb files by scanning the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dirpath, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() needs a tuple, not a list, to match multiple suffixes
            found += [os.path.join(dirpath, f) for f in files if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar("BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem))

        if not files:
            files = self.get_bbfiles()

        if not files:
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
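                # BBFILES entries may be glob patterns; a plain path that
                # exists but matches nothing as a glob is kept as-is.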
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
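            # A '%' in the bbappend name acts as a wildcard: only the literal
            # prefix before it has to match, e.g. 'busybox_1.%.bb' applies to
            # 'busybox_1.21.1.bb'.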
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):

        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        def findmatch(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
                    collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities

class ParsingFailure(Exception):
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        Exception.__init__(self, realexception, recipe)

class Feeder(multiprocessing.Process):
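    """
    Process which feeds recipe-parsing jobs from the main process into the
    shared queue consumed by the Parser workers, honouring cancel requests.
    """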
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                self.jobs.insert(0, job)
                continue

class Parser(multiprocessing.Process):
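    """
    Worker process which takes recipe files off the job queue, parses them
    and puts the results (or any parsing exception) on the result queue.
    """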
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()
        try:
            prof.runcall(self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        if self.init:
            self.init()

        pending = []
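        # Results that could not be handed over because the results queue was
        # full are kept in 'pending' and retried before taking a new job.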
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Turn BaseExceptions into Exceptions here so that we shut down
        # gracefully and a worker thread doesn't simply exit on its own in
        # response to, for example, a SystemExit event.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)

class CookerParser(object):
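    """
    Orchestrates recipe parsing: results for unchanged recipes are loaded
    from the cache, the rest are farmed out to a Feeder process and a pool
    of Parser processes.
    """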
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

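        # A clean shutdown reports the final statistics and lets every worker
        # drain naturally by feeding one None job per process; otherwise the
        # feeder is told to cancel outright.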
        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(0.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)
        if self.cooker.configuration.profile:
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout=pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def load_cached(self):
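        """Yield results for recipes whose cached data is still valid."""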
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
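        """
        Consume one parse result, updating the statistics and the recipe
        cache. Returns False once parsing is complete or has failed.
        """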
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s', value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred while raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                   parsed=parsed, watcher=self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
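        """Reparse filename, bypassing the cache, and feed the results into the recipe cache."""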
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)