blob: 9b565fc37df04ddc83cd30517246ceb3e7953545 [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
39import Queue
40import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
45
# Module-level loggers: a root "BitBake" logger plus per-subsystem child
# loggers so log output can be filtered by area (collection, build,
# parsing, provider resolution).
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
51
class NoSpecificMatch(bb.BBHandledException):
    """Raised when a file lookup produces no match, or more than one."""
56
class NothingToBuild(Exception):
    """Raised when the requested build contains nothing to process."""
61
class CollectionError(bb.BBHandledException):
    """Raised when the layer (collection) configuration is invalid."""
66
class state:
    """Enumeration of cooker lifecycle states (integer codes 0..6)."""
    initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for the numeric state *code*.

        Raises ValueError if the code does not match any known state.
        """
        for attr in dir(cls):
            candidate = getattr(cls, attr)
            if type(candidate) == type(cls.initial) and candidate == code:
                return attr
        raise ValueError("Invalid status code: %s" % code)
77
Patrick Williamsc124f4f2015-09-15 14:41:29 -050078
class SkippedPackage:
    """Record describing why a recipe was skipped during parsing."""

    def __init__(self, info=None, reason=None):
        # All fields default to None; they are filled either from a recipe
        # info object (preferred) or from a bare reason string.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            for attr in ("pn", "skipreason", "provides", "rprovides"):
                setattr(self, attr, getattr(info, attr))
        elif reason:
            self.skipreason = reason
93
94
class CookerFeatures(object):
    """
    Set of optional cooker features requested by a UI.

    Known feature codes are exposed as class attributes: HOB_EXTRA_CACHES,
    SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS.
    """
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature *f*; unknown feature codes are silently ignored."""
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        # Bug fix: a set is iterable but is not itself an iterator, so the
        # previous "self._features.next()" always raised AttributeError.
        # Advance a fresh iterator over the feature set instead.
        return self._features.__iter__().next()
115
116
117#============================================================================#
118# BBCooker
119#============================================================================#
120class BBCooker:
121 """
122 Manages one bitbake build run
123 """
124
    def __init__(self, configuration, featureSet=None):
        """
        Set up a cooker for one bitbake build run.

        configuration -- server configuration object; must provide
            server_register_idlecallback, interface, writeeventlog, etc.
            (project type -- exact contract defined elsewhere)
        featureSet -- optional iterable of CookerFeatures codes to enable
        """
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = configuration

        # Two independent inotify watchers: one for the base configuration
        # files, one for recipe/parse inputs. bbseen tracks directories we
        # already watch; bbwatchedfiles tracks individual files of interest.
        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.__mtime_cache = {}
        bb.parse.BBHandler.cached_statements = {}

        self.initConfigurationData()

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            # Idle callback: drain pending inotify events for both watchers.
            # Returning 1.0 asks the server to call back again in one second.
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqeue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            # Record the server interface in the lock file so other bitbake
            # invocations can report who holds the lock; best-effort only.
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
200
201 def config_notifications(self, event):
202 if not event.pathname in self.configwatcher.bbwatchedfiles:
203 return
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500204 if not event.pathname in self.inotify_modified_files:
205 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500206 self.baseconfig_valid = False
207
208 def notifications(self, event):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500209 if not event.pathname in self.inotify_modified_files:
210 self.inotify_modified_files.append(event.pathname)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500211 self.parsecache_valid = False
212
    def add_filewatch(self, deps, watcher=None):
        """
        Register inotify watches for a list of dependency entries.

        deps -- iterable of tuples whose first element is a file path
            (e.g. the __base_depends / __depends variable entries)
        watcher -- WatchManager to register with; defaults to self.watcher
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            # Watch the containing directory, not the file itself, so we also
            # see creations/deletions of the file.
            f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: remember it and climb one level
                        # up until we find an ancestor that exists.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
246
247 def sigterm_exception(self, signum, stackframe):
248 if signum == signal.SIGTERM:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500249 bb.warn("Cooker received SIGTERM, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500250 elif signum == signal.SIGHUP:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500251 bb.warn("Cooker received SIGHUP, shutting down...")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500252 self.state = state.forceshutdown
253
254 def setFeatures(self, features):
255 # we only accept a new feature set if we're in state initial, so we can reset without problems
256 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
257 raise Exception("Illegal state for feature set change")
258 original_featureset = list(self.featureset)
259 for feature in features:
260 self.featureset.setFeature(feature)
261 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
262 if (original_featureset != list(self.featureset)) and self.state != state.error:
263 self.reset()
264
    def initConfigurationData(self):
        """
        (Re)initialise the base configuration datastore: load cache classes,
        parse the base configuration, optionally attach an event-log writer,
        and set up the expanded copy of the datastore.
        """

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG", True)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries are "module:ClassName"; import the module and store
                # the class itself for later cache construction.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            import json, pickle
            DEFAULT_EVENTFILE = self.configuration.writeeventlog
            class EventLogWriteHandler():

                class EventWriter():
                    # Writes each fired event as one JSON line to the event
                    # log file, queueing events until BuildStarted is seen.
                    def __init__(self, cooker):
                        self.file_inited = None
                        self.cooker = cooker
                        self.event_queue = []

                    def init_file(self):
                        try:
                            # delete the old log
                            os.remove(DEFAULT_EVENTFILE)
                        except:
                            pass

                        # write current configuration data
                        with open(DEFAULT_EVENTFILE, "w") as f:
                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                    def write_event(self, event):
                        # Event payload is pickled, then JSON-wrapped with its
                        # class name so a reader can reconstruct it.
                        with open(DEFAULT_EVENTFILE, "a") as f:
                            try:
                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
                            except Exception as e:
                                import traceback
                                # NOTE(review): format_exc() takes a `limit`
                                # argument, not an exception -- passing `e`
                                # works only by accident; confirm intent.
                                print(e, traceback.format_exc(e))


                    def send(self, event):
                        event_class = event.__module__ + "." + event.__class__.__name__

                        # init on bb.event.BuildStarted
                        if self.file_inited is None:
                            if event_class == "bb.event.BuildStarted":
                                self.init_file()
                                self.file_inited = True

                                # write pending events
                                for e in self.event_queue:
                                    self.write_event(e)

                                # also write the current event
                                self.write_event(event)

                            else:
                                # queue all events until the file is inited
                                self.event_queue.append(event)

                        else:
                            # we have the file, just write the event
                            self.write_event(event)

                # set our handler's event processor
                event = EventWriter(self)  # self is the cooker here


            # set up cooker features for this mock UI handler

            # we need to write the dependency tree in the log
            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
            # register the log file writer as UI Handler
            bb.event.register_UIHhandler(EventLogWriteHandler())


        #
        # Copy of the data store which has been expanded.
        # Used for firing events and accessing variables where expansion needs to be accounted for
        #
        self.expanded_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.expanded_data)
        bb.parse.init_parser(self.expanded_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Base config dependencies move to __base_depends so recipe parsing
        # can reuse __depends; watch them for changes via the config watcher.
        self.data.renameVar("__depends", "__base_depends")
        self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
385
386
387 def enableDataTracking(self):
388 self.configuration.tracking = True
389 if hasattr(self, "data"):
390 self.data.enableTracking()
391
392 def disableDataTracking(self):
393 self.configuration.tracking = False
394 if hasattr(self, "data"):
395 self.data.disableTracking()
396
397 def modifyConfigurationVar(self, var, val, default_file, op):
398 if op == "append":
399 self.appendConfigurationVar(var, val, default_file)
400 elif op == "set":
401 self.saveConfigurationVar(var, val, default_file, "=")
402 elif op == "earlyAssign":
403 self.saveConfigurationVar(var, val, default_file, "?=")
404
405
406 def appendConfigurationVar(self, var, val, default_file):
407 #add append var operation to the end of default_file
408 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
409
410 total = "#added by hob"
411 total += "\n%s += \"%s\"\n" % (var, val)
412
413 with open(default_file, 'a') as f:
414 f.write(total)
415
416 #add to history
417 loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
418 self.data.appendVar(var, val, **loginfo)
419
    def saveConfigurationVar(self, var, val, default_file, op):
        """
        Persist "var op val" into the user's configuration files.

        Existing assignments of *var* below TOPDIR are either replaced (if
        they were previously written by this code, marked "#added by hob")
        or commented out; otherwise a new stanza is appended to default_file.

        op -- assignment operator string, e.g. "=" or "?="
        """

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var, False):
            return

        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR", False)

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Walk the file up to the recorded line number to find the
                    # character offset of the assignment, then convert that
                    # offset back to a (begin_line, end_line) range.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)
                    end_line = int(line)

                    #check if the variable was saved before in the same way
                    #if true it replace the place where the variable was declared
                    #else it comments it
                    if contents[begin_line-1]== "#added by hob\n":
                        contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                        replaced = True
                    else:
                        for ii in range(begin_line, end_line):
                            contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
            self.data.setVar(var, val, **loginfo)
488
    def removeConfigurationVar(self, var):
        """
        Remove every assignment of *var* from config files below TOPDIR,
        blanking the lines in place, and drop the variable from the
        datastore and its history.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR", False)

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Same line-location technique as saveConfigurationVar():
                    # find the assignment's character offset up to the
                    # recorded line, then map it back to a line index.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)

                    #check if the variable was saved before in the same way
                    if contents[begin_line-1]== "#added by hob\n":
                        # Also blank the "#added by hob" marker line.
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"
                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)
                    #remove variable
                    self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
523
524 def createConfigFile(self, name):
525 path = os.getcwd()
526 confpath = os.path.join(path, "conf", name)
527 open(confpath, 'w').close()
528
529 def parseConfiguration(self):
530 # Set log file verbosity
531 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
532 if verboselogs:
533 bb.msg.loggerVerboseLogs = True
534
535 # Change nice level if we're asked to
536 nice = self.data.getVar("BB_NICE_LEVEL", True)
537 if nice:
538 curnice = os.nice(0)
539 nice = int(nice) - curnice
540 buildlog.verbose("Renice to %s " % os.nice(nice))
541
542 if self.recipecache:
543 del self.recipecache
544 self.recipecache = bb.cache.CacheData(self.caches_array)
545
546 self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
547
    def updateConfigOpts(self, options, environment):
        """
        Merge UI-supplied command options and environment variables into the
        server configuration, triggering a reset/reparse if anything changed.

        options -- dict of option name -> value from the connecting UI
        environment -- dict of environment variables from the UI process
        """
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # A pre/post file always invalidates the cached base config.
                clean = False
                server_val = getattr(self.configuration, "%s_server" % o)
                if not options[o] and server_val:
                    # restore value provided on server start
                    setattr(self.configuration, o, server_val)
                    continue
            setattr(self.configuration, o, options[o])
        for k in bb.utils.approved_variables():
            # Three cases, checked in order: variable newly added by the UI,
            # variable removed by the UI, variable present in both (compare).
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.baseconfig_valid = False
            self.reset()
578
579 def runCommands(self, server, data, abort):
580 """
581 Run any queued asynchronous command
582 This is done by the idle handler so it runs in true context rather than
583 tied to any UI.
584 """
585
586 return self.command.runAsyncCommand()
587
588 def showVersions(self):
589
590 pkg_pn = self.recipecache.pkg_pn
591 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
592
593 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
594 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
595
596 for p in sorted(pkg_pn):
597 pref = preferred_versions[p]
598 latest = latest_versions[p]
599
600 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
601 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
602
603 if pref == latest:
604 prefstr = ""
605
606 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
607
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment.

        buildfile -- optional recipe path; its fully parsed data is shown
        pkgs_to_build -- optional target list; with exactly one entry the
            provider's recipe data is shown, otherwise the global datastore
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            # Resolve the single target to the recipe file providing it.
            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500661
662
    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build.

        pkgs_to_build -- list of target names, optionally "target:do_task"
        task -- default task name, or None to use the configured default
        abort -- whether unbuildable targets should abort the run
        allowincomplete -- passed through to TaskData

        Returns (taskdata, runlist, fulltargetlist).
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            # "target:do_task" entries override the default task per target.
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            runlist.append([k, ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
697
698 def prepareTreeData(self, pkgs_to_build, task):
699 """
700 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
701 """
702
703 # We set abort to False here to prevent unbuildable targets raising
704 # an exception when we're just generating data
705 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
706
707 return runlist, taskdata
708
709 ######## WARNING : this function requires cache_extra to be enabled ########
710
711 def generateTaskDepTreeData(self, pkgs_to_build, task):
712 """
713 Create a dependency graph of pkgs_to_build including reverse dependency
714 information.
715 """
716 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
717 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
718 rq.rqdata.prepare()
719 return self.buildDependTree(rq, taskdata)
720
721
    def buildDependTree(self, rq, taskdata):
        """
        Build the dependency-tree dict from a prepared runqueue.

        rq -- a RunQueue whose rqdata has been prepare()d
        taskdata -- the TaskData used to build the runqueue

        Returns a dict with keys: depends, tdepends, pn, rdepends-pn,
        packages, rdepends-pkg, rrecs-pkg, providermap, layer-priorities.
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        # Map each provided name that differs from its recipe's PN to the
        # providing (pn, version).
        for name, fn in taskdata.get_providermap().iteritems():
            pn = self.recipecache.pkg_fn[fn]
            if name != pn:
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree['providermap'][name] = (pn, version)

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version  = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            # Task-level dependency edges, keyed "pn.taskname".
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                # First time we see this recipe file: record its build/runtime
                # dependencies and package-level information.
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
807
    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are only (re)bound inside this
                # branch but read in the fnid branch below -- appears to rely
                # on both branches triggering together for a new recipe.
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                # Resolve each build-time dependency to the PN of the recipe
                # actually providing it, falling back to the raw item name.
                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                # Same resolution for runtime dependencies.
                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
885
886 def generateDepTreeEvent(self, pkgs_to_build, task):
887 """
888 Create a task dependency graph of pkgs_to_build.
889 Generate an event with the result
890 """
891 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
892 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
893
894 def generateDotGraphFiles(self, pkgs_to_build, task):
895 """
896 Create a task dependency graph of pkgs_to_build.
897 Save the result to a set of .dot files.
898 """
899
900 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
901
902 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
903 depends_file = file('pn-depends.dot', 'w' )
904 buildlist_file = file('pn-buildlist', 'w' )
905 print("digraph depends {", file=depends_file)
906 for pn in depgraph["pn"]:
907 fn = depgraph["pn"][pn]["filename"]
908 version = depgraph["pn"][pn]["version"]
909 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
910 print("%s" % pn, file=buildlist_file)
911 buildlist_file.close()
912 logger.info("PN build list saved to 'pn-buildlist'")
913 for pn in depgraph["depends"]:
914 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500915 print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500916 for pn in depgraph["rdepends-pn"]:
917 for rdepend in depgraph["rdepends-pn"][pn]:
918 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
919 print("}", file=depends_file)
920 logger.info("PN dependencies saved to 'pn-depends.dot'")
921
922 depends_file = file('package-depends.dot', 'w' )
923 print("digraph depends {", file=depends_file)
924 for package in depgraph["packages"]:
925 pn = depgraph["packages"][package]["pn"]
926 fn = depgraph["packages"][package]["filename"]
927 version = depgraph["packages"][package]["version"]
928 if package == pn:
929 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
930 else:
931 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
932 for depend in depgraph["depends"][pn]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500933 print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500934 for package in depgraph["rdepends-pkg"]:
935 for rdepend in depgraph["rdepends-pkg"][package]:
936 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
937 for package in depgraph["rrecs-pkg"]:
938 for rdepend in depgraph["rrecs-pkg"][package]:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500939 print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500940 print("}", file=depends_file)
941 logger.info("Package dependencies saved to 'package-depends.dot'")
942
943 tdepends_file = file('task-depends.dot', 'w' )
944 print("digraph depends {", file=tdepends_file)
945 for task in depgraph["tdepends"]:
946 (pn, taskname) = task.rsplit(".", 1)
947 fn = depgraph["pn"][pn]["filename"]
948 version = depgraph["pn"][pn]["version"]
949 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
950 for dep in depgraph["tdepends"][task]:
951 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
952 print("}", file=tdepends_file)
953 logger.info("Task dependencies saved to 'task-depends.dot'")
954
955 def show_appends_with_no_recipes(self):
956 # Determine which bbappends haven't been applied
957
958 # First get list of recipes, including skipped
959 recipefns = self.recipecache.pkg_fn.keys()
960 recipefns.extend(self.skiplist.keys())
961
962 # Work out list of bbappends that have been applied
963 applied_appends = []
964 for fn in recipefns:
965 applied_appends.extend(self.collection.get_file_appends(fn))
966
967 appends_without_recipes = []
968 for _, appendfn in self.collection.bbappends:
969 if not appendfn in applied_appends:
970 appends_without_recipes.append(appendfn)
971
972 if appends_without_recipes:
973 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
974 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
975 False) or "no"
976 if warn_only.lower() in ("1", "yes", "true"):
977 bb.warn(msg)
978 else:
979 bb.fatal(msg)
980
981 def handlePrefProviders(self):
982
983 localdata = data.createCopy(self.data)
984 bb.data.update_data(localdata)
985 bb.data.expandKeys(localdata)
986
987 # Handle PREFERRED_PROVIDERS
988 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
989 try:
990 (providee, provider) = p.split(':')
991 except:
992 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
993 continue
994 if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
995 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
996 self.recipecache.preferred[providee] = provider
997
998 def findCoreBaseFiles(self, subdir, configfile):
999 corebase = self.data.getVar('COREBASE', True) or ""
1000 paths = []
1001 for root, dirs, files in os.walk(corebase + '/' + subdir):
1002 for d in dirs:
1003 configfilepath = os.path.join(root, d, configfile)
1004 if os.path.exists(configfilepath):
1005 paths.append(os.path.join(root, d))
1006
1007 if paths:
1008 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
1009
1010 def findConfigFilePath(self, configfile):
1011 """
1012 Find the location on disk of configfile and if it exists and was parsed by BitBake
1013 emit the ConfigFilePathFound event with the path to the file.
1014 """
1015 path = bb.cookerdata.findConfigFile(configfile, self.data)
1016 if not path:
1017 return
1018
1019 # Generate a list of parsed configuration files by searching the files
1020 # listed in the __depends and __base_depends variables with a .conf suffix.
1021 conffiles = []
1022 dep_files = self.data.getVar('__base_depends', False) or []
1023 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1024
1025 for f in dep_files:
1026 if f[0].endswith(".conf"):
1027 conffiles.append(f[0])
1028
1029 _, conf, conffile = path.rpartition("conf/")
1030 match = os.path.join(conf, conffile)
1031 # Try and find matches for conf/conffilename.conf as we don't always
1032 # have the full path to the file.
1033 for cfg in conffiles:
1034 if cfg.endswith(match):
1035 bb.event.fire(bb.event.ConfigFilePathFound(path),
1036 self.data)
1037 break
1038
1039 def findFilesMatchingInDir(self, filepattern, directory):
1040 """
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001041 Searches for files containing the substring 'filepattern' which are children of
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001042 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1043 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1044 or to find all machine configuration files one could call:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001045 findFilesMatchingInDir(self, '.conf', 'conf/machine')
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001046 """
1047
1048 matches = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001049 bbpaths = self.data.getVar('BBPATH', True).split(':')
1050 for path in bbpaths:
1051 dirpath = os.path.join(path, directory)
1052 if os.path.exists(dirpath):
1053 for root, dirs, files in os.walk(dirpath):
1054 for f in files:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001055 if filepattern in f:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001056 matches.append(f)
1057
1058 if matches:
1059 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1060
1061 def findConfigFiles(self, varname):
1062 """
1063 Find config files which are appropriate values for varname.
1064 i.e. MACHINE, DISTRO
1065 """
1066 possible = []
1067 var = varname.lower()
1068
1069 data = self.data
1070 # iterate configs
1071 bbpaths = data.getVar('BBPATH', True).split(':')
1072 for path in bbpaths:
1073 confpath = os.path.join(path, "conf", var)
1074 if os.path.exists(confpath):
1075 for root, dirs, files in os.walk(confpath):
1076 # get all child files, these are appropriate values
1077 for f in files:
1078 val, sep, end = f.rpartition('.')
1079 if end == 'conf':
1080 possible.append(val)
1081
1082 if possible:
1083 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1084
1085 def findInheritsClass(self, klass):
1086 """
1087 Find all recipes which inherit the specified class
1088 """
1089 pkg_list = []
1090
1091 for pfn in self.recipecache.pkg_fn:
1092 inherits = self.recipecache.inherits.get(pfn, None)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001093 if inherits and klass in inherits:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001094 pkg_list.append(self.recipecache.pkg_fn[pfn])
1095
1096 return pkg_list
1097
1098 def generateTargetsTree(self, klass=None, pkgs=None):
1099 """
1100 Generate a dependency tree of buildable targets
1101 Generate an event with the result
1102 """
1103 # if the caller hasn't specified a pkgs list default to universe
1104 if not pkgs:
1105 pkgs = ['universe']
1106 # if inherited_class passed ensure all recipes which inherit the
1107 # specified class are included in pkgs
1108 if klass:
1109 extra_pkgs = self.findInheritsClass(klass)
1110 pkgs = pkgs + extra_pkgs
1111
1112 # generate a dependency tree for all our packages
1113 tree = self.generatePkgDepTreeData(pkgs, 'build')
1114 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1115
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001116 def interactiveMode( self ):
1117 """Drop off into a shell"""
1118 try:
1119 from bb import shell
1120 except ImportError:
1121 parselog.exception("Interactive mode not available")
1122 sys.exit(1)
1123 else:
1124 shell.start( self )
1125
1126
    def handleCollections( self, collections ):
        """Handle collections

        Process the BBFILE_COLLECTIONS value: read each layer's explicit
        BBFILE_PRIORITY, validate LAYERDEPENDS (including versioned
        dependencies), derive priorities for layers without an explicit
        one from their dependencies, and store
        (collection, pattern, compiled-regex, priority) tuples in
        self.recipecache.bbfile_config_priorities.  All detected problems
        are accumulated and reported before raising CollectionError.
        """
        errors = False
        self.recipecache.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    # NOTE(review): if int() failed above, 'prio' here is the
                    # value from a previous iteration (or unbound on the first)
                    # -- pre-existing behavior, left untouched.
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # None marks "compute from dependencies" for
                    # calc_layer_priority() below
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    try:
                        deplist = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in deplist.iteritems():
                        if dep in collection_list:
                            # Dependency is enabled; verify any version
                            # constraints (e.g. ">= 3") against LAYERVERSION
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = deplist.keys()
                else:
                    collection_depends[c] = []

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # Only layers without an explicit priority (None above) are
                # computed: one more than the highest priority among their
                # dependencies (recursing first), with min_prio as the floor.
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1212
1213 def buildSetVars(self):
1214 """
1215 Setup any variables needed before starting a build
1216 """
1217 t = time.gmtime()
1218 if not self.data.getVar("BUILDNAME", False):
1219 self.data.setVar("BUILDNAME", "${DATE}${TIME}")
1220 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1221 self.data.setVar("DATE", time.strftime('%Y%m%d', t))
1222 self.data.setVar("TIME", time.strftime('%H%M%S', t))
1223
1224 def matchFiles(self, bf):
1225 """
1226 Find the .bb files which match the expression in 'buildfile'.
1227 """
1228 if bf.startswith("/") or bf.startswith("../"):
1229 bf = os.path.abspath(bf)
1230
1231 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1232 filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
1233 try:
1234 os.stat(bf)
1235 bf = os.path.abspath(bf)
1236 return [bf]
1237 except OSError:
1238 regexp = re.compile(bf)
1239 matches = []
1240 for f in filelist:
1241 if regexp.search(f) and os.path.isfile(f):
1242 matches.append(f)
1243 return matches
1244
1245 def matchFile(self, buildfile):
1246 """
1247 Find the .bb file which matches the expression in 'buildfile'.
1248 Raise an error if multiple files
1249 """
1250 matches = self.matchFiles(buildfile)
1251 if len(matches) != 1:
1252 if matches:
1253 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1254 if matches:
1255 for f in matches:
1256 msg += "\n %s" % f
1257 parselog.error(msg)
1258 else:
1259 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1260 raise NoSpecificMatch
1261 return matches[0]
1262
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile

        Parses configuration and the single matched recipe (bypassing the
        parse cache), strips its external dependencies out of the recipe
        cache, then builds the requested task via a RunQueue driven from
        an idle callback.  'task' may be None (the configured default task
        is used) and may be given with or without the "do_" prefix.
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        # Split off any virtual class extension before matching the real file
        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                     self.data,
                                     self.caches_array)
        infos = dict(infos)

        # Look the parse result up under the (possibly virtual) filename
        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            if not task.startswith("do_"):
                task = "do_%s" % task
            bb.parse.siggen.invalidate_task(task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)

        # Execute the runqueue
        if not task.startswith("do_"):
            task = "do_%s" % task
        runlist = [[item, task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            # Idle-loop driver for the runqueue: returns True to be called
            # again, False when the build is finished/aborted, or a delay
            # value passed through from execute_runqueue().

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                # Runqueue drained: report completion and stop the callback
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1364
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Normalises the task name (default from configuration, "do_"
        prefix), computes taskdata/runlist via buildTaskData(), fires
        BuildStarted with targets rewritten to <target>:do_<task> form,
        and drives the resulting RunQueue from an idle callback.
        """

        def buildTargetsIdle(server, rq, abort):
            # Idle-loop driver for the runqueue: True means call again,
            # False means finished/aborted, any other value is a delay
            # passed through from execute_runqueue().
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                # 'buildname' is bound later in the enclosing scope, before
                # this callback is ever invoked.
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in fulltargetlist:
            if ":" in target:
                if ":do_" not in target:
                    target = "%s:do_%s" % tuple(target.split(":", 1))
            else:
                target = "%s:%s" % (target, task)
            ntargets.append(target)

        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1430
1431
1432 def getAllKeysWithFlags(self, flaglist):
1433 dump = {}
1434 for k in self.data.keys():
1435 try:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001436 expand = True
1437 flags = self.data.getVarFlags(k)
1438 if flags and "func" in flags and "python" in flags:
1439 expand = False
1440 v = self.data.getVar(k, expand)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001441 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1442 dump[k] = {
1443 'v' : v ,
1444 'history' : self.data.varhistory.variable(k),
1445 }
1446 for d in flaglist:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001447 if flags and d in flags:
1448 dump[k][d] = flags[d]
1449 else:
1450 dump[k][d] = None
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001451 except Exception as e:
1452 print(e)
1453 return dump
1454
1455
1456 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1457 '''
1458 Create a new image with a "require"/"inherit" base_image statement
1459 '''
1460 if timestamp:
1461 image_name = os.path.splitext(image)[0]
1462 timestr = time.strftime("-%Y%m%d-%H%M%S")
1463 dest = image_name + str(timestr) + ".bb"
1464 else:
1465 if not image.endswith(".bb"):
1466 dest = image + ".bb"
1467 else:
1468 dest = image
1469
1470 basename = False
1471 if base_image:
1472 with open(base_image, 'r') as f:
1473 require_line = f.readline()
1474 p = re.compile("IMAGE_BASENAME *=")
1475 for line in f:
1476 if p.search(line):
1477 basename = True
1478
1479 with open(dest, "w") as imagefile:
1480 if base_image is None:
1481 imagefile.write("inherit core-image\n")
1482 else:
1483 topdir = self.data.getVar("TOPDIR", False)
1484 if topdir in base_image:
1485 base_image = require_line.split()[1]
1486 imagefile.write("require " + base_image + "\n")
1487 image_install = "IMAGE_INSTALL = \""
1488 for package in package_queue:
1489 image_install += str(package) + " "
1490 image_install += "\"\n"
1491 imagefile.write(image_install)
1492
1493 description_var = "DESCRIPTION = \"" + description + "\"\n"
1494 imagefile.write(description_var)
1495
1496 if basename:
1497 # If this is overwritten in a inherited image, reset it to default
1498 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1499 imagefile.write(image_basename)
1500
1501 self.state = state.initial
1502 if timestamp:
1503 return timestr
1504
1505 def updateCacheSync(self):
1506 if self.state == state.running:
1507 return
1508
1509 # reload files for which we got notifications
1510 for p in self.inotify_modified_files:
1511 bb.parse.update_cache(p)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001512 if p in bb.parse.BBHandler.cached_statements:
1513 del bb.parse.BBHandler.cached_statements[p]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514 self.inotify_modified_files = []
1515
1516 if not self.baseconfig_valid:
1517 logger.debug(1, "Reloading base configuration data")
1518 self.initConfigurationData()
1519 self.baseconfig_valid = True
1520 self.parsecache_valid = False
1521
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Drive recipe parsing towards the 'running' state.

        Returns True while parsing is still in progress (caller should
        call again) and None once parsing has completed.  Raises
        BBHandledException on shutdown/error states or parse errors.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            # Abort: stop any in-flight parser without cleaning up
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            # (Re)build the list of files to parse and start a new parser
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            # Parser exhausted: finalise providers/priorities and go to
            # the running state
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            event = bb.event.ReachableStamps(self.recipecache.stamp)
            bb.event.fire(event, self.expanded_data)
            return None

        return True
1570
1571 def checkPackages(self, pkgs_to_build):
1572
1573 # Return a copy, don't modify the original
1574 pkgs_to_build = pkgs_to_build[:]
1575
1576 if len(pkgs_to_build) == 0:
1577 raise NothingToBuild
1578
1579 ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
1580 for pkg in pkgs_to_build:
1581 if pkg in ignore:
1582 parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1583
1584 if 'world' in pkgs_to_build:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001585 bb.providers.buildWorldTargetList(self.recipecache)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001586 pkgs_to_build.remove('world')
1587 for t in self.recipecache.world_target:
1588 pkgs_to_build.append(t)
1589
1590 if 'universe' in pkgs_to_build:
1591 parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
1592 parselog.debug(1, "collating packages for \"universe\"")
1593 pkgs_to_build.remove('universe')
1594 for t in self.recipecache.universe_target:
1595 pkgs_to_build.append(t)
1596
1597 return pkgs_to_build
1598
1599
1600
1601
    def pre_serve(self):
        """
        Prepare the cooker for serving: start the PR service.  On a PR
        service configuration error, fire CookerExit and enter the error
        state instead of raising.
        """
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.expanded_data)
            self.state = state.error
        return
1612
    def post_serve(self):
        """
        Tear down after serving: stop the PR service, notify clients via
        CookerExit, then re-acquire bitbake.lock exclusively before
        returning.  While the lock is held elsewhere, periodically report
        which processes appear to be holding it (via lsof, falling back to
        fuser) and keep retrying.
        """
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.expanded_data)
        lockfile = self.lock.name
        self.lock.close()
        self.lock = None

        while not self.lock:
            # Retry with a 3 second timeout per attempt
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        # ENOENT means the tool is missing; anything else is real
                        if e.errno != errno.ENOENT:
                            raise
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    # NOTE(review): a nonzero exit from lsof/fuser raises
                    # CalledProcessError, which is not caught here -- confirm
                    # whether that is intended (both tools exit nonzero when
                    # no process holds the file).
                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)
1643
1644
1645 def shutdown(self, force = False):
1646 if force:
1647 self.state = state.forceshutdown
1648 else:
1649 self.state = state.shutdown
1650
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001651 if self.parser:
1652 self.parser.shutdown(clean=not force, force=force)
1653
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001654 def finishcommand(self):
1655 self.state = state.initial
1656
    def reset(self):
        # Reload the base configuration data from scratch
        self.initConfigurationData()
1659
1660 def lockBitbake(self):
1661 if not hasattr(self, 'lock'):
1662 self.lock = None
1663 if self.data:
1664 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
1665 if lockfile:
1666 self.lock = bb.utils.lockfile(lockfile, False, False)
1667 return self.lock
1668
    def unlockBitbake(self):
        # Release bitbake.lock if lockBitbake() ever acquired it
        if hasattr(self, 'lock') and self.lock:
            bb.utils.unlockfile(self.lock)
1672
def server_main(cooker, func, *args):
    """
    Run func(*args) between cooker.pre_serve() and cooker.post_serve(),
    optionally under the profiler (cooker.configuration.profile), and
    return func's result.
    """
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            # Narrowed from a bare 'except:': only a missing cProfile
            # module should trigger the pure-python profiler fallback.
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1695
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)
1703
1704
1705class CookerCollectFiles(object):
    def __init__(self, priorities):
        # (recipe basename, bbappend path) pairs, filled by collect_bbfiles()
        self.bbappends = []
        # (collection, pattern, compiled regex, priority) tuples used by
        # calc_bbfile_priority() to rank .bb files
        self.bbfile_config_priorities = priorities
1709
1710 def calc_bbfile_priority( self, filename, matched = None ):
1711 for _, _, regex, pri in self.bbfile_config_priorities:
1712 if regex.match(filename):
1713 if matched != None:
1714 if not regex in matched:
1715 matched.add(regex)
1716 return pri
1717 return 0
1718
1719 def get_bbfiles(self):
1720 """Get list of default .bb files by reading out the current directory"""
1721 path = os.getcwd()
1722 contents = os.listdir(path)
1723 bbfiles = []
1724 for f in contents:
1725 if f.endswith(".bb"):
1726 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1727 return bbfiles
1728
1729 def find_bbfiles(self, path):
1730 """Find all the .bb and .bbappend files in a directory"""
1731 found = []
1732 for dir, dirs, files in os.walk(path):
1733 for ignored in ('SCCS', 'CVS', '.svn'):
1734 if ignored in dirs:
1735 dirs.remove(ignored)
1736 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
1737
1738 return found
1739
    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files

        Expands BBFILES (falling back to *.bb in the current directory),
        sorts by collection priority, expands directories/globs, applies
        BBMASK filtering, splits .bb from .bbappend files, and records
        overlayed recipes.  Returns (list of .bb files, masked count).
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                # Directory entry: pull in every .bb/.bbappend beneath it
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                # Treat the entry as a glob pattern; a literal path that
                # exists but matches no glob is kept as-is
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                # glob gives files in order on disk. Sort to be deterministic.
                for g in sorted(globbed):
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                # Highest-priority occurrence of this basename wins
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)
1830
1831 def get_file_appends(self, fn):
1832 """
1833 Returns a list of .bbappend files to apply to fn
1834 """
1835 filelist = []
1836 f = os.path.basename(fn)
1837 for b in self.bbappends:
1838 (bbappend, filename) = b
1839 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1840 filelist.append(filename)
1841 return filelist
1842
1843 def collection_priorities(self, pkgfns, d):
1844
1845 priorities = {}
1846
1847 # Calculate priorities for each file
1848 matched = set()
1849 for p in pkgfns:
1850 realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
1851 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1852
1853 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1854 unmatched = set()
1855 for _, _, regex, pri in self.bbfile_config_priorities:
1856 if not regex in matched:
1857 unmatched.add(regex)
1858
1859 def findmatch(regex):
1860 for b in self.bbappends:
1861 (bbfile, append) = b
1862 if regex.match(append):
1863 return True
1864 return False
1865
1866 for unmatch in unmatched.copy():
1867 if findmatch(unmatch):
1868 unmatched.remove(unmatch)
1869
1870 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1871 if regex in unmatched:
1872 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
1873 collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
1874
1875 return priorities
1876
class ParsingFailure(Exception):
    """Raised (via the parser result queue) when a recipe fails to parse.

    Carries the original exception together with the recipe filename so
    the main process can report which file failed.
    """
    def __init__(self, realexception, recipe):
        Exception.__init__(self, realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1882
class Feeder(multiprocessing.Process):
    """Process that hands parse jobs to the Parser worker pool.

    Jobs are drawn from the end of the ``jobs`` list and pushed onto the
    ``to_parsers`` queue; a 'cancel' message on ``quit`` aborts early.
    """
    def __init__(self, jobs, to_parsers, quit):
        multiprocessing.Process.__init__(self)
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers

    def run(self):
        while True:
            # Poll for a shutdown request; only 'cancel' stops us before
            # the job list is drained (other messages are consumed and
            # ignored).
            try:
                if self.quit.get_nowait() == 'cancel':
                    self.to_parsers.cancel_join_thread()
                    break
            except Queue.Empty:
                pass

            try:
                job = self.jobs.pop()
            except IndexError:
                # Every job has been handed out; we're done.
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                # Workers are busy; put the job back at the front and retry.
                self.jobs.insert(0, job)
1911
class Parser(multiprocessing.Process):
    """Worker process which parses recipes pulled from a job queue.

    Results (or exceptions) are placed on the results queue for the main
    process to consume. A class attribute ``cfg`` is expected to be set
    by the pool's init callback before parsing starts.
    """
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the context and event handlers at construction time so
        # each parse can start from a clean, identical state.
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # When profiling is enabled, wrap realrun() in a profiler and dump
        # per-process stats; otherwise just run directly.

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        """Main worker loop: pull jobs, parse, push results until told to quit."""
        if self.init:
            self.init()

        # Results that could not be delivered yet because the results
        # queue was full; retried before fetching new work.
        pending = []
        while True:
            # Any message on the quit queue means shut down.
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                # None is the sentinel for "no more work".
                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        """Parse one recipe; returns (True, cache-info) or (True, exception)."""
        try:
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            # Annotate the exception with the recipe and a captured
            # traceback so the main process can report it usefully.
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
1995
class CookerParser(object):
    """Drives recipe parsing: loads what it can from the cache and farms
    the rest out to a Feeder process plus a pool of Parser processes.

    parse_next() is called repeatedly by the cooker to pump results;
    statistics (parsed/cached/skipped/...) are accumulated as attributes.
    """
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.process_names = []

        # Split the file list into recipes with a valid cache entry and
        # recipes that need a real parse.
        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a progress event roughly every 1% of the files to parse.
        self.progress_chunk = max(self.toparse / 100, 1)

        # Never start more parser processes than there are jobs.
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count()), len(self.willparse))

        self.start()
        self.haveshutdown = False

    def start(self):
        """Spawn the feeder and parser processes and chain cached results
        with the live parse results into self.results."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Run once inside each parser process to set up its state.
            def init():
                Parser.cfg = self.cfgdata
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        """Stop feeder and parsers; 'clean' fires ParseCompleted and lets
        work drain, otherwise parsing is cancelled. 'force' terminates
        processes that do not exit promptly."""
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            for process in self.processes:
                self.parser_quit.put(None)
        else:
            # Cancelled parse: tell the feeder to abort and don't block on
            # flushing queue contents at exit.
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the recipe cache back in the background; the Finalize hook
        # makes sure the sync finishes before interpreter exit.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge the per-process profile logs into one processed report.
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def load_cached(self):
        """Yield (parsed, infos) tuples for recipes restored from the cache."""
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        """Yield live parse results from the workers; re-raises any
        exception a worker returned in place of a result."""
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        """Consume one result from self.results, updating statistics and
        the recipe cache. Returns False when parsing is finished or has
        failed (triggering shutdown), True otherwise."""
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s', value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Record every virtual recipe produced by this file.
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True

    def reparse(self, filename):
        """Force a fresh parse of filename and merge the results into the
        cooker's recipe cache."""
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)