Yocto 2.3

Move OpenBMC to Yocto 2.3 (pyro).

Tested: Built and verified Witherspoon and Palmetto images
Change-Id: I50744030e771f4850afc2a93a10d3507e76d36bc
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
Resolves: openbmc/openbmc#2461
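
Most of the mechanical churn in this import comes from bitbake 1.34 changing the
default of the expand parameter: d.getVar() and d.getVarFlag() now expand by
default, so the explicit True argument is dropped throughout. A minimal sketch of
the calling convention before and after (variable name illustrative):

    # bitbake 1.32 (morty): expansion had to be requested explicitly
    pf = d.getVar("PF", True)     # expanded value
    raw = d.getVar("PF", False)   # unexpanded value

    # bitbake 1.34 (pyro): expand now defaults to True
    pf = d.getVar("PF")           # expanded value
    raw = d.getVar("PF", False)   # unexpanded access stays explicit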
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/COW.py b/import-layers/yocto-poky/bitbake/lib/bb/COW.py
index 77a05cf..36ebbd9 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/COW.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/COW.py
@@ -213,11 +213,11 @@
     print()
 
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
-    for x in b.items():
+    for x in b.iteritems():
         print(x)
     print()
 
@@ -225,11 +225,11 @@
     b['a'] = 'c'
 
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
-    for x in b.items():
+    for x in b.iteritems():
         print(x)
     print()
 
@@ -244,22 +244,22 @@
     a['set'].add("o2")
 
     print("a", a)
-    for x in a['set'].values():
+    for x in a['set'].itervalues():
         print(x)
     print("--")
     print("b", b)
-    for x in b['set'].values():
+    for x in b['set'].itervalues():
         print(x)
     print()
 
     b['set'].add('o3')
 
     print("a", a)
-    for x in a['set'].values():
+    for x in a['set'].itervalues():
         print(x)
     print("--")
     print("b", b)
-    for x in b['set'].values():
+    for x in b['set'].itervalues():
         print(x)
     print()
 
@@ -269,7 +269,7 @@
     a['set2'].add("o2")
 
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
@@ -289,7 +289,7 @@
         print("Yay - has_key with delete works!")
 
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
@@ -300,7 +300,7 @@
     b.__revertitem__('b')
 
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
@@ -310,7 +310,7 @@
 
     b.__revertitem__('dict')
     print("a", a)
-    for x in a.items():
+    for x in a.iteritems():
         print(x)
     print("--")
     print("b", b)
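
Note the direction of these hunks: iteritems()/itervalues() here are not the
removed Python 2 dict methods but methods COW.py defines itself on its metaclass,
so a COW object has no items()/values() at all. The tree-wide Python 3 rename had
broken this demo code, and pyro restores the calls the class actually provides. A
minimal sketch of the same shape (Demo is a hypothetical stand-in for COWDictBase):

    class Demo:
        # A class whose only iteration API is a custom iteritems(),
        # independent of the builtin dict method names.
        def __init__(self):
            self._data = {}

        def __setitem__(self, key, value):
            self._data[key] = value

        def iteritems(self):
            return iter(self._data.items())

    d = Demo()
    d['a'] = 'a'
    for x in d.iteritems():   # fine on Python 3: it's Demo's own method
        print(x)
    # d.items() would raise AttributeError - no such method exists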
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
index f019d48..bfe0ca5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.32.0"
+__version__ = "1.34.0"
 
 import sys
 if sys.version_info < (3, 4, 0):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/build.py b/import-layers/yocto-poky/bitbake/lib/bb/build.py
index b59a49b..0d0100a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/build.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/build.py
@@ -91,14 +91,14 @@
 
     def __init__(self, t, logfile, d):
         self._task = t
-        self._package = d.getVar("PF", True)
-        self._mc = d.getVar("BB_CURRENT_MC", True)
-        self.taskfile = d.getVar("FILE", True)
+        self._package = d.getVar("PF")
+        self._mc = d.getVar("BB_CURRENT_MC")
+        self.taskfile = d.getVar("FILE")
         self.taskname = self._task
         self.logfile = logfile
         self.time = time.time()
         event.Event.__init__(self)
-        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
+        self._message = "recipe %s: task %s: %s" % (d.getVar("PF"), t, self.getDisplayName())
 
     def getTask(self):
         return self._task
@@ -195,13 +195,13 @@
         oldcwd = None
 
     flags = d.getVarFlags(func)
-    cleandirs = flags.get('cleandirs')
+    cleandirs = flags.get('cleandirs') if flags else None
     if cleandirs:
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
 
-    if dirs is None:
+    if flags and dirs is None:
         dirs = flags.get('dirs')
         if dirs:
             dirs = d.expand(dirs).split()
@@ -227,17 +227,17 @@
     else:
         lockfiles = None
 
-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')
 
     # or func allows items to be executed outside of the normal
     # task set, such as buildhistory
-    task = d.getVar('BB_RUNTASK', True) or func
+    task = d.getVar('BB_RUNTASK') or func
     if task == func:
         taskfunc = task
     else:
         taskfunc = "%s.%s" % (task, func)
 
-    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
     runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
     runfile = os.path.join(tempdir, runfn)
     bb.utils.mkdirhier(os.path.dirname(runfile))
@@ -369,7 +369,7 @@
 
     cmd = runfile
     if d.getVarFlag(func, 'fakeroot', False):
-        fakerootcmd = d.getVar('FAKEROOT', True)
+        fakerootcmd = d.getVar('FAKEROOT')
         if fakerootcmd:
             cmd = [fakerootcmd, runfile]
 
@@ -378,7 +378,7 @@
     else:
         logfile = sys.stdout
 
-    progress = d.getVarFlag(func, 'progress', True)
+    progress = d.getVarFlag(func, 'progress')
     if progress:
         if progress == 'percent':
             # Use default regex
@@ -430,7 +430,7 @@
             else:
                 break
 
-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')
     fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
     if os.path.exists(fifopath):
         os.unlink(fifopath)
@@ -443,7 +443,7 @@
                 with open(os.devnull, 'r+') as stdin:
                     bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
             except bb.process.CmdError:
-                logfn = d.getVar('BB_LOGFILE', True)
+                logfn = d.getVar('BB_LOGFILE')
                 raise FuncFailed(func, logfn)
         finally:
             os.unlink(fifopath)
@@ -474,18 +474,18 @@
     logger.debug(1, "Executing task %s", task)
 
     localdata = _task_data(fn, task, d)
-    tempdir = localdata.getVar('T', True)
+    tempdir = localdata.getVar('T')
     if not tempdir:
         bb.fatal("T variable not set, unable to build")
 
     # Change nice level if we're asked to
-    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
+    nice = localdata.getVar("BB_TASK_NICE_LEVEL")
     if nice:
         curnice = os.nice(0)
         nice = int(nice) - curnice
         newnice = os.nice(nice)
         logger.debug(1, "Renice to %s " % newnice)
-    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
+    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
     if ionice:
         try:
             cls, prio = ionice.split(".", 1)
@@ -496,7 +496,7 @@
     bb.utils.mkdirhier(tempdir)
 
     # Determine the logfile to generate
-    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
+    logfmt = localdata.getVar('BB_LOGFMT') or 'log.{task}.{pid}'
     logbase = logfmt.format(task=task, pid=os.getpid())
 
     # Document the order of the tasks...
@@ -563,6 +563,7 @@
 
     localdata.setVar('BB_LOGFILE', logfn)
     localdata.setVar('BB_RUNTASK', task)
+    localdata.setVar('BB_TASK_LOGGER', bblogger)
 
     flags = localdata.getVarFlags(task)
 
@@ -628,7 +629,7 @@
             quieterr = True
 
         if profile:
-            profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
+            profname = "profile-%s.log" % (d.getVar("PN") + "-" + task)
             try:
                 import cProfile as profile
             except:
@@ -668,9 +669,9 @@
         stamp = d.stamp[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMP', True)
-        file_name = d.getVar('BB_FILENAME', True)
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
+        stamp = d.getVar('STAMP')
+        file_name = d.getVar('BB_FILENAME')
+        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
 
     if baseonly:
         return stamp
@@ -704,9 +705,9 @@
         stamp = d.stampclean[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMPCLEAN', True)
-        file_name = d.getVar('BB_FILENAME', True)
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
+        stamp = d.getVar('STAMPCLEAN')
+        file_name = d.getVar('BB_FILENAME')
+        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
 
     if not stamp:
         return []
@@ -742,7 +743,7 @@
     # as it completes
     if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
         stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME', True)
+        file_name = d.getVar('BB_FILENAME')
         bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
 
 def del_stamp(task, d, file_name = None):
@@ -764,7 +765,7 @@
     if file_name:
         taintfn = d.stamp[file_name] + '.' + task + '.taint'
     else:
-        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
+        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
     bb.utils.mkdirhier(os.path.dirname(taintfn))
     # The specific content of the taint file is not really important,
     # we just need it to be random, so a random UUID is used
@@ -861,3 +862,46 @@
         if task in deps:
             deps.remove(task)
             d.setVarFlag(bbtask, 'deps', deps)
+
+def preceedtask(task, with_recrdeptasks, d):
+    """
+    Returns a set of tasks in the current recipe which were specified as
+    precondition by the task itself ("after") or which listed themselves
+    as precondition ("before"). Preceding tasks specified via the
+    "recrdeptask" are included in the result only if requested. Beware
+    that this may lead to the task itself being listed.
+    """
+    preceed = set()
+    preceed.update(d.getVarFlag(task, 'deps') or [])
+    if with_recrdeptasks:
+        recrdeptask = d.getVarFlag(task, 'recrdeptask')
+        if recrdeptask:
+            preceed.update(recrdeptask.split())
+    return preceed
+
+def tasksbetween(task_start, task_end, d):
+    """
+    Return the list of tasks between two tasks in the current recipe,
+    where task_start is to start at and task_end is the task to end at
+    (and task_end has a dependency chain back to task_start).
+    """
+    outtasks = []
+    tasks = list(filter(lambda k: d.getVarFlag(k, "task"), d.keys()))
+    def follow_chain(task, endtask, chain=None):
+        if not chain:
+            chain = []
+        chain.append(task)
+        for othertask in tasks:
+            if othertask == task:
+                continue
+            if task == endtask:
+                for ctask in chain:
+                    if ctask not in outtasks:
+                        outtasks.append(ctask)
+            else:
+                deps = d.getVarFlag(othertask, 'deps', False)
+                if task in deps:
+                    follow_chain(othertask, endtask, chain)
+        chain.pop()
+    follow_chain(task_start, task_end)
+    return outtasks
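
Both new helpers walk the per-task 'deps' varflag, which lists the tasks that must
run before a given task. A self-contained restatement of the tasksbetween() walk
over a toy datastore (FakeData and the task names are illustrative):

    class FakeData:
        # Stand-in for the BitBake datastore: each task carries a
        # 'task' marker flag and a 'deps' flag naming its predecessors.
        flags = {
            'do_fetch':   {'task': True, 'deps': []},
            'do_unpack':  {'task': True, 'deps': ['do_fetch']},
            'do_compile': {'task': True, 'deps': ['do_unpack']},
        }
        def keys(self):
            return self.flags.keys()
        def getVarFlag(self, var, flag, expand=True):
            return self.flags.get(var, {}).get(flag)

    def tasks_between(start, end, d):
        # Minimal restatement of bb.build.tasksbetween() above.
        out = []
        tasks = [k for k in d.keys() if d.getVarFlag(k, 'task')]
        def follow(task, chain):
            chain.append(task)
            if task == end:
                out.extend(t for t in chain if t not in out)
            else:
                for other in tasks:
                    if other != task and task in (d.getVarFlag(other, 'deps') or []):
                        follow(other, chain)
            chain.pop()
        follow(start, [])
        return out

    print(tasks_between('do_fetch', 'do_compile', FakeData()))
    # ['do_fetch', 'do_unpack', 'do_compile']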
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cache.py b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
index dd9cfdf..e7eeb4f 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cache.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
@@ -37,7 +37,7 @@
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "150"
+__cache_version__ = "151"
 
 def getCacheFile(path, filename, data_hash):
     return os.path.join(path, filename + "." + data_hash)
@@ -71,7 +71,7 @@
 
     @classmethod
     def flaglist(cls, flag, varlist, metadata, squash=False):
-        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
+        out_dict = dict((var, metadata.getVarFlag(var, flag))
                     for var in varlist)
         if squash:
             return dict((k,v) for (k,v) in out_dict.items() if v)
@@ -296,7 +296,7 @@
     bb_data.setVar("__BBMULTICONFIG", mc)
 
     # expand tmpdir to include this topdir
-    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
+    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
     bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
     oldpath = os.path.abspath(os.getcwd())
     bb.parse.cached_mtime_noerror(bbfile_loc)
@@ -378,7 +378,7 @@
         # It will be used later for deciding whether we 
         # need extra cache file dump/load support 
         self.caches_array = caches_array
-        self.cachedir = data.getVar("CACHE", True)
+        self.cachedir = data.getVar("CACHE")
         self.clean = set()
         self.checked = set()
         self.depends_cache = {}
@@ -462,6 +462,10 @@
                         self.depends_cache[key] = [value]
                     # only fire events on even percentage boundaries
                     current_progress = cachefile.tell() + previous_progress
+                    if current_progress > cachesize:
+                        # we might have calculated incorrect total size because a file
+                        # might've been written out just after we checked its size
+                        cachesize = current_progress
                     current_percent = 100 * current_progress / cachesize
                     if current_percent > previous_percent:
                         previous_percent = current_percent
@@ -792,8 +796,8 @@
         self.cachedata_extras = self.create_cachedata()
 
     def init_cache(self, d, cache_file_name=None):
-        cachedir = (d.getVar("PERSISTENT_DIR", True) or
-                    d.getVar("CACHE", True))
+        cachedir = (d.getVar("PERSISTENT_DIR") or
+                    d.getVar("CACHE"))
         if cachedir in [None, '']:
             return
         bb.utils.mkdirhier(cachedir)
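
The progress guard added to the cache loader handles a small race: the total size
is computed before the files are read, so a cache file appended to in the meantime
can push bytes-read past the estimate and the reported percentage past 100.
Clamping the total to the observed progress keeps the numbers sane; the same
guard in isolation:

    def progress_percent(bytes_read, estimated_total):
        # The estimate can be stale if a file grew after it was sized,
        # so never let the total fall below what was actually read.
        total = max(estimated_total, bytes_read)
        return 100 * bytes_read / total

    assert progress_percent(50, 100) == 50.0
    assert progress_percent(120, 100) == 100.0   # clamped, not 120%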
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
index 5d2d440..530f44e 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py
@@ -1,3 +1,22 @@
+"""
+BitBake code parser
+
+Parses actual code (i.e. python and shell) for functions and in-line
+expressions. Used mainly to determine dependencies on other functions
+and variables within the BitBake metadata. Also provides a cache for
+this information in order to speed up processing.
+
+(Not to be confused with the code that parses the metadata itself,
+see lib/bb/parse/ for that).
+
+NOTE: if you change how the parsers gather information you will almost
+certainly need to increment CodeParserCache.CACHE_VERSION below so that
+any existing codeparser cache gets invalidated. Additionally you'll need
+to increment __cache_version__ in cache.py in order to ensure that old
+recipe caches don't trigger "Taskhash mismatch" errors.
+
+"""
+
 import ast
 import sys
 import codegen
@@ -117,7 +136,11 @@
 
 class CodeParserCache(MultiProcessCache):
     cache_file_name = "bb_codeparser.dat"
-    CACHE_VERSION = 8
+    # NOTE: you must increment this if you change how the parsers gather information,
+    # so that an existing cache gets invalidated. Additionally you'll need
+    # to increment __cache_version__ in cache.py in order to ensure that old
+    # recipe caches don't trigger "Taskhash mismatch" errors.
+    CACHE_VERSION = 9
 
     def __init__(self):
         MultiProcessCache.__init__(self)
@@ -186,13 +209,15 @@
 
     def flush(self):
         for record in self.buffer:
-            self.target.handle(record)
+            if self.target.isEnabledFor(record.levelno):
+                self.target.handle(record)
         self.buffer = []
 
 class PythonParser():
     getvars = (".getVar", ".appendVar", ".prependVar")
     getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
-    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
+    containsfuncs = ("bb.utils.contains", "base_contains")
+    containsanyfuncs = ("bb.utils.contains_any", "bb.utils.filter")
     execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
 
     def warn(self, func, arg):
@@ -211,13 +236,17 @@
 
     def visit_Call(self, node):
         name = self.called_node_name(node.func)
-        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs):
+        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
             if isinstance(node.args[0], ast.Str):
                 varname = node.args[0].s
                 if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
                     if varname not in self.contains:
                         self.contains[varname] = set()
                     self.contains[varname].add(node.args[1].s)
+                elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str):
+                    if varname not in self.contains:
+                        self.contains[varname] = set()
+                    self.contains[varname].update(node.args[1].s.split())
                 elif name.endswith(self.getvarflags):
                     if isinstance(node.args[1], ast.Str):
                         self.references.add('%s[%s]' % (varname, node.args[1].s))
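
The parser now understands that bb.utils.contains_any() and bb.utils.filter() take
a space-separated list of values, each of which becomes a separate tracked
"contains" dependency (bb.utils.contains still records its second argument as a
single value). A compressed illustration of what visit_Call() extracts from such a
call (the snippet is hypothetical; the real parser handles many more cases):

    import ast

    snippet = 'bb.utils.contains_any("DISTRO_FEATURES", "systemd sysvinit", "y", "n", d)'
    contains = {}
    for node in ast.walk(ast.parse(snippet)):
        if isinstance(node, ast.Call) and node.args and isinstance(node.args[0], ast.Str):
            varname = node.args[0].s
            # contains_any/filter: split the value list, one entry per word
            contains.setdefault(varname, set()).update(node.args[1].s.split())
    print(contains)   # {'DISTRO_FEATURES': {'systemd', 'sysvinit'}}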
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/command.py b/import-layers/yocto-poky/bitbake/lib/bb/command.py
index caa3e4d..a919f58 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/command.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/command.py
@@ -28,8 +28,15 @@
 Commands are queued in a CommandQueue
 """
 
+from collections import OrderedDict, defaultdict
+
 import bb.event
 import bb.cooker
+import bb.remotedata
+
+class DataStoreConnectionHandle(object):
+    def __init__(self, dsindex=0):
+        self.dsindex = dsindex
 
 class CommandCompleted(bb.event.Event):
     pass
@@ -55,6 +62,7 @@
         self.cooker = cooker
         self.cmds_sync = CommandsSync()
         self.cmds_async = CommandsAsync()
+        self.remotedatastores = bb.remotedata.RemoteDatastores(cooker)
 
         # FIXME Add lock for this
         self.currentAsyncCommand = None
@@ -125,14 +133,20 @@
 
     def finishAsyncCommand(self, msg=None, code=None):
         if msg or msg == "":
-            bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
+            bb.event.fire(CommandFailed(msg), self.cooker.data)
         elif code:
-            bb.event.fire(CommandExit(code), self.cooker.expanded_data)
+            bb.event.fire(CommandExit(code), self.cooker.data)
         else:
-            bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
+            bb.event.fire(CommandCompleted(), self.cooker.data)
         self.currentAsyncCommand = None
         self.cooker.finishcommand()
 
+def split_mc_pn(pn):
+    if pn.startswith("multiconfig:"):
+        _, mc, pn = pn.split(":", 2)
+        return (mc, pn)
+    return ('', pn)
+
 class CommandsSync:
     """
     A class of synchronous commands
@@ -179,6 +193,7 @@
         """
         varname = params[0]
         value = str(params[1])
+        command.cooker.extraconfigdata[varname] = value
         command.cooker.data.setVar(varname, value)
 
     def getSetVariable(self, command, params):
@@ -295,9 +310,274 @@
     def updateConfig(self, command, params):
         options = params[0]
         environment = params[1]
-        command.cooker.updateConfigOpts(options, environment)
+        cmdline = params[2]
+        command.cooker.updateConfigOpts(options, environment, cmdline)
     updateConfig.needconfig = False
 
+    def parseConfiguration(self, command, params):
+        """Instruct bitbake to parse its configuration
+        NOTE: it is only necessary to call this if you aren't calling any normal action
+        (otherwise parsing is taken care of automatically)
+        """
+        command.cooker.parseConfiguration()
+    parseConfiguration.needconfig = False
+
+    def getLayerPriorities(self, command, params):
+        ret = []
+        # regex objects cannot be marshalled by xmlrpc
+        for collection, pattern, regex, pri in command.cooker.bbfile_config_priorities:
+            ret.append((collection, pattern, regex.pattern, pri))
+        return ret
+    getLayerPriorities.readonly = True
+
+    def getRecipes(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return list(command.cooker.recipecaches[mc].pkg_pn.items())
+    getRecipes.readonly = True
+
+    def getRecipeDepends(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return list(command.cooker.recipecaches[mc].deps.items())
+    getRecipeDepends.readonly = True
+
+    def getRecipeVersions(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return command.cooker.recipecaches[mc].pkg_pepvpr
+    getRecipeVersions.readonly = True
+
+    def getRuntimeDepends(self, command, params):
+        ret = []
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        rundeps = command.cooker.recipecaches[mc].rundeps
+        for key, value in rundeps.items():
+            if isinstance(value, defaultdict):
+                value = dict(value)
+            ret.append((key, value))
+        return ret
+    getRuntimeDepends.readonly = True
+
+    def getRuntimeRecommends(self, command, params):
+        ret = []
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        runrecs = command.cooker.recipecaches[mc].runrecs
+        for key, value in runrecs.items():
+            if isinstance(value, defaultdict):
+                value = dict(value)
+            ret.append((key, value))
+        return ret
+    getRuntimeRecommends.readonly = True
+
+    def getRecipeInherits(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return command.cooker.recipecaches[mc].inherits
+    getRecipeInherits.readonly = True
+
+    def getBbFilePriority(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return command.cooker.recipecaches[mc].bbfile_priority
+    getBbFilePriority.readonly = True
+
+    def getDefaultPreference(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return command.cooker.recipecaches[mc].pkg_dp
+    getDefaultPreference.readonly = True
+
+    def getSkippedRecipes(self, command, params):
+        # Return list sorted by reverse priority order
+        import bb.cache
+        skipdict = OrderedDict(sorted(command.cooker.skiplist.items(),
+                                      key=lambda x: (-command.cooker.collection.calc_bbfile_priority(bb.cache.virtualfn2realfn(x[0])[0]), x[0])))
+        return list(skipdict.items())
+    getSkippedRecipes.readonly = True
+
+    def getOverlayedRecipes(self, command, params):
+        return list(command.cooker.collection.overlayed.items())
+    getOverlayedRecipes.readonly = True
+
+    def getFileAppends(self, command, params):
+        fn = params[0]
+        return command.cooker.collection.get_file_appends(fn)
+    getFileAppends.readonly = True
+
+    def getAllAppends(self, command, params):
+        return command.cooker.collection.bbappends
+    getAllAppends.readonly = True
+
+    def findProviders(self, command, params):
+        return command.cooker.findProviders()
+    findProviders.readonly = True
+
+    def findBestProvider(self, command, params):
+        (mc, pn) = split_mc_pn(params[0])
+        return command.cooker.findBestProvider(pn, mc)
+    findBestProvider.readonly = True
+
+    def allProviders(self, command, params):
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+        return list(bb.providers.allProviders(command.cooker.recipecaches[mc]).items())
+    allProviders.readonly = True
+
+    def getRuntimeProviders(self, command, params):
+        rprovide = params[0]
+        try:
+            mc = params[1]
+        except IndexError:
+            mc = ''
+        all_p = bb.providers.getRuntimeProviders(command.cooker.recipecaches[mc], rprovide)
+        if all_p:
+            best = bb.providers.filterProvidersRunTime(all_p, rprovide,
+                            command.cooker.data,
+                            command.cooker.recipecaches[mc])[0][0]
+        else:
+            best = None
+        return all_p, best
+    getRuntimeProviders.readonly = True
+
+    def dataStoreConnectorFindVar(self, command, params):
+        dsindex = params[0]
+        name = params[1]
+        datastore = command.remotedatastores[dsindex]
+        value, overridedata = datastore._findVar(name)
+
+        if value:
+            content = value.get('_content', None)
+            if isinstance(content, bb.data_smart.DataSmart):
+                # Value is a datastore (e.g. BB_ORIGENV) - need to handle this carefully
+                idx = command.remotedatastores.check_store(content, True)
+                return {'_content': DataStoreConnectionHandle(idx),
+                        '_connector_origtype': 'DataStoreConnectionHandle',
+                        '_connector_overrides': overridedata}
+            elif isinstance(content, set):
+                return {'_content': list(content),
+                        '_connector_origtype': 'set',
+                        '_connector_overrides': overridedata}
+            else:
+                value['_connector_overrides'] = overridedata
+        else:
+            value = {}
+            value['_connector_overrides'] = overridedata
+        return value
+    dataStoreConnectorFindVar.readonly = True
+
+    def dataStoreConnectorGetKeys(self, command, params):
+        dsindex = params[0]
+        datastore = command.remotedatastores[dsindex]
+        return list(datastore.keys())
+    dataStoreConnectorGetKeys.readonly = True
+
+    def dataStoreConnectorGetVarHistory(self, command, params):
+        dsindex = params[0]
+        name = params[1]
+        datastore = command.remotedatastores[dsindex]
+        return datastore.varhistory.variable(name)
+    dataStoreConnectorGetVarHistory.readonly = True
+
+    def dataStoreConnectorExpandPythonRef(self, command, params):
+        config_data_dict = params[0]
+        varname = params[1]
+        expr = params[2]
+
+        config_data = command.remotedatastores.receive_datastore(config_data_dict)
+
+        varparse = bb.data_smart.VariableParse(varname, config_data)
+        return varparse.python_sub(expr)
+
+    def dataStoreConnectorRelease(self, command, params):
+        dsindex = params[0]
+        if dsindex <= 0:
+            raise CommandError('dataStoreConnectorRelease: invalid index %d' % dsindex)
+        command.remotedatastores.release(dsindex)
+
+    def dataStoreConnectorSetVarFlag(self, command, params):
+        dsindex = params[0]
+        name = params[1]
+        flag = params[2]
+        value = params[3]
+        datastore = command.remotedatastores[dsindex]
+        datastore.setVarFlag(name, flag, value)
+
+    def dataStoreConnectorDelVar(self, command, params):
+        dsindex = params[0]
+        name = params[1]
+        datastore = command.remotedatastores[dsindex]
+        if len(params) > 2:
+            flag = params[2]
+            datastore.delVarFlag(name, flag)
+        else:
+            datastore.delVar(name)
+
+    def dataStoreConnectorRenameVar(self, command, params):
+        dsindex = params[0]
+        name = params[1]
+        newname = params[2]
+        datastore = command.remotedatastores[dsindex]
+        datastore.renameVar(name, newname)
+
+    def parseRecipeFile(self, command, params):
+        """
+        Parse the specified recipe file (with or without bbappends)
+        and return a datastore object representing the environment
+        for the recipe.
+        """
+        fn = params[0]
+        appends = params[1]
+        appendlist = params[2]
+        if len(params) > 3:
+            config_data_dict = params[3]
+            config_data = command.remotedatastores.receive_datastore(config_data_dict)
+        else:
+            config_data = None
+
+        if appends:
+            if appendlist is not None:
+                appendfiles = appendlist
+            else:
+                appendfiles = command.cooker.collection.get_file_appends(fn)
+        else:
+            appendfiles = []
+        # We are calling bb.cache locally here rather than on the server,
+        # but that's OK because it doesn't actually need anything from
+        # the server barring the global datastore (which we have a remote
+        # version of)
+        if config_data:
+            # We have to use a different function here if we're passing in a datastore
+            # NOTE: we took a copy above, so we don't do it here again
+            envdata = bb.cache.parse_recipe(config_data, fn, appendfiles)['']
+        else:
+            # Use the standard path
+            parser = bb.cache.NoCache(command.cooker.databuilder)
+            envdata = parser.loadDataFull(fn, appendfiles)
+        idx = command.remotedatastores.store(envdata)
+        return DataStoreConnectionHandle(idx)
+    parseRecipeFile.readonly = True
+
 class CommandsAsync:
     """
     A class of asynchronous commands
@@ -311,8 +591,12 @@
         """
         bfile = params[0]
         task = params[1]
+        if len(params) > 2:
+            hidewarning = params[2]
+        else:
+            hidewarning = False
 
-        command.cooker.buildFile(bfile, task)
+        command.cooker.buildFile(bfile, task, hidewarning)
     buildFile.needcache = False
 
     def buildTargets(self, command, params):
@@ -472,3 +756,11 @@
         command.finishAsyncCommand()
     resetCooker.needcache = False
 
+    def clientComplete(self, command, params):
+        """
+        Do the right thing when the controlling client exits
+        """
+        command.cooker.clientComplete()
+        command.finishAsyncCommand()
+    clientComplete.needcache = False
+
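
The new command handlers follow the module's existing convention of tagging the
handler function itself with attributes (readonly, needconfig, needcache) that the
dispatcher inspects before running it. A minimal sketch of that dispatch pattern
(the class and the check are illustrative, not the real Command plumbing):

    class Commands:
        def getRecipes(self, params):
            return ['busybox', 'zlib']
        getRecipes.readonly = True    # declared safe for read-only connections

        def setVariable(self, params):
            print('would mutate server state:', params)

    def run_command(cmds, name, params, readonly_conn=False):
        handler = getattr(cmds, name)
        # Function attributes act as declarative metadata for the dispatcher.
        if readonly_conn and not getattr(handler, 'readonly', False):
            raise RuntimeError('%s not permitted on a read-only connection' % name)
        return handler(params)

    print(run_command(Commands(), 'getRecipes', [], readonly_conn=True))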
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
index 07897be..3c9e88c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
@@ -192,6 +192,8 @@
         bb.parse.__mtime_cache = {}
         bb.parse.BBHandler.cached_statements = {}
 
+        self.ui_cmdline = None
+
         self.initConfigurationData()
 
         # we log all events to a file if so directed
@@ -271,12 +273,15 @@
             self.inotify_modified_files.append(event.pathname)
         self.parsecache_valid = False
 
-    def add_filewatch(self, deps, watcher=None):
+    def add_filewatch(self, deps, watcher=None, dirs=False):
         if not watcher:
             watcher = self.watcher
         for i in deps:
             watcher.bbwatchedfiles.append(i[0])
-            f = os.path.dirname(i[0])
+            if dirs:
+                f = i[0]
+            else:
+                f = os.path.dirname(i[0])
             if f in watcher.bbseen:
                 continue
             watcher.bbseen.append(f)
@@ -331,7 +336,7 @@
         # Need to preserve BB_CONSOLELOG over resets
         consolelog = None
         if hasattr(self, "data"):
-            consolelog = self.data.getVar("BB_CONSOLELOG", True)
+            consolelog = self.data.getVar("BB_CONSOLELOG")
 
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.enableDataTracking()
@@ -358,17 +363,18 @@
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
         self.data_hash = self.databuilder.data_hash
+        self.extraconfigdata = {}
 
         if consolelog:
             self.data.setVar("BB_CONSOLELOG", consolelog)
 
+        self.data.setVar('BB_CMDLINE', self.ui_cmdline)
+
         #
         # Copy of the data store which has been expanded.
         # Used for firing events and accessing variables where expansion needs to be accounted for
         #
-        self.expanded_data = bb.data.createCopy(self.data)
-        bb.data.update_data(self.expanded_data)
-        bb.parse.init_parser(self.expanded_data)
+        bb.parse.init_parser(self.data)
 
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.disableDataTracking()
@@ -526,7 +532,7 @@
             bb.msg.loggerVerboseLogs = True
 
         # Change nice level if we're asked to
-        nice = self.data.getVar("BB_NICE_LEVEL", True)
+        nice = self.data.getVar("BB_NICE_LEVEL")
         if nice:
             curnice = os.nice(0)
             nice = int(nice) - curnice
@@ -539,9 +545,10 @@
         for mc in self.multiconfigs:
             self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
 
-        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
+        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
 
-    def updateConfigOpts(self, options, environment):
+    def updateConfigOpts(self, options, environment, cmdline):
+        self.ui_cmdline = cmdline
         clean = True
         for o in options:
             if o in ['prefile', 'postfile']:
@@ -583,13 +590,12 @@
 
     def showVersions(self):
 
-        pkg_pn = self.recipecaches[''].pkg_pn
-        (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecaches[''], pkg_pn)
+        (latest_versions, preferred_versions) = self.findProviders()
 
         logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
         logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
 
-        for p in sorted(pkg_pn):
+        for p in sorted(self.recipecaches[''].pkg_pn):
             pref = preferred_versions[p]
             latest = latest_versions[p]
 
@@ -619,7 +625,7 @@
             fn = self.matchFile(fn)
             fn = bb.cache.realfn2virtual(fn, cls, mc)
         elif len(pkgs_to_build) == 1:
-            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.data.getVar("ASSUME_PROVIDED") or ""
             if pkgs_to_build[0] in set(ignore.split()):
                 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
 
@@ -644,14 +650,13 @@
             logger.plain(env.getvalue())
 
         # emit variables and shell functions
-        data.update_data(envdata)
         with closing(StringIO()) as env:
             data.emit_env(env, envdata, True)
             logger.plain(env.getvalue())
 
         # emit the metadata which isnt valid shell
         data.expandKeys(envdata)
-        for e in envdata.keys():
+        for e in sorted(envdata.keys()):
             if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
 
@@ -705,7 +710,6 @@
         for mc in self.multiconfigs:
             taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
             localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
-            bb.data.update_data(localdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -766,7 +770,7 @@
     @staticmethod
     def add_mc_prefix(mc, pn):
         if mc:
-            return "multiconfig:%s.%s" % (mc, pn)
+            return "multiconfig:%s:%s" % (mc, pn)
         return pn
 
     def buildDependTree(self, rq, taskdata):
@@ -951,62 +955,54 @@
 
         depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
 
-        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
-        depends_file = open('pn-depends.dot', 'w' )
-        buildlist_file = open('pn-buildlist', 'w' )
-        print("digraph depends {", file=depends_file)
-        for pn in depgraph["pn"]:
-            fn = depgraph["pn"][pn]["filename"]
-            version = depgraph["pn"][pn]["version"]
-            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
-            print("%s" % pn, file=buildlist_file)
-        buildlist_file.close()
+        with open('pn-buildlist', 'w') as f:
+            for pn in depgraph["pn"]:
+                f.write(pn + "\n")
         logger.info("PN build list saved to 'pn-buildlist'")
-        for pn in depgraph["depends"]:
-            for depend in depgraph["depends"][pn]:
-                print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file)
-        for pn in depgraph["rdepends-pn"]:
-            for rdepend in depgraph["rdepends-pn"][pn]:
-                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
-        print("}", file=depends_file)
-        depends_file.close()
-        logger.info("PN dependencies saved to 'pn-depends.dot'")
 
-        depends_file = open('package-depends.dot', 'w' )
-        print("digraph depends {", file=depends_file)
-        for package in depgraph["packages"]:
-            pn = depgraph["packages"][package]["pn"]
-            fn = depgraph["packages"][package]["filename"]
-            version = depgraph["packages"][package]["version"]
-            if package == pn:
-                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
-            else:
-                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
-            for depend in depgraph["depends"][pn]:
-                print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file)
-        for package in depgraph["rdepends-pkg"]:
-            for rdepend in depgraph["rdepends-pkg"][package]:
-                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
-        for package in depgraph["rrecs-pkg"]:
-            for rdepend in depgraph["rrecs-pkg"][package]:
-                print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
-        print("}", file=depends_file)
-        depends_file.close()
-        logger.info("Package dependencies saved to 'package-depends.dot'")
+        # Remove old format output files to ensure no confusion with stale data
+        try:
+            os.unlink('pn-depends.dot')
+        except FileNotFoundError:
+            pass
+        try:
+            os.unlink('package-depends.dot')
+        except FileNotFoundError:
+            pass
 
-        tdepends_file = open('task-depends.dot', 'w' )
-        print("digraph depends {", file=tdepends_file)
-        for task in depgraph["tdepends"]:
-            (pn, taskname) = task.rsplit(".", 1)
-            fn = depgraph["pn"][pn]["filename"]
-            version = depgraph["pn"][pn]["version"]
-            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
-            for dep in depgraph["tdepends"][task]:
-                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
-        print("}", file=tdepends_file)
-        tdepends_file.close()
+        with open('task-depends.dot', 'w') as f:
+            f.write("digraph depends {\n")
+            for task in depgraph["tdepends"]:
+                (pn, taskname) = task.rsplit(".", 1)
+                fn = depgraph["pn"][pn]["filename"]
+                version = depgraph["pn"][pn]["version"]
+                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
+                for dep in depgraph["tdepends"][task]:
+                    f.write('"%s" -> "%s"\n' % (task, dep))
+            f.write("}\n")
         logger.info("Task dependencies saved to 'task-depends.dot'")
 
+        with open('recipe-depends.dot', 'w') as f:
+            f.write("digraph depends {\n")
+            pndeps = {}
+            for task in depgraph["tdepends"]:
+                (pn, taskname) = task.rsplit(".", 1)
+                if pn not in pndeps:
+                    pndeps[pn] = set()
+                for dep in depgraph["tdepends"][task]:
+                    (deppn, deptaskname) = dep.rsplit(".", 1)
+                    pndeps[pn].add(deppn)
+            for pn in pndeps:
+                fn = depgraph["pn"][pn]["filename"]
+                version = depgraph["pn"][pn]["version"]
+                f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
+                for dep in pndeps[pn]:
+                    if dep == pn:
+                        continue
+                    f.write('"%s" -> "%s"\n' % (pn, dep))
+            f.write("}\n")
+        logger.info("Flatened recipe dependencies saved to 'recipe-depends.dot'")
+
     def show_appends_with_no_recipes(self):
         # Determine which bbappends haven't been applied
 
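
recipe-depends.dot is now derived from the task graph rather than generated
separately: each "pn.task" node is projected onto its recipe and self-edges are
dropped when written out. The projection, restated on a toy graph:

    # Toy task graph: "recipe.task" -> tasks it depends on
    tdepends = {
        'app.do_compile': ['app.do_configure', 'libfoo.do_populate_sysroot'],
        'app.do_configure': ['app.do_unpack'],
        'libfoo.do_populate_sysroot': [],
    }

    pndeps = {}
    for task, deps in tdepends.items():
        pn = task.rsplit('.', 1)[0]
        pndeps.setdefault(pn, set())
        for dep in deps:
            deppn = dep.rsplit('.', 1)[0]
            if deppn != pn:    # self-edges are skipped, as in the .dot output
                pndeps[pn].add(deppn)
    print(pndeps)   # {'app': {'libfoo'}, 'libfoo': set()}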
@@ -1037,11 +1033,10 @@
 
         for mc in self.multiconfigs:
             localdata = data.createCopy(self.databuilder.mcdata[mc])
-            bb.data.update_data(localdata)
             bb.data.expandKeys(localdata)
 
             # Handle PREFERRED_PROVIDERS
-            for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
+            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                 try:
                     (providee, provider) = p.split(':')
                 except:
@@ -1052,7 +1047,7 @@
                 self.recipecaches[mc].preferred[providee] = provider
 
     def findCoreBaseFiles(self, subdir, configfile):
-        corebase = self.data.getVar('COREBASE', True) or ""
+        corebase = self.data.getVar('COREBASE') or ""
         paths = []
         for root, dirs, files in os.walk(corebase + '/' + subdir):
             for d in dirs:
@@ -1102,7 +1097,7 @@
         """
 
         matches = []
-        bbpaths = self.data.getVar('BBPATH', True).split(':')
+        bbpaths = self.data.getVar('BBPATH').split(':')
         for path in bbpaths:
             dirpath = os.path.join(path, directory)
             if os.path.exists(dirpath):
@@ -1114,6 +1109,20 @@
         if matches:
             bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
 
+    def findProviders(self, mc=''):
+        return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+
+    def findBestProvider(self, pn, mc=''):
+        if pn in self.recipecaches[mc].providers:
+            filenames = self.recipecaches[mc].providers[pn]
+            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
+            filename = eligible[0]
+            return None, None, None, filename
+        elif pn in self.recipecaches[mc].pkg_pn:
+            return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
+        else:
+            return None, None, None, None
+
     def findConfigFiles(self, varname):
         """
         Find config files which are appropriate values for varname.
@@ -1124,7 +1133,7 @@
 
         data = self.data
         # iterate configs
-        bbpaths = data.getVar('BBPATH', True).split(':')
+        bbpaths = data.getVar('BBPATH').split(':')
         for path in bbpaths:
             confpath = os.path.join(path, "conf", var)
             if os.path.exists(confpath):
@@ -1193,7 +1202,7 @@
                 bb.debug(1,'Processing %s in collection list' % (c))
 
                 # Get collection priority if defined explicitly
-                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
+                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                 if priority:
                     try:
                         prio = int(priority)
@@ -1207,7 +1216,7 @@
                     collection_priorities[c] = None
 
                 # Check dependencies and store information for priority calculation
-                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
+                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                 if deps:
                     try:
                         depDict = bb.utils.explode_dep_versions2(deps)
@@ -1216,7 +1225,7 @@
                     for dep, oplist in list(depDict.items()):
                         if dep in collection_list:
                             for opstr in oplist:
-                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
+                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                 (op, depver) = opstr.split()
                                 if layerver:
                                     try:
@@ -1237,7 +1246,7 @@
                     collection_depends[c] = []
 
                 # Check recommends and store information for priority calculation
-                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
+                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                 if recs:
                     try:
                         recDict = bb.utils.explode_dep_versions2(recs)
@@ -1247,7 +1256,7 @@
                         if rec in collection_list:
                             if oplist:
                                 opstr = oplist[0]
-                                layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
+                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                 if layerver:
                                     (op, recver) = opstr.split()
                                     try:
@@ -1281,17 +1290,21 @@
             # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
             for c in collection_list:
                 calc_layer_priority(c)
-                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
+                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                 if regex == None:
                     parselog.error("BBFILE_PATTERN_%s not defined" % c)
                     errors = True
                     continue
-                try:
-                    cre = re.compile(regex)
-                except re.error:
-                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
-                    errors = True
-                    continue
+                elif regex == "":
+                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+                    # keep cre bound for the append below; this pattern never
+                    # matches a real recipe path
+                    cre = re.compile('^NULL$')
+                    errors = False
+                else:
+                    try:
+                        cre = re.compile(regex)
+                    except re.error:
+                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
+                        errors = True
+                        continue
                 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
         if errors:
             # We've already printed the actual error(s)
@@ -1316,7 +1329,7 @@
             bf = os.path.abspath(bf)
 
         self.collection = CookerCollectFiles(self.bbfile_config_priorities)
-        filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
+        filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
         try:
             os.stat(bf)
             bf = os.path.abspath(bf)
@@ -1347,15 +1360,16 @@
             raise NoSpecificMatch
         return matches[0]
 
-    def buildFile(self, buildfile, task):
+    def buildFile(self, buildfile, task, hidewarning=False):
         """
         Build the file matching regexp buildfile
         """
-        bb.event.fire(bb.event.BuildInit(), self.expanded_data)
+        bb.event.fire(bb.event.BuildInit(), self.data)
 
-        # Too many people use -b because they think it's how you normally
-        # specify a target to be built, so show a warning
-        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
+        if not hidewarning:
+            # Too many people use -b because they think it's how you normally
+            # specify a target to be built, so show a warning
+            bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
 
         # Parse the configuration here. We need to do it explicitly here since
         # buildFile() doesn't use the cache
@@ -1392,6 +1406,7 @@
         item = info_array[0].pn
         self.recipecaches[mc].ignored_dependencies = set()
         self.recipecaches[mc].bbfile_priority[fn] = 1
+        self.configuration.limited_deps = True
 
         # Remove external dependencies
         self.recipecaches[mc].task_deps[fn]['depends'] = {}
@@ -1409,8 +1424,8 @@
         taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
         taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)
 
-        buildname = self.data.getVar("BUILDNAME", True)
-        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
+        buildname = self.data.getVar("BUILDNAME")
+        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.data)
 
         # Execute the runqueue
         runlist = [[mc, item, task, fn]]
@@ -1440,7 +1455,7 @@
                 return False
 
             if not retval:
-                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data)
+                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.data)
                 self.command.finishAsyncCommand(msg)
                 return False
             if retval is True:
@@ -1495,7 +1510,7 @@
 
         packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
 
-        bb.event.fire(bb.event.BuildInit(packages), self.expanded_data)
+        bb.event.fire(bb.event.BuildInit(packages), self.data)
 
         taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
 
@@ -1528,7 +1543,7 @@
                 v = self.data.getVar(k, expand)
                 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                     dump[k] = {
-    'v' : v ,
+    'v' : str(v) ,
     'history' : self.data.varhistory.variable(k),
                     }
                     for d in flaglist:
@@ -1627,14 +1642,18 @@
                     bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
 
             for mc in self.multiconfigs:
-                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
+                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
 
                 for dep in self.configuration.extra_assume_provided:
                     self.recipecaches[mc].ignored_dependencies.add(dep)
 
             self.collection = CookerCollectFiles(self.bbfile_config_priorities)
-            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
+            (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)
+
+            # Add inotify watches for directories searched for bb/bbappend files
+            for dirent in searchdirs:
+                self.add_filewatch([[dirent]], dirs=True)
 
             self.parser = CookerParser(self, filelist, masked)
             self.parsecache_valid = True
@@ -1668,7 +1687,7 @@
         if len(pkgs_to_build) == 0:
             raise NothingToBuild
 
-        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
+        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
         for pkg in pkgs_to_build:
             if pkg in ignore:
                 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1688,6 +1707,15 @@
             pkgs_to_build.remove('universe')
             for mc in self.multiconfigs:
                 for t in self.recipecaches[mc].universe_target:
+                    if task:
+                        foundtask = False
+                        for provider_fn in self.recipecaches[mc].providers[t]:
+                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
+                                foundtask = True
+                                break
+                        if not foundtask:
+                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
+                            continue
                     if mc:
                         t = "multiconfig:" + mc + ":" + t
                     pkgs_to_build.append(t)
@@ -1701,13 +1729,13 @@
         try:
             self.prhost = prserv.serv.auto_start(self.data)
         except prserv.serv.PRServiceConfigError:
-            bb.event.fire(CookerExit(), self.expanded_data)
+            bb.event.fire(CookerExit(), self.data)
             self.state = state.error
         return
 
     def post_serve(self):
         prserv.serv.auto_shutdown(self.data)
-        bb.event.fire(CookerExit(), self.expanded_data)
+        bb.event.fire(CookerExit(), self.data)
         lockfile = self.lock.name
         self.lock.close()
         self.lock = None
@@ -1745,6 +1773,8 @@
 
         if self.parser:
             self.parser.shutdown(clean=not force, force=force)
+        self.notifier.stop()
+        self.confignotifier.stop()
 
     def finishcommand(self):
         self.state = state.initial
@@ -1752,6 +1782,13 @@
     def reset(self):
         self.initConfigurationData()
 
+    def clientComplete(self):
+        """Called when the client is done using the server"""
+        if self.configuration.server_only:
+            self.finishcommand()
+        else:
+            self.shutdown(True)
+
     def lockBitbake(self):
         if not hasattr(self, 'lock'):
             self.lock = None
@@ -1838,7 +1875,7 @@
 
         collectlog.debug(1, "collecting .bb files")
 
-        files = (config.getVar( "BBFILES", True) or "").split()
+        files = (config.getVar( "BBFILES") or "").split()
         config.setVar("BBFILES", " ".join(files))
 
         # Sort files by priority
@@ -1851,30 +1888,49 @@
             collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
             bb.event.fire(CookerExit(), eventdata)
 
-        # Can't use set here as order is important
-        newfiles = []
-        for f in files:
-            if os.path.isdir(f):
-                dirfiles = self.find_bbfiles(f)
-                for g in dirfiles:
-                    if g not in newfiles:
-                        newfiles.append(g)
-            else:
-                globbed = glob.glob(f)
-                if not globbed and os.path.exists(f):
-                    globbed = [f]
-                # glob gives files in order on disk. Sort to be deterministic.
-                for g in sorted(globbed):
-                    if g not in newfiles:
-                        newfiles.append(g)
+        # We need to track where we look so that we can add inotify watches. There
+        # is no nice way to do this, this is horrid. We intercept the os.listdir()
+        # calls while we run glob().
+        origlistdir = os.listdir
+        searchdirs = []
 
-        bbmask = config.getVar('BBMASK', True)
+        def ourlistdir(d):
+            searchdirs.append(d)
+            return origlistdir(d)
+
+        os.listdir = ourlistdir
+        try:
+            # Can't use set here as order is important
+            newfiles = []
+            for f in files:
+                if os.path.isdir(f):
+                    dirfiles = self.find_bbfiles(f)
+                    for g in dirfiles:
+                        if g not in newfiles:
+                            newfiles.append(g)
+                else:
+                    globbed = glob.glob(f)
+                    if not globbed and os.path.exists(f):
+                        globbed = [f]
+                    # glob gives files in order on disk. Sort to be deterministic.
+                    for g in sorted(globbed):
+                        if g not in newfiles:
+                            newfiles.append(g)
+        finally:
+            os.listdir = origlistdir
+
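# A standalone sketch of the os.listdir() interception used in the hunk above,
# hedged: it relies on glob() resolving patterns via os.listdir(), which holds
# on the Python versions bitbake targeted here (newer Pythons route glob
# through os.scandir()). The helper name is illustrative, not part of the patch.
import glob
import os

def glob_with_searchdirs(pattern):
    searchdirs = []
    origlistdir = os.listdir

    def recording_listdir(path):
        # Record every directory the glob visits, then delegate.
        searchdirs.append(path)
        return origlistdir(path)

    os.listdir = recording_listdir
    try:
        matches = glob.glob(pattern)
    finally:
        # Always restore the real implementation, even if glob() raises.
        os.listdir = origlistdir
    return matches, searchdirs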
+        bbmask = config.getVar('BBMASK')
 
         if bbmask:
             # First validate the individual regular expressions and ignore any
             # that do not compile
             bbmasks = []
             for mask in bbmask.split():
+                # When constructing an older style single regex, it's possible for BBMASK
+                # to end up beginning with '|', which matches and masks _everything_.
+                if mask.startswith("|"):
+                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
+                    mask = mask[1:]
                 try:
                     re.compile(mask)
                     bbmasks.append(mask)
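# Why a leading '|' must be stripped, shown in isolation: the empty alternative
# on the left of '|' matches the empty string, so the pattern matches every
# path and would mask every recipe.
import re

assert re.search("|meta-foo/", "any/layer/recipe.bb")        # empty branch matches everything
assert not re.search("meta-foo/", "any/layer/recipe.bb")     # the stripped form is selective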
@@ -1921,7 +1977,7 @@
                 topfile = bbfile_seen[base]
                 self.overlayed[topfile].append(f)
 
-        return (bbfiles, masked)
+        return (bbfiles, masked, searchdirs)
 
     def get_file_appends(self, fn):
         """
@@ -1964,7 +2020,7 @@
 
         for collection, pattern, regex, _ in self.bbfile_config_priorities:
             if regex in unmatched:
-                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                     collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
 
         return priorities
@@ -2121,7 +2177,7 @@
         self.toparse = self.total - len(self.fromcache)
         self.progress_chunk = int(max(self.toparse / 100, 1))
 
-        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
+        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                  multiprocessing.cpu_count()), len(self.willparse))
 
         self.start()
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
index 98f56ac..e408a35 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
@@ -79,7 +79,7 @@
                   "prefile", "postfile"]:
             options[o] = getattr(self.options, o)
 
-        ret, error = server.runCommand(["updateConfig", options, environment])
+        ret, error = server.runCommand(["updateConfig", options, environment, sys.argv])
         if error:
                 raise Exception("Unable to update the server configuration with local parameters: %s" % error)
 
@@ -146,6 +146,9 @@
         self.tracking = False
         self.interface = []
         self.writeeventlog = False
+        self.server_only = False
+        self.limited_deps = False
+        self.runall = None
 
         self.env = {}
 
@@ -212,7 +215,7 @@
 
 def findConfigFile(configfile, data):
     search = []
-    bbpath = data.getVar("BBPATH", True)
+    bbpath = data.getVar("BBPATH")
     if bbpath:
         for i in bbpath.split(":"):
             search.append(os.path.join(i, "conf", configfile))
@@ -286,7 +289,7 @@
             self.data_hash = self.data.get_hash()
             self.mcdata[''] = self.data
 
-            multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split()
+            multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
             for config in multiconfig:
                 mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
                 bb.event.fire(bb.event.ConfigParsed(), mcdata)
@@ -320,7 +323,7 @@
             data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
             data = parse_config_file(layerconf, data)
 
-            layers = (data.getVar('BBLAYERS', True) or "").split()
+            layers = (data.getVar('BBLAYERS') or "").split()
 
             data = bb.data.createCopy(data)
             approved = bb.utils.approved_variables()
@@ -343,7 +346,7 @@
             data.delVar('LAYERDIR_RE')
             data.delVar('LAYERDIR')
 
-        if not data.getVar("BBPATH", True):
+        if not data.getVar("BBPATH"):
             msg = "The BBPATH variable is not set"
             if not layerconf:
                 msg += (" and bitbake did not find a conf/bblayers.conf file in"
@@ -358,7 +361,7 @@
             data = parse_config_file(p, data)
 
         # Handle any INHERITs and inherit the base class
-        bbclasses  = ["base"] + (data.getVar('INHERIT', True) or "").split()
+        bbclasses  = ["base"] + (data.getVar('INHERIT') or "").split()
         for bbclass in bbclasses:
             data = _inherit(bbclass, data)
 
@@ -370,7 +373,7 @@
                 parselog.critical("Undefined event handler function '%s'" % var)
                 sys.exit(1)
             handlerln = int(data.getVarFlag(var, "lineno", False))
-            bb.event.register(var, data.getVar(var, False),  (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
+            bb.event.register(var, data.getVar(var, False),  (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
 
         data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data.py b/import-layers/yocto-poky/bitbake/lib/bb/data.py
index c56965c..134afaa 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/data.py
@@ -78,59 +78,6 @@
     """Non-destructive var init for data structure"""
     d.initVar(var)
 
-
-def setVar(var, value, d):
-    """Set a variable to a given value"""
-    d.setVar(var, value)
-
-
-def getVar(var, d, exp = False):
-    """Gets the value of a variable"""
-    return d.getVar(var, exp)
-
-
-def renameVar(key, newkey, d):
-    """Renames a variable from key to newkey"""
-    d.renameVar(key, newkey)
-
-def delVar(var, d):
-    """Removes a variable from the data set"""
-    d.delVar(var)
-
-def appendVar(var, value, d):
-    """Append additional value to a variable"""
-    d.appendVar(var, value)
-
-def setVarFlag(var, flag, flagvalue, d):
-    """Set a flag for a given variable to a given value"""
-    d.setVarFlag(var, flag, flagvalue)
-
-def getVarFlag(var, flag, d):
-    """Gets given flag from given var"""
-    return d.getVarFlag(var, flag, False)
-
-def delVarFlag(var, flag, d):
-    """Removes a given flag from the variable's flags"""
-    d.delVarFlag(var, flag)
-
-def setVarFlags(var, flags, d):
-    """Set the flags for a given variable
-
-    Note:
-        setVarFlags will not clear previous
-        flags. Think of this method as
-        addVarFlags
-    """
-    d.setVarFlags(var, flags)
-
-def getVarFlags(var, d):
-    """Gets a variable's flags"""
-    return d.getVarFlags(var)
-
-def delVarFlags(var, d):
-    """Removes a variable's flags"""
-    d.delVarFlags(var)
-
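# The module-level accessors deleted above were thin wrappers over the
# datastore's own methods, so callers migrate mechanically; a hedged
# before/after sketch (d is any datastore instance):
#
#   bb.data.setVar("FOO", "bar", d)       ->  d.setVar("FOO", "bar")
#   bb.data.getVar("FOO", d, True)        ->  d.getVar("FOO")
#   bb.data.delVar("FOO", d)              ->  d.delVar("FOO")
#   bb.data.getVarFlag("FOO", "flag", d)  ->  d.getVarFlag("FOO", "flag", False)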
 def keys(d):
     """Return a list of keys in d"""
     return d.keys()
@@ -174,7 +121,7 @@
     for s in savedenv.keys():
         if s in permitted:
             try:
-                d.setVar(s, savedenv.getVar(s, True), op = 'from env')
+                d.setVar(s, savedenv.getVar(s), op = 'from env')
                 if s in exportlist:
                     d.setVarFlag(s, "export", True, op = 'auto env export')
             except TypeError:
@@ -194,7 +141,7 @@
     try:
         if all:
             oval = d.getVar(var, False)
-        val = d.getVar(var, True)
+        val = d.getVar(var)
     except (KeyboardInterrupt, bb.build.FuncFailed):
         raise
     except Exception as exc:
@@ -249,7 +196,7 @@
     keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
     grouped = groupby(keys, isfunc)
     for isfunc, keys in grouped:
-        for key in keys:
+        for key in sorted(keys):
             emit_var(key, o, d, all and not isfunc) and o.write('\n')
 
 def exported_keys(d):
@@ -261,9 +208,9 @@
     k = list(exported_keys(d))
     for key in k:
         try:
-            value = d.getVar(key, True)
+            value = d.getVar(key)
         except Exception as err:
-            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE", True), key, err))
+            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err))
             continue
 
         if value is not None:
@@ -273,13 +220,13 @@
     """Emits all items in the data store in a format such that it can be sourced by a shell."""
 
     keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
-    for key in keys:
+    for key in sorted(keys):
         emit_var(key, o, d, False)
 
     o.write('\n')
     emit_var(func, o, d, False) and o.write('\n')
-    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
-    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
+    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
+    newdeps |= set((d.getVarFlag(func, "vardeps") or "").split())
     seen = set()
     while newdeps:
         deps = newdeps
@@ -288,8 +235,8 @@
         for dep in deps:
             if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
                emit_var(dep, o, d, False) and o.write('\n')
-               newdeps |=  bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
-               newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
+               newdeps |=  bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
+               newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
         newdeps -= seen
 
 _functionfmt = """
@@ -312,7 +259,7 @@
     pp = bb.codeparser.PythonParser(func, logger)
     pp.parse_python(d.getVar(func, False))
     newdeps = pp.execs
-    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
+    newdeps |= set((d.getVarFlag(func, "vardeps") or "").split())
     seen = set()
     while newdeps:
         deps = newdeps
@@ -324,7 +271,7 @@
                pp = bb.codeparser.PythonParser(dep, logger)
                pp.parse_python(d.getVar(dep, False))
                newdeps |= pp.execs
-               newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
+               newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
         newdeps -= seen
 
 def update_data(d):
@@ -348,12 +295,14 @@
         def handle_contains(value, contains, d):
             newvalue = ""
             for k in sorted(contains):
-                l = (d.getVar(k, True) or "").split()
-                for word in sorted(contains[k]):
-                    if word in l:
-                        newvalue += "\n%s{%s} = Set" %  (k, word)
+                l = (d.getVar(k) or "").split()
+                for item in sorted(contains[k]):
+                    for word in item.split():
+                        if not word in l:
+                            newvalue += "\n%s{%s} = Unset" % (k, item)
+                            break
                     else:
-                        newvalue += "\n%s{%s} = Unset" %  (k, word)
+                        newvalue += "\n%s{%s} = Set" % (k, item)
             if not newvalue:
                 return value
             if not value:
@@ -366,7 +315,7 @@
             if varflags.get("python"):
                 parser = bb.codeparser.PythonParser(key, logger)
                 if value and "\t" in value:
-                    logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
+                    logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
                 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
                 deps = deps | parser.references
                 deps = deps | (keys & parser.execs)
@@ -410,6 +359,8 @@
 
         deps |= set((vardeps or "").split())
         deps -= set(varflags.get("vardepsexclude", "").split())
+    except bb.parse.SkipRecipe:
+        raise
     except Exception as e:
         bb.warn("Exception during build_dependencies for %s" % key)
         raise
@@ -421,7 +372,7 @@
 
     keys = set(key for key in d if not key.startswith("__"))
     shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
-    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
+    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
 
     deps = {}
     values = {}
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
index 805a9a7..7dc1c68 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py
@@ -108,7 +108,7 @@
                 varparse = self.d.expand_cache[key]
                 var = varparse.value
             else:
-                var = self.d.getVarFlag(key, "_content", True)
+                var = self.d.getVarFlag(key, "_content")
             self.references.add(key)
             if var is not None:
                 return var
@@ -116,13 +116,21 @@
                 return match.group()
 
     def python_sub(self, match):
-            code = match.group()[3:-1]
+            if isinstance(match, str):
+                code = match
+            else:
+                code = match.group()[3:-1]
+
+            if "_remote_data" in self.d:
+                connector = self.d["_remote_data"]
+                return connector.expandPythonRef(self.varname, code, self.d)
+
             codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
 
             parser = bb.codeparser.PythonParser(self.varname, logger)
             parser.parse_python(code)
             if self.varname:
-                vardeps = self.d.getVarFlag(self.varname, "vardeps", True)
+                vardeps = self.d.getVarFlag(self.varname, "vardeps")
                 if vardeps is None:
                     parser.log.flush()
             else:
@@ -146,7 +154,7 @@
         self['d'] = metadata
 
     def __missing__(self, key):
-        value = self.metadata.getVar(key, True)
+        value = self.metadata.getVar(key)
         if value is None or self.metadata.getVarFlag(key, 'func', False):
             raise KeyError(key)
         else:
@@ -222,6 +230,19 @@
         new.variables = self.variables.copy()
         return new
 
+    def __getstate__(self):
+        vardict = {}
+        for k, v in self.variables.iteritems():
+            vardict[k] = v
+        return {'dataroot': self.dataroot,
+                'variables': vardict}
+
+    def __setstate__(self, state):
+        self.dataroot = state['dataroot']
+        self.variables = COWDictBase.copy()
+        for k, v in state['variables'].items():
+            self.variables[k] = v
+
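# A minimal sketch of the pickling pattern added above, under the assumption
# that only the COW-backed 'variables' container needs translation: convert to
# a plain dict in __getstate__, rebuild the real container in __setstate__.
import pickle

class History:
    def __init__(self):
        self.variables = {}              # stands in for COWDictBase.copy()

    def __getstate__(self):
        return {'variables': dict(self.variables)}

    def __setstate__(self, state):
        self.variables = {}              # recreate the non-picklable container here
        self.variables.update(state['variables'])

h = History()
h.variables['PN'] = ['set: local.conf']
assert pickle.loads(pickle.dumps(h)).variables == h.variables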
     def record(self, *kwonly, **loginfo):
         if not self.dataroot._tracking:
             return
@@ -247,10 +268,15 @@
         self.variables[var].append(loginfo.copy())
 
     def variable(self, var):
-        if var in self.variables:
-            return self.variables[var]
+        remote_connector = self.dataroot.getVar('_remote_data', False)
+        if remote_connector:
+            varhistory = remote_connector.getVarHistory(var)
         else:
-            return []
+            varhistory = []
+
+        if var in self.variables:
+            varhistory.extend(self.variables[var])
+        return varhistory
 
     def emit(self, var, oval, val, o, d):
         history = self.variable(var)
@@ -318,7 +344,7 @@
         the files in which they were added.
         """
         history = self.variable(var)
-        finalitems = (d.getVar(var, True) or '').split()
+        finalitems = (d.getVar(var) or '').split()
         filemap = {}
         isset = False
         for event in history:
@@ -426,11 +452,11 @@
             # Can end up here recursively so setup dummy values
             self.overrides = []
             self.overridesset = set()
-            self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
             self.overridesset = set(self.overrides)
             self.inoverride = False
             self.expand_cache = {}
-            newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+            newoverrides = (self.getVar("OVERRIDES") or "").split(":") or []
             if newoverrides == self.overrides:
                 break
             self.overrides = newoverrides
@@ -447,17 +473,22 @@
         dest = self.dict
         while dest:
             if var in dest:
-                return dest[var]
+                return dest[var], self.overridedata.get(var, None)
+
+            if "_remote_data" in dest:
+                connector = dest["_remote_data"]["_content"]
+                return connector.getVar(var)
 
             if "_data" not in dest:
                 break
             dest = dest["_data"]
+        return None, self.overridedata.get(var, None)
 
     def _makeShadowCopy(self, var):
         if var in self.dict:
             return
 
-        local_var = self._findVar(var)
+        local_var, _ = self._findVar(var)
 
         if local_var:
             self.dict[var] = copy.copy(local_var)
@@ -471,6 +502,12 @@
         if 'parsing' in loginfo:
             parsing=True
 
+        if '_remote_data' in self.dict:
+            connector = self.dict["_remote_data"]["_content"]
+            res = connector.setVar(var, value)
+            if not res:
+                return
+
         if 'op' not in loginfo:
             loginfo['op'] = "set"
         self.expand_cache = {}
@@ -509,6 +546,8 @@
                 del self.dict[var]["_append"]
             if "_prepend" in self.dict[var]:
                 del self.dict[var]["_prepend"]
+            if "_remove" in self.dict[var]:
+                del self.dict[var]["_remove"]
             if var in self.overridedata:
                 active = []
                 self.need_overrides()
@@ -541,7 +580,7 @@
             nextnew = set()
             self.overridevars.update(new)
             for i in new:
-                vardata = self.expandWithRefs(self.getVar(i, True), i)
+                vardata = self.expandWithRefs(self.getVar(i), i)
                 nextnew.update(vardata.references)
                 nextnew.update(vardata.contains.keys())
             new = nextnew
@@ -565,13 +604,19 @@
                 if len(shortvar) == 0:
                     override = None
 
-    def getVar(self, var, expand, noweakdefault=False, parsing=False):
+    def getVar(self, var, expand=True, noweakdefault=False, parsing=False):
         return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
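# The signature change above drives most of this commit: expand now defaults
# to True, so the overwhelmingly common call shrinks. Equivalences:
#
#   d.getVar("BBPATH", True)   ->  d.getVar("BBPATH")    # expanded value
#   d.getVar("BBPATH", False)  ->  unchanged             # raw, unexpanded value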
 
     def renameVar(self, key, newkey, **loginfo):
         """
         Rename the variable key to newkey
         """
+        if '_remote_data' in self.dict:
+            connector = self.dict["_remote_data"]["_content"]
+            res = connector.renameVar(key, newkey)
+            if not res:
+                return
+
         val = self.getVar(key, 0, parsing=True)
         if val is not None:
             loginfo['variable'] = newkey
@@ -615,6 +660,12 @@
         self.setVar(var + "_prepend", value, ignore=True, parsing=True)
 
     def delVar(self, var, **loginfo):
+        if '_remote_data' in self.dict:
+            connector = self.dict["_remote_data"]["_content"]
+            res = connector.delVar(var)
+            if not res:
+                return
+
         loginfo['detail'] = ""
         loginfo['op'] = 'del'
         self.varhistory.record(**loginfo)
@@ -641,6 +692,12 @@
                          override = None
 
     def setVarFlag(self, var, flag, value, **loginfo):
+        if '_remote_data' in self.dict:
+            connector = self.dict["_remote_data"]["_content"]
+            res = connector.setVarFlag(var, flag, value)
+            if not res:
+                return
+
         self.expand_cache = {}
         if 'op' not in loginfo:
             loginfo['op'] = "set"
@@ -662,14 +719,14 @@
                 self.dict["__exportlist"]["_content"] = set()
             self.dict["__exportlist"]["_content"].add(var)
 
-    def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False):
-        local_var = self._findVar(var)
+    def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False):
+        local_var, overridedata = self._findVar(var)
         value = None
-        if flag == "_content" and var in self.overridedata and not parsing:
+        if flag == "_content" and overridedata is not None and not parsing:
             match = False
             active = {}
             self.need_overrides()
-            for (r, o) in self.overridedata[var]:
+            for (r, o) in overridedata:
                 # What about double overrides both with "_" in the name?
                 if o in self.overridesset:
                     active[o] = r
@@ -759,8 +816,14 @@
         return value
 
     def delVarFlag(self, var, flag, **loginfo):
+        if '_remote_data' in self.dict:
+            connector = self.dict["_remote_data"]["_content"]
+            res = connector.delVarFlag(var, flag)
+            if not res:
+                return
+
         self.expand_cache = {}
-        local_var = self._findVar(var)
+        local_var, _ = self._findVar(var)
         if not local_var:
             return
         if not var in self.dict:
@@ -803,7 +866,7 @@
             self.dict[var][i] = flags[i]
 
     def getVarFlags(self, var, expand = False, internalflags=False):
-        local_var = self._findVar(var)
+        local_var, _ = self._findVar(var)
         flags = {}
 
         if local_var:
@@ -845,7 +908,7 @@
         data = DataSmart()
         data.dict["_data"] = self.dict
         data.varhistory = self.varhistory.copy()
-        data.varhistory.datasmart = data
+        data.varhistory.dataroot = data
         data.inchistory = self.inchistory.copy()
 
         data._tracking = self._tracking
@@ -876,7 +939,7 @@
 
     def localkeys(self):
         for key in self.dict:
-            if key != '_data':
+            if key not in ['_data', '_remote_data']:
                 yield key
 
     def __iter__(self):
@@ -885,7 +948,7 @@
         def keylist(d):        
             klist = set()
             for key in d:
-                if key == "_data":
+                if key in ["_data", "_remote_data"]:
                     continue
                 if key in deleted:
                     continue
@@ -899,6 +962,13 @@
             if "_data" in d:
                 klist |= keylist(d["_data"])
 
+            if "_remote_data" in d:
+                connector = d["_remote_data"]["_content"]
+                for key in connector.getKeys():
+                    if key in deleted:
+                        continue
+                    klist.add(key)
+
             return klist
 
         self.need_overrides()
@@ -936,9 +1006,8 @@
         data = {}
         d = self.createCopy()
         bb.data.expandKeys(d)
-        bb.data.update_data(d)
 
-        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
+        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split())
         keys = set(key for key in iter(d) if not key.startswith("__"))
         for key in keys:
             if key in config_whitelist:
@@ -957,7 +1026,6 @@
 
         for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]:
             bb_list = d.getVar(key, False) or []
-            bb_list.sort()
             data.update({key:str(bb_list)})
 
             if key == "__BBANONFUNCS":
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/event.py b/import-layers/yocto-poky/bitbake/lib/bb/event.py
index 6f1cb10..6d8493b 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/event.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/event.py
@@ -48,6 +48,16 @@
     def __init__(self):
         self.pid = worker_pid
 
+
+class HeartbeatEvent(Event):
+    """Triggered at regular time intervals of 10 seconds. Other events can fire much more often
+       (runQueueTaskStarted when there are many short tasks) or not at all for long periods
+       of time (again runQueueTaskStarted, when there is just one long-running task), so this
+       event is more suitable for doing some task-independent work occasionally."""
+    def __init__(self, time):
+        Event.__init__(self)
+        self.time = time
+
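# A hedged sketch of consuming the new event from metadata; the handler name
# and the work it does are illustrative, not part of this patch:
#
#   addhandler heartbeat_logger
#   heartbeat_logger[eventmask] = "bb.event.HeartbeatEvent"
#   python heartbeat_logger() {
#       # e.time carries the timestamp the heartbeat fired at
#       bb.note("heartbeat at %s" % e.time)
#   }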
 Registered        = 10
 AlreadyRegistered = 14
 
@@ -351,6 +361,17 @@
 class RecipePreFinalise(RecipeEvent):
     """ Recipe Parsing Complete but not yet finialised"""
 
+class RecipeTaskPreProcess(RecipeEvent):
+    """
+    Recipe Tasks about to be finalised
+    The list of tasks should be final at this point and handlers
+    are only able to change interdependencies
+    """
+    def __init__(self, fn, tasklist):
+        self.fn = fn
+        self.tasklist = tasklist
+        Event.__init__(self)
+
 class RecipeParsed(RecipeEvent):
     """ Recipe Parsing Complete """
 
@@ -372,7 +393,7 @@
     targets = property(getTargets)
 
 class BuildBase(Event):
-    """Base class for bbmake run events"""
+    """Base class for bitbake build events"""
 
     def __init__(self, n, p, failures = 0):
         self._name = n
@@ -417,13 +438,13 @@
         BuildBase.__init__(self, name, p)
 
 class BuildStarted(BuildBase, OperationStarted):
-    """bbmake build run started"""
+    """Event when builds start"""
     def __init__(self, n, p, failures = 0):
         OperationStarted.__init__(self, "Building Started")
         BuildBase.__init__(self, n, p, failures)
 
 class BuildCompleted(BuildBase, OperationCompleted):
-    """bbmake build run completed"""
+    """Event when builds have completed"""
     def __init__(self, total, n, p, failures=0, interrupted=0):
         if not failures:
             OperationCompleted.__init__(self, total, "Building Succeeded")
@@ -441,6 +462,23 @@
         self._free = freespace
         self._mountpoint = mountpoint
 
+class DiskUsageSample:
+    def __init__(self, available_bytes, free_bytes, total_bytes):
+        # Number of bytes available to non-root processes.
+        self.available_bytes = available_bytes
+        # Number of bytes available to root processes.
+        self.free_bytes = free_bytes
+        # Total capacity of the volume.
+        self.total_bytes = total_bytes
+
+class MonitorDiskEvent(Event):
+    """If BB_DISKMON_DIRS is set, then this event gets triggered each time disk space is checked.
+       Provides information about devices that are getting monitored."""
+    def __init__(self, disk_usage):
+        Event.__init__(self)
+        # hash of device root path -> DiskUsageSample
+        self.disk_usage = disk_usage
+
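# A hedged sketch of filling DiskUsageSample from os.statvfs(); the helper
# name is illustrative and not part of this patch.
import os

def sample_disk_usage(path):
    st = os.statvfs(path)
    return DiskUsageSample(
        available_bytes=st.f_bavail * st.f_frsize,   # usable by non-root processes
        free_bytes=st.f_bfree * st.f_frsize,         # usable by root
        total_bytes=st.f_blocks * st.f_frsize)       # total volume capacity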
 class NoProvider(Event):
     """No Provider for an Event"""
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index cd7362c..b853da3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -35,9 +35,9 @@
 import collections
 import subprocess
 import pickle
+import errno
 import bb.persist_data, bb.utils
 import bb.checksum
-from bb import data
 import bb.process
 
 __version__ = "2"
@@ -355,7 +355,7 @@
     user, password, parameters).
     """
 
-    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
+    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
     if not m:
         raise MalformedUrl(url)
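# Why the user group tightened from [^/]+ to [^/;]+, in isolation: with the
# old pattern, an '@' inside the parameter section lets the greedy user group
# swallow the hostname and part of the parameters.
import re

old = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?')
new = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?')

url = 'http://example.com;user=me@host'
assert old.match(url).group('user') == 'example.com;user=me'   # wrong split
assert new.match(url).group('user') is None                    # user left to the parm handling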
 
@@ -491,7 +491,7 @@
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -537,7 +537,11 @@
     return False
 
 def mirror_from_string(data):
-    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
+    mirrors = (data or "").replace('\\n',' ').split()
+    # Split into pairs
+    if len(mirrors) % 2 != 0:
+        bb.warn('Invalid mirror data %s, should have paired members.' % data)
+    return list(zip(*[iter(mirrors)]*2))
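# The pairing idiom above, in isolation: zip(*[iter(seq)]*2) hands the same
# iterator to zip twice, so consecutive elements are consumed as (from, to)
# pairs. Mirror strings here are illustrative.
mirrors = "git://.*/.* http://mirror.example/ ftp://.*/.* http://mirror.example/".split()
assert list(zip(*[iter(mirrors)]*2)) == [
    ('git://.*/.*', 'http://mirror.example/'),
    ('ftp://.*/.*', 'http://mirror.example/')]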
 
 def verify_checksum(ud, d, precomputed={}):
     """
@@ -572,7 +576,7 @@
 
     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                              'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -621,7 +625,7 @@
     Returns True, if the donestamp exists and is valid, False otherwise. When
     returning False, any existing done stamps are removed.
     """
-    if not ud.needdonestamp:
+    if not ud.needdonestamp or (origud and not origud.needdonestamp):
         return True
 
     if not os.path.exists(ud.donestamp):
@@ -718,13 +722,13 @@
 
 def get_autorev(d):
     #  only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"
 
 def get_srcrev(d, method_name='sortable_revision'):
     """
-    Return the revsion string, usually for use in the version string (PV) of the current package
+    Return the revision string, usually for use in the version string (PV) of the current package
     Most packages usually only have one SCM so we just pass on the call.
     In the multi SCM case, we build a value based on SRCREV_FORMAT which must
     have been set.
@@ -737,7 +741,7 @@
     """
 
     scms = []
-    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -757,7 +761,7 @@
     #
     # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
 
@@ -819,9 +823,18 @@
     if not cleanup:
         cleanup = []
 
+    # If PATH contains WORKDIR which contains PV which contains SRCPV we
+    # can end up in circular recursion here so give the option of breaking it
+    # in a data store copy.
+    try:
+        d.getVar("PV")
+    except bb.data_smart.ExpansionError:
+        d = bb.data.createCopy(d)
+        d.setVar("PV", "fetcheravoidrecurse")
+
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
@@ -856,12 +869,15 @@
 
     return output
 
-def check_network_access(d, info = "", url = None):
+def check_network_access(d, info, url):
     """
-    log remote network access, and error if BB_NO_NETWORK is set
+    log remote network access, and error if BB_NO_NETWORK is set or the given
+    URI is untrusted
     """
-    if d.getVar("BB_NO_NETWORK", True) == "1":
+    if d.getVar("BB_NO_NETWORK") == "1":
         raise NetworkAccess(url, info)
+    elif not trusted_network(d, url):
+        raise UntrustedUrl(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
 
@@ -958,7 +974,7 @@
 
         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -967,7 +983,14 @@
                 open(ud.donestamp, 'w').close()
             dest = os.path.join(dldir, os.path.basename(ud.localpath))
             if not os.path.exists(dest):
-                os.symlink(ud.localpath, dest)
+                # In case this is executing without any file locks held (as is
+                # the case for file:// URLs), two tasks may end up here at the
+                # same time, in which case we do not want the second task to
+                # fail when the link has already been created by the first task.
+                try:
+                    os.symlink(ud.localpath, dest)
+                except FileExistsError:
+                    pass
             if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                 origud.method.download(origud, ld)
                 if hasattr(origud.method,"build_mirror_data"):
@@ -979,13 +1002,23 @@
                 # Broken symbolic link
                 os.unlink(origud.localpath)
 
-            os.symlink(ud.localpath, origud.localpath)
+            # As per above, in case two tasks end up here simultaneously.
+            try:
+                os.symlink(ud.localpath, origud.localpath)
+            except FileExistsError:
+                pass
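# The EEXIST-tolerant idiom used twice above, standalone: attempting the link
# and catching FileExistsError is atomic, unlike an exists() check followed by
# symlink(), which leaves a window for the race.
import os

def symlink_if_absent(src, dest):
    try:
        os.symlink(src, dest)
    except FileExistsError:
        pass  # another task won the race; the link already exists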
         update_stamp(origud, ld)
         return ud.localpath
 
     except bb.fetch2.NetworkAccess:
         raise
 
+    except IOError as e:
+        if e.errno in [errno.ESTALE]:
+            logger.warning("Stale Error Observed %s." % ud.url)
+            return False
+        raise
+
     except bb.fetch2.BBFetchException as e:
         if isinstance(e, ChecksumError):
             logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
@@ -1032,14 +1065,14 @@
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
 
     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
 
     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1104,7 @@
     """
 
     srcrev = None
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
         attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1115,7 @@
     attempts.append("SRCREV")
 
     for a in attempts:
-        srcrev = d.getVar(a, True)              
+        srcrev = d.getVar(a)              
         if srcrev and srcrev != "INVALID":
             break
 
@@ -1115,7 +1148,7 @@
     """
     fetch = Fetch([], d, cache = False, localonly = True)
 
-    dl_dir = d.getVar('DL_DIR', True)
+    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
@@ -1129,9 +1162,9 @@
                 if f.startswith(dl_dir):
                     # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                     if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                     else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                 filelist.append(f + ":" + str(os.path.exists(f)))
 
     return " ".join(filelist)
@@ -1160,7 +1193,7 @@
         self.mirrortarball = None
         self.basename = None
         self.basepath = None
-        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
+        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
         self.date = self.getSRCDate(d)
         self.url = url
         if not self.user and "user" in self.parm:
@@ -1177,16 +1210,16 @@
             self.sha256_name = "sha256sum"
         if self.md5_name in self.parm:
             self.md5_expected = self.parm[self.md5_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.md5_expected = None
         else:
-            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True)
+            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
         if self.sha256_name in self.parm:
             self.sha256_expected = self.parm[self.sha256_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.sha256_expected = None
         else:
-            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True)
+            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
         self.ignore_checksums = False
 
         self.names = self.parm.get("name",'default').split(',')
@@ -1204,7 +1237,7 @@
             raise NonLocalMethod()
 
         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)
 
         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1250,7 @@
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)
 
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
 
         if not self.needdonestamp:
             return
@@ -1235,7 +1268,7 @@
         self.donestamp = basepath + '.done'
         self.lockfile = basepath + '.lock'
 
-    def setup_revisons(self, d):
+    def setup_revisions(self, d):
         self.revisions = {}
         for name in self.names:
             self.revisions[name] = srcrev_internal_helper(self, d, name)
@@ -1257,12 +1290,12 @@
         if "srcdate" in self.parm:
             return self.parm['srcdate']
 
-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")
 
         if pn:
-            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
 
-        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+        return d.getVar("SRCDATE") or d.getVar("DATE")
 
 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -1282,7 +1315,7 @@
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
 
     def supports_checksum(self, urldata):
         """
@@ -1382,6 +1415,10 @@
                 cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
             elif file.endswith('.lz'):
                 cmd = 'lzip -dc %s > %s' % (file, efile)
+            elif file.endswith('.tar.7z'):
+                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+            elif file.endswith('.7z'):
+                cmd = '7za x -y %s 1>/dev/null' % file
             elif file.endswith('.zip') or file.endswith('.jar'):
                 try:
                     dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1413,10 +1450,6 @@
                 else:
                     raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                 cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
-            elif file.endswith('.tar.7z'):
-                cmd = '7z x -so %s | tar xf - ' % file
-            elif file.endswith('.7z'):
-                cmd = '7za x -y %s 1>/dev/null' % file
 
         # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
         if 'subdir' in urldata.parm:
@@ -1450,7 +1483,7 @@
         if not cmd:
             return
 
-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1540,7 @@
 
     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")
 
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1548,14 @@
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
 
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache
 
-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]
 
@@ -1565,8 +1598,8 @@
         if not urls:
             urls = self.urls
 
-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
 
         for u in urls:
             ud = self.ud[u]
@@ -1584,8 +1617,17 @@
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)
+                    if localpath:
+                        try:
+                            # early checksum verification so that if the checksum of the premirror
+                            # contents mismatch the fetcher can still try upstream and mirrors
+                            update_stamp(ud, self.d)
+                        except ChecksumError as e:
+                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
+                            logger.debug(1, str(e))
+                            localpath = ""
 
                 if premirroronly:
                     self.d.setVar("BB_NO_NETWORK", "1")
@@ -1624,7 +1666,7 @@
                         if not verified_stamp:
                             m.clean(ud, self.d)
                         logger.debug(1, "Trying MIRRORS")
-                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         localpath = try_mirrors(self, self.d, ud, mirrors)
 
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1634,6 +1676,11 @@
 
                 update_stamp(ud, self.d)
 
+            except IOError as e:
+                if e.errno in [errno.ESTALE]:
+                    logger.error("Stale Error Observed %s." % u)
+                    raise ChecksumError("Stale Error Detected")
+
             except BBFetchException as e:
                 if isinstance(e, ChecksumError):
                     logger.error("Checksum failure fetching %s" % u)
@@ -1657,7 +1704,7 @@
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1665,7 +1712,7 @@
                     ret = m.checkstatus(self, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     ret = try_mirrors(self, self.d, ud, mirrors, True)
 
             if not ret:
@@ -1763,6 +1810,7 @@
 from . import wget
 from . import ssh
 from . import sftp
+from . import s3
 from . import perforce
 from . import bzr
 from . import hg
@@ -1780,6 +1828,7 @@
 methods.append(cvs.Cvs())
 methods.append(ssh.SSH())
 methods.append(sftp.SFTP())
+methods.append(s3.S3())
 methods.append(perforce.Perforce())
 methods.append(bzr.Bzr())
 methods.append(hg.Hg())
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
index 72264af..16123f8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py
@@ -27,7 +27,6 @@
 import sys
 import logging
 import bb
-from bb import data
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import runfetchcmd
@@ -43,14 +42,14 @@
         """
         # Create paths to bzr checkouts
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
+        ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath)
 
-        ud.setup_revisons(d)
+        ud.setup_revisions(d)
 
         if not ud.revision:
             ud.revision = self.latest_revision(ud, d)
 
-        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
+        ud.localfile = d.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision))
 
     def _buildbzrcommand(self, ud, d, command):
         """
@@ -58,7 +57,7 @@
         command is "fetch", "update", "revno"
         """
 
-        basecmd = data.expand('${FETCHCMD_bzr}', d)
+        basecmd = d.expand('${FETCHCMD_bzr}')
 
         proto =  ud.parm.get('protocol', 'http')
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
index 70e280a..36beab6a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -65,7 +65,6 @@
 import sys
 import shutil
 import bb
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import runfetchcmd
@@ -108,13 +107,13 @@
         else:
             ud.module = ""
 
-        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+        ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
 
-        if data.getVar("SRCREV", d, True) == "INVALID":
+        if d.getVar("SRCREV") == "INVALID":
           raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
 
         ud.label = d.getVar("SRCREV", False)
-        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
+        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")
 
         ud.server     = "%s://%s%s" % (ud.proto, ud.host, ud.path)
 
@@ -124,7 +123,7 @@
 
         ud.viewname         = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
         ud.csname           = "%s-config-spec" % (ud.identifier)
-        ud.ccasedir         = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
+        ud.ccasedir         = os.path.join(d.getVar("DL_DIR"), ud.type)
         ud.viewdir          = os.path.join(ud.ccasedir, ud.viewname)
         ud.configspecfile   = os.path.join(ud.ccasedir, ud.csname)
         ud.localfile        = "%s.tar.gz" % (ud.identifier)
@@ -144,7 +143,7 @@
         self.debug("configspecfile  = %s" % ud.configspecfile)
         self.debug("localfile       = %s" % ud.localfile)
 
-        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+        ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
 
     def _build_ccase_command(self, ud, command):
         """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
index 5ff70ba..490c954 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py
@@ -63,7 +63,7 @@
         if 'fullpath' in ud.parm:
             fullpath = '_fullpath'
 
-        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
+        ud.localfile = d.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath))
 
     def need_update(self, ud, d):
         if (ud.date == "now"):
@@ -87,10 +87,10 @@
             cvsroot = ud.path
         else:
             cvsroot = ":" + method
-            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
+            cvsproxyhost = d.getVar('CVS_PROXY_HOST')
             if cvsproxyhost:
                 cvsroot += ";proxy=" + cvsproxyhost
-            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
+            cvsproxyport = d.getVar('CVS_PROXY_PORT')
             if cvsproxyport:
                 cvsroot += ";proxyport=" + cvsproxyport
             cvsroot += ":" + ud.user
@@ -110,7 +110,7 @@
         if ud.tag:
             options.append("-r %s" % ud.tag)
 
-        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
+        cvsbasecmd = d.getVar("FETCHCMD_cvs")
         cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
         cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
 
@@ -120,8 +120,8 @@
 
         # create module directory
         logger.debug(2, "Fetch: checking for module directory")
-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
         moddir = os.path.join(pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -164,8 +164,8 @@
     def clean(self, ud, d):
         """ Clean CVS Files and tarballs """
 
-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar("CVSDIR"), pkg)
 
         bb.utils.remove(pkgdir, True)
         bb.utils.remove(ud.localpath)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
index 792c183..7442f84 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
@@ -50,7 +50,7 @@
    The default is "0", set nobranch=1 if needed.
 
 - usehead
-   For local git:// urls to use the current branch HEAD as the revsion for use with
+   For local git:// urls to use the current branch HEAD as the revision for use with
    AUTOREV. Implies nobranch.
 
 """
@@ -76,7 +76,6 @@
 import bb
 import errno
 import bb.progress
-from   bb    import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
@@ -174,19 +173,19 @@
         if len(branches) != len(ud.names):
             raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
         ud.branches = {}
-        for name in ud.names:
-            branch = branches[ud.names.index(name)]
+        for pos, name in enumerate(ud.names):
+            branch = branches[pos]
             ud.branches[name] = branch
             ud.unresolvedrev[name] = branch
 
         if ud.usehead:
             ud.unresolvedrev['default'] = 'HEAD'
 
-        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
+        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
 
-        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
+        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable
 
-        ud.setup_revisons(d)
+        ud.setup_revisions(d)
 
         for name in ud.names:
             # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
@@ -206,9 +205,9 @@
         if ud.rebaseable:
             for name in ud.names:
                 gitsrcname = gitsrcname + '_' + ud.revisions[name]
-        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
-        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
+        ud.mirrortarball = 'git2_%s.tar.gz' % gitsrcname
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
+        gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/")
         ud.clonedir = os.path.join(gitdir, gitsrcname)
 
         ud.localfile = ud.clonedir
@@ -229,7 +228,7 @@
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
             return True
         if os.path.exists(ud.clonedir):
             return False
@@ -241,7 +240,7 @@
         # If the checkout doesn't exist and the mirror tarball does, extract it
         if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
             bb.utils.mkdirhier(ud.clonedir)
-            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir)
+            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
 
         repourl = self._get_repo_url(ud)
 
@@ -252,7 +251,7 @@
                 repourl = repourl[7:]
             clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
             if ud.proto.lower() != 'file':
-                bb.fetch2.check_network_access(d, clone_cmd)
+                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
             progresshandler = GitProgressHandler(d)
             runfetchcmd(clone_cmd, d, log=progresshandler)
 
@@ -292,15 +291,15 @@
                 os.unlink(ud.fullmirror)
 
             logger.info("Creating tarball of git repository")
-            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
-            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
+            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+            runfetchcmd("touch %s.done" % ud.fullmirror, d)
 
     def unpack(self, ud, destdir, d):
         """ unpack the downloaded src to destdir"""
 
         subdir = ud.parm.get("subpath", "")
         if subdir != "":
-            readpathspec = ":%s" % (subdir)
+            readpathspec = ":%s" % subdir
             def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
         else:
             readpathspec = ""
@@ -380,14 +379,26 @@
         """
         Run git ls-remote with the specified search string
         """
-        repourl = self._get_repo_url(ud)
-        cmd = "%s ls-remote %s %s" % \
-              (ud.basecmd, repourl, search)
-        if ud.proto.lower() != 'file':
-            bb.fetch2.check_network_access(d, cmd)
-        output = runfetchcmd(cmd, d, True)
-        if not output:
-            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
+        # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
+        # and WORKDIR is in PATH (as a result of RSS), our call to
+        # runfetchcmd() exports PATH so this function will get called again (!)
+        # In this scenario the return call of the function isn't actually
+        # important - WORKDIR isn't needed in PATH to call git ls-remote
+        # anyway.
+        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
+            return ''
+        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
+        try:
+            repourl = self._get_repo_url(ud)
+            cmd = "%s ls-remote %s %s" % \
+                (ud.basecmd, repourl, search)
+            if ud.proto.lower() != 'file':
+                bb.fetch2.check_network_access(d, cmd, repourl)
+            output = runfetchcmd(cmd, d, True)
+            if not output:
+                raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
+        finally:
+            d.delVar('_BB_GIT_IN_LSREMOTE')
         return output
 
     def _latest_revision(self, ud, d, name):
@@ -418,7 +429,7 @@
         """
         pupver = ('', '')
 
-        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
         try:
             output = self._lsremote(ud, d, "refs/tags/*")
         except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
@@ -470,7 +481,7 @@
             if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                 from pipes import quote
                 commits = bb.fetch2.runfetchcmd(
-                        "git rev-list %s -- | wc -l" % (quote(rev)),
+                        "git rev-list %s -- | wc -l" % quote(rev),
                         d, quiet=True).strip().lstrip('0')
                 if commits:
                     open(rev_file, "w").write("%d\n" % int(commits))
@@ -485,5 +496,5 @@
         try:
             self._lsremote(ud, d, "")
             return True
-        except FetchError:
+        except bb.fetch2.FetchError:
             return False
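
Note: besides the datastore API migration, the git hunks above harden _lsremote() against accidental re-entry. A flag is set in the datastore before git ls-remote runs and is cleared in a finally block, so a nested invocation (possible when runfetchcmd() exports a PATH that contains WORKDIR) returns early instead of recursing. A minimal, runnable sketch of the same guard pattern, using a plain dict as a stand-in for the BitBake datastore:

    # Re-entrancy guard as in the new _lsremote(): set a flag, do the work,
    # and always clear the flag so a nested call bails out with a dummy value.
    def guarded_lsremote(d, run_command):
        if d.get('_BB_GIT_IN_LSREMOTE'):
            return ''                    # nested call: its result is unused anyway
        d['_BB_GIT_IN_LSREMOTE'] = '1'
        try:
            return run_command()         # may (indirectly) re-enter this function
        finally:
            del d['_BB_GIT_IN_LSREMOTE']

    print(guarded_lsremote({}, lambda: 'deadbeef refs/heads/master'))
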
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
index 4937a10..c66c211 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py
@@ -22,7 +22,6 @@
 
 import os
 import bb
-from   bb import data
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
index 6613762..a95584c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -31,7 +31,6 @@
 
 import os
 import bb
-from   bb    import data
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
@@ -108,7 +107,7 @@
         os.rename(ud.clonedir, gitdir)
         runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
         runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
+        runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
         runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
         self._set_relative_paths(tmpclonedir)
         runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
index 20df801..b5f2686 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py
@@ -29,7 +29,6 @@
 import logging
 import bb
 import errno
-from bb import data
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import MissingParameterError
@@ -67,7 +66,7 @@
         else:
             ud.proto = "hg"
 
-        ud.setup_revisons(d)
+        ud.setup_revisions(d)
 
         if 'rev' in ud.parm:
             ud.revision = ud.parm['rev']
@@ -78,15 +77,15 @@
         hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                             ud.host, ud.path.replace('/', '.'))
         ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
 
-        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
+        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
         ud.pkgdir = os.path.join(hgdir, hgsrcname)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
         ud.localfile = ud.moddir
-        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"
+        ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg"
 
-        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
+        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS")
 
     def need_update(self, ud, d):
         revTag = ud.parm.get('rev', 'tip')
@@ -99,7 +98,7 @@
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
             return True
         if os.path.exists(ud.moddir):
             return False
@@ -221,7 +220,7 @@
         """
         Compute tip revision for the url
         """
-        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
+        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"), ud.url)
         output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
         return output.strip()
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
index 51ca78d..a114ac1 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py
@@ -29,7 +29,6 @@
 import urllib.request, urllib.parse, urllib.error
 import bb
 import bb.utils
-from   bb import data
 from   bb.fetch2 import FetchMethod, FetchError
 from   bb.fetch2 import logger
 
@@ -63,17 +62,11 @@
         newpath = path
         if path[0] == "/":
             return [path]
-        filespath = data.getVar('FILESPATH', d, True)
+        filespath = d.getVar('FILESPATH')
         if filespath:
             logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
             searched.extend(hist)
-        if not newpath:
-            filesdir = data.getVar('FILESDIR', d, True)
-            if filesdir:
-                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
-                newpath = os.path.join(filesdir, path)
-                searched.append(newpath)
         if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
             # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
             newpath, hist = bb.utils.which(filespath, ".", history=True)
@@ -81,7 +74,7 @@
             logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
             return searched
         if not os.path.exists(newpath):
-            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
+            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
             logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             searched.append(dldirfile)
@@ -100,13 +93,10 @@
         # no need to fetch local files, we'll deal with them in place.
         if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
             locations = []
-            filespath = data.getVar('FILESPATH', d, True)
+            filespath = d.getVar('FILESPATH')
             if filespath:
                 locations = filespath.split(":")
-            filesdir = data.getVar('FILESDIR', d, True)
-            if filesdir:
-                locations.append(filesdir)
-            locations.append(d.getVar("DL_DIR", True))
+            locations.append(d.getVar("DL_DIR"))
 
             msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
             raise FetchError(msg)
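
Note: the local fetcher shows the two mechanical conversions this commit applies across every module: the module-level bb.data helpers are dropped in favour of methods on the datastore, and variable expansion becomes the default, so the trailing True argument disappears. (The hunks above also remove the long-deprecated FILESDIR fallback from the search path.) An illustrative before/after; the datastore d is assumed to come from BitBake, so this is a sketch rather than a standalone script:

    # old style (removed by this commit)
    from bb import data
    filespath = data.getVar('FILESPATH', d, True)
    localfile = data.expand('${DL_DIR}/${PN}', d)

    # new style: expansion by default, pass False to opt out
    filespath = d.getVar('FILESPATH')
    rawvalue = d.getVar('FILESPATH', False)
    localfile = d.expand('${DL_DIR}/${PN}')
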
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
index 699ae72..73a75fe 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
@@ -25,7 +25,6 @@
 import subprocess
 import signal
 import bb
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import ChecksumError
@@ -80,6 +79,7 @@
         if not ud.version:
             raise ParameterError("NPM fetcher requires a version parameter", ud.url)
         ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
+        ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-')
         ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
         prefixdir = "npm/%s" % ud.pkgname
         ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
@@ -87,12 +87,13 @@
             bb.utils.mkdirhier(ud.pkgdatadir)
         ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
 
-        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
         ud.prefixdir = prefixdir
 
-        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
+        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
         ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        ud.mirrortarball = ud.mirrortarball.replace('/', '-')
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
 
     def need_update(self, ud, d):
         if os.path.exists(ud.localpath):
@@ -101,8 +102,8 @@
 
     def _runwget(self, ud, d, command, quiet):
         logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
-        bb.fetch2.check_network_access(d, command)
-        dldir = d.getVar("DL_DIR", True)
+        bb.fetch2.check_network_access(d, command, ud.url)
+        dldir = d.getVar("DL_DIR")
         runfetchcmd(command, d, quiet, workdir=dldir)
 
     def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
@@ -116,7 +117,7 @@
         # Change to subdir before executing command
         if not os.path.exists(destdir):
             os.makedirs(destdir)
-        path = d.getVar('PATH', True)
+        path = d.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, destdir))
@@ -132,9 +133,8 @@
 
 
     def unpack(self, ud, destdir, d):
-        dldir = d.getVar("DL_DIR", True)
-        depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
-        with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
+        dldir = d.getVar("DL_DIR")
+        with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile:
             workobj = json.load(datafile)
         dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
 
@@ -182,7 +182,12 @@
             if pkg_os:
                 if not isinstance(pkg_os, list):
                     pkg_os = [pkg_os]
-                if 'linux' not in pkg_os or '!linux' in pkg_os:
+                blacklist = False
+                for item in pkg_os:
+                    if item.startswith('!'):
+                        blacklist = True
+                        break
+                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                     logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                     return
         #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
@@ -195,6 +200,7 @@
 
         dependencies = pdata.get('dependencies', {})
         optionalDependencies = pdata.get('optionalDependencies', {})
+        dependencies.update(optionalDependencies)
         depsfound = {}
         optdepsfound = {}
         data[pkg]['deps'] = {}
@@ -251,24 +257,30 @@
         lockdown = {}
 
         if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
-            dest = d.getVar("DL_DIR", True)
+            dest = d.getVar("DL_DIR")
             bb.utils.mkdirhier(dest)
             runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
             return
 
-        shwrf = d.getVar('NPM_SHRINKWRAP', True)
+        shwrf = d.getVar('NPM_SHRINKWRAP')
         logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
-        try:
-            with open(shwrf) as datafile:
-                shrinkobj = json.load(datafile)
-        except:
+        if shwrf:
+            try:
+                with open(shwrf) as datafile:
+                    shrinkobj = json.load(datafile)
+            except Exception as e:
+                raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
+        elif not ud.ignore_checksums:
             logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
-        lckdf = d.getVar('NPM_LOCKDOWN', True)
+        lckdf = d.getVar('NPM_LOCKDOWN')
         logger.debug(2, "NPM lockdown file is %s" % lckdf)
-        try:
-            with open(lckdf) as datafile:
-                lockdown = json.load(datafile)
-        except:
+        if lckdf:
+            try:
+                with open(lckdf) as datafile:
+                    lockdown = json.load(datafile)
+            except Exception as e:
+                raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
+        elif not ud.ignore_checksums:
             logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
 
         if ('name' not in shrinkobj):
@@ -286,7 +298,7 @@
             if os.path.islink(ud.fullmirror):
                 os.unlink(ud.fullmirror)
 
-            dldir = d.getVar("DL_DIR", True)
+            dldir = d.getVar("DL_DIR")
             logger.info("Creating tarball of npm data")
             runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
                         workdir=dldir)
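
Note: three behavioural fixes in the npm fetcher are worth calling out: scoped package names (@scope/pkg) are made filesystem-safe by replacing '/' in the manifest and mirror-tarball names, shrinkwrap/lockdown files that exist but fail to parse now raise a FetchError instead of being silently ignored, and the package.json "os" field is treated as either a whitelist or a blacklist. A runnable sketch of the new compatibility test:

    # Mirrors the new "os" field logic: entries starting with '!' make the
    # list a blacklist, so a missing 'linux' entry is only fatal for whitelists.
    def compatible_with_linux(pkg_os):
        if not isinstance(pkg_os, list):
            pkg_os = [pkg_os]
        blacklist = any(item.startswith('!') for item in pkg_os)
        return not ((not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os)

    assert compatible_with_linux(['linux'])
    assert compatible_with_linux(['!win32'])       # blacklist not naming linux
    assert not compatible_with_linux(['darwin'])   # whitelist without linux
    assert not compatible_with_linux(['!linux'])
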
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
index 295abf9..2b4f7d9 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py
@@ -10,7 +10,6 @@
 import  sys
 import logging
 import  bb
-from    bb       import data
 from    bb.fetch2 import FetchMethod
 from    bb.fetch2 import FetchError
 from    bb.fetch2 import MissingParameterError
@@ -34,7 +33,7 @@
 
         # Create paths to osc checkouts
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
+        ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
         ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
 
         if 'rev' in ud.parm:
@@ -47,7 +46,7 @@
             else:
                 ud.revision = ""
 
-        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
+        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))
 
     def _buildosccommand(self, ud, d, command):
         """
@@ -55,7 +54,7 @@
         command is "fetch", "update", "info"
         """
 
-        basecmd = data.expand('${FETCHCMD_osc}', d)
+        basecmd = d.expand('${FETCHCMD_osc}')
 
         proto = ud.parm.get('protocol', 'ocs')
 
@@ -84,7 +83,7 @@
 
         logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
+        if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
@@ -112,7 +111,7 @@
         Generate a .oscrc to be used for this run.
         """
 
-        config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
+        config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
         if (os.path.exists(config_path)):
             os.remove(config_path)
 
@@ -121,8 +120,8 @@
         f.write("apisrv = %s\n" % ud.host)
         f.write("scheme = http\n")
         f.write("su-wrapper = su -c\n")
-        f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
-        f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
+        f.write("build-root = %s\n" % d.getVar('WORKDIR'))
+        f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
         f.write("extra-pkgs = gzip\n")
         f.write("\n")
         f.write("[%s]\n" % ud.host)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
index 50cb479..3debad5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py
@@ -26,7 +26,6 @@
 import os
 import logging
 import bb
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import logger
@@ -44,13 +43,13 @@
         provided by the env, use it.  If P4PORT is specified by the recipe, use
         its values, which may override the settings in P4CONFIG.
         """
-        ud.basecmd = d.getVar('FETCHCMD_p4', True)
+        ud.basecmd = d.getVar('FETCHCMD_p4')
         if not ud.basecmd:
             ud.basecmd = "/usr/bin/env p4"
 
-        ud.dldir = d.getVar('P4DIR', True)
+        ud.dldir = d.getVar('P4DIR')
         if not ud.dldir:
-            ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')
+            ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')
 
         path = ud.url.split('://')[1]
         path = path.split(';')[0]
@@ -62,7 +61,7 @@
             ud.path = path
 
         ud.usingp4config = False
-        p4port = d.getVar('P4PORT', True)
+        p4port = d.getVar('P4PORT')
 
         if p4port:
             logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
@@ -71,7 +70,7 @@
             logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
             ud.usingp4config = True
             p4cmd = '%s info | grep "Server address"' % ud.basecmd
-            bb.fetch2.check_network_access(d, p4cmd)
+            bb.fetch2.check_network_access(d, p4cmd, ud.url)
             ud.host = runfetchcmd(p4cmd, d, True)
             ud.host = ud.host.split(': ')[1].strip()
             logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
@@ -87,9 +86,9 @@
         cleanedhost = ud.host.replace(':', '.')
         ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
 
-        ud.setup_revisons(d)
+        ud.setup_revisions(d)
 
-        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d)
+        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
@@ -140,7 +139,7 @@
         'p4 files' command, including trailing '#rev' file revision indicator
         """
         p4cmd = self._buildp4command(ud, d, 'files')
-        bb.fetch2.check_network_access(d, p4cmd)
+        bb.fetch2.check_network_access(d, p4cmd, ud.url)
         p4fileslist = runfetchcmd(p4cmd, d, True)
         p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
 
@@ -171,7 +170,7 @@
 
         for afile in filelist:
             p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
-            bb.fetch2.check_network_access(d, p4fetchcmd)
+            bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
             runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
 
         runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
@@ -191,7 +190,7 @@
     def _latest_revision(self, ud, d, name):
         """ Return the latest upstream scm revision number """
         p4cmd = self._buildp4command(ud, d, "changes")
-        bb.fetch2.check_network_access(d, p4cmd)
+        bb.fetch2.check_network_access(d, p4cmd, ud.url)
         tip = runfetchcmd(p4cmd, d, True)
 
         if not tip:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
index ecc6e68..1be91cc 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py
@@ -25,7 +25,6 @@
 
 import os
 import bb
-from   bb    import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import runfetchcmd
 
@@ -51,17 +50,17 @@
         if not ud.manifest.endswith('.xml'):
             ud.manifest += '.xml'
 
-        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+        ud.localfile = d.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch))
 
     def download(self, ud, d):
         """Fetch url"""
 
-        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
             logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return
 
         gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
-        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
+        repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
         codir = os.path.join(repodir, gitsrcname, ud.manifest)
 
         if ud.user:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py
new file mode 100644
index 0000000..1629288
--- /dev/null
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py
@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for Amazon AWS S3.
+
+Class for fetching files from Amazon S3 using the AWS Command Line Interface.
+The aws tool must be correctly installed and configured prior to use.
+
+"""
+
+# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
+#
+# Based in part on bb.fetch2.wget:
+#    Copyright (C) 2003, 2004  Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.request, urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+
+class S3(FetchMethod):
+    """Class to fetch urls via 'aws s3'"""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with s3.
+        """
+        return ud.type in ['s3']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+
+        ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
+
+    def download(self, ud, d):
+        """
+        Fetch urls
+        Assumes localpath was called first
+        """
+
+        cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        runfetchcmd(cmd, d)
+
+        # Additional sanity checks copied from the wget class (although there
+        # are no known issues which mean these are required, treat the aws cli
+        # tool with a little healthy suspicion).
+
+        if not os.path.exists(ud.localpath):
+            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))
+
+        if os.path.getsize(ud.localpath) == 0:
+            os.remove(ud.localpath)
+            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))
+
+        return True
+
+    def checkstatus(self, fetch, ud, d):
+        """
+        Check the status of a URL
+        """
+
+        cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        output = runfetchcmd(cmd, d)
+
+        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
+        # is not found, so check output of the command to confirm success.
+
+        if not output:
+            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))
+
+        return True
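
Note: the new s3 fetcher shells out to the AWS CLI rather than speaking the S3 API itself, so the build host only needs a configured aws binary, and a recipe would use a URL of the form s3://<bucket>/<path> (with checksums, per recommends_checksum()). A small runnable sketch of the cp command download() assembles, with made-up bucket and paths:

    ud_host, ud_path = 'my-bucket', '/releases/image.tar.gz'   # hypothetical
    localpath = '/downloads/image.tar.gz'
    basecmd = '/usr/bin/env aws s3'     # the default when FETCHCMD_s3 is unset
    cmd = '%s cp s3://%s%s %s' % (basecmd, ud_host, ud_path, localpath)
    assert cmd == ('/usr/bin/env aws s3 cp '
                   's3://my-bucket/releases/image.tar.gz /downloads/image.tar.gz')
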
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
index 7989fcc..81884a6 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py
@@ -62,12 +62,10 @@
 import os
 import bb
 import urllib.request, urllib.parse, urllib.error
-from bb import data
 from bb.fetch2 import URI
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import runfetchcmd
 
-
 class SFTP(FetchMethod):
     """Class to fetch urls via 'sftp'"""
 
@@ -92,7 +90,7 @@
         else:
             ud.basename = os.path.basename(ud.path)
 
-        ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
+        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
 
     def download(self, ud, d):
         """Fetch urls"""
@@ -104,7 +102,7 @@
             port = '-P %d' % urlo.port
             urlo.port = None
 
-        dldir = data.getVar('DL_DIR', d, True)
+        dldir = d.getVar('DL_DIR')
         lpath = os.path.join(dldir, ud.localfile)
 
         user = ''
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
index 56f9b7e..6047ee4 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py
@@ -43,7 +43,6 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import re, os
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import logger
@@ -87,11 +86,11 @@
         m = __pattern__.match(urldata.url)
         path = m.group('path')
         host = m.group('host')
-        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
+        urldata.localpath = os.path.join(d.getVar('DL_DIR'),
                 os.path.basename(os.path.normpath(path)))
 
     def download(self, urldata, d):
-        dldir = d.getVar('DL_DIR', True)
+        dldir = d.getVar('DL_DIR')
 
         m = __pattern__.match(urldata.url)
         path = m.group('path')
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
index 6ca79d3..3f172ee 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py
@@ -28,7 +28,6 @@
 import logging
 import bb
 import re
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import MissingParameterError
@@ -50,7 +49,7 @@
         if not "module" in ud.parm:
             raise MissingParameterError('module', ud.url)
 
-        ud.basecmd = d.getVar('FETCHCMD_svn', True)
+        ud.basecmd = d.getVar('FETCHCMD_svn')
 
         ud.module = ud.parm["module"]
 
@@ -61,15 +60,15 @@
 
         # Create paths to svn checkouts
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
+        ud.pkgdir = os.path.join(d.expand('${SVNDIR}'), ud.host, relpath)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
 
-        ud.setup_revisons(d)
+        ud.setup_revisions(d)
 
         if 'rev' in ud.parm:
             ud.revision = ud.parm['rev']
 
-        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
+        ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
 
     def _buildsvncommand(self, ud, d, command):
         """
@@ -79,9 +78,9 @@
 
         proto = ud.parm.get('protocol', 'svn')
 
-        svn_rsh = None
-        if proto == "svn+ssh" and "rsh" in ud.parm:
-            svn_rsh = ud.parm["rsh"]
+        svn_ssh = None
+        if proto == "svn+ssh" and "ssh" in ud.parm:
+            svn_ssh = ud.parm["ssh"]
 
         svnroot = ud.host + ud.path
 
@@ -113,8 +112,8 @@
             else:
                 raise FetchError("Invalid svn command %s" % command, ud.url)
 
-        if svn_rsh:
-            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+        if svn_ssh:
+            svncmd = "SVN_SSH=\"%s\" %s" % (svn_ssh, svncmd)
 
         return svncmd
 
@@ -173,7 +172,7 @@
         """
         Return the latest upstream revision number
         """
-        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
+        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"), ud.url)
 
         output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
 
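Note: the svn hunks fix a long-standing tunnel bug: Subversion reads the SVN_SSH environment variable, not svn_RSH, and the URL parameter is renamed from rsh to ssh to match. A runnable sketch of the corrected command prefixing, with a hypothetical key path:

    ud_parm = {'protocol': 'svn+ssh', 'ssh': 'ssh -i /home/build/.ssh/id_rsa'}
    svncmd = 'svn co svn+ssh://example.com/repo/trunk module'
    if ud_parm.get('protocol') == 'svn+ssh' and 'ssh' in ud_parm:
        svncmd = 'SVN_SSH="%s" %s' % (ud_parm['ssh'], svncmd)
    print(svncmd)  # SVN_SSH="ssh -i ..." svn co svn+ssh://example.com/repo/trunk module
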
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
index 23d48ac..ae0ffa8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
@@ -33,7 +33,6 @@
 import bb
 import bb.progress
 import urllib.request, urllib.parse, urllib.error
-from   bb import data
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import logger
@@ -84,18 +83,18 @@
         else:
             ud.basename = os.path.basename(ud.path)
 
-        ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
+        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
         if not ud.localfile:
-            ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
+            ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
 
-        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
 
     def _runwget(self, ud, d, command, quiet):
 
         progresshandler = WgetProgressHandler(d)
 
         logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
-        bb.fetch2.check_network_access(d, command)
+        bb.fetch2.check_network_access(d, command, ud.url)
         runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler)
 
     def download(self, ud, d):
@@ -104,7 +103,7 @@
         fetchcmd = self.basecmd
 
         if 'downloadfilename' in ud.parm:
-            dldir = d.getVar("DL_DIR", True)
+            dldir = d.getVar("DL_DIR")
             bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
             fetchcmd += " -O " + dldir + os.sep + ud.localfile
 
@@ -304,12 +303,24 @@
             r = urllib.request.Request(uri)
             r.get_method = lambda: "HEAD"
 
-            if ud.user:
+            def add_basic_auth(login_str, request):
+                '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64
-                encodeuser = base64.b64encode(ud.user.encode('utf-8')).decode("utf-8")
+                encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
                 authheader =  "Basic %s" % encodeuser
                 r.add_header("Authorization", authheader)
 
+            if ud.user:
+                add_basic_auth(ud.user, r)
+
+            try:
+                import netrc, urllib.parse
+                n = netrc.netrc()
+                login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
+                add_basic_auth("%s:%s" % (login, password), r)
+            except (TypeError, ImportError, IOError, netrc.NetrcParseError):
+                pass
+
             opener.open(r)
         except urllib.error.URLError as e:
             if try_again:
@@ -534,7 +545,7 @@
 
         # src.rpm extension was added only for rpm package. Can be removed if the rpm
         # packaged will always be considered as having to be manually upgraded
-        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
 
         # match name, version and archive type of a package
         package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
@@ -542,7 +553,7 @@
         self.suffix_regex_comp = re.compile(psuffix_regex)
 
         # compile regex, can be specific by package or generic regex
-        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
+        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
         if pn_regex:
             package_custom_regex_comp = re.compile(pn_regex)
         else:
@@ -563,7 +574,7 @@
         sanity check to ensure same name and type.
         """
         package = ud.path.split("/")[-1]
-        current_version = ['', d.getVar('PV', True), '']
+        current_version = ['', d.getVar('PV'), '']
 
         """possible to have no version in pkg name, such as spectrum-fw"""
         if not re.search("\d+", package):
@@ -578,7 +589,7 @@
         bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
 
         uri = ""
-        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
+        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
         if not regex_uri:
             path = ud.path.split(package)[0]
 
@@ -587,7 +598,7 @@
             dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
             m = dirver_regex.search(path)
             if m:
-                pn = d.getVar('PN', True)
+                pn = d.getVar('PN')
                 dirver = m.group('dirver')
 
                 dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
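
Note: besides the API migration, wget's checkstatus() now falls back to credentials from ~/.netrc, sending them as an HTTP Basic Authorization header on the HEAD request. A standalone sketch of that lookup (the URL is hypothetical and nothing is actually fetched):

    import base64, netrc, urllib.parse, urllib.request

    uri = 'https://downloads.example.com/src.tar.gz'
    r = urllib.request.Request(uri)
    r.get_method = lambda: 'HEAD'
    try:
        auth = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
        if auth:
            login, _, password = auth
            token = base64.b64encode(('%s:%s' % (login, password)).encode('utf-8'))
            r.add_header('Authorization', 'Basic %s' % token.decode('utf-8'))
    except (IOError, netrc.NetrcParseError):
        pass  # no ~/.netrc, or it is unparsable: stay anonymous
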
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/main.py b/import-layers/yocto-poky/bitbake/lib/bb/main.py
index f2f59f6..8c948c2 100755
--- a/import-layers/yocto-poky/bitbake/lib/bb/main.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/main.py
@@ -174,13 +174,24 @@
                           help="Read the specified file after bitbake.conf.")
 
         parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
-                          help="Output more log message data to the terminal.")
+                          help="Enable tracing of shell tasks (with 'set -x'). "
+                               "Also print bb.note(...) messages to stdout (in "
+                               "addition to writing them to ${T}/log.do_<task>).")
 
         parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
-                          help="Increase the debug level. You can specify this more than once.")
+                          help="Increase the debug level. You can specify this "
+                               "more than once. -D sets the debug level to 1, "
+                               "where only bb.debug(1, ...) messages are printed "
+                               "to stdout; -DD sets the debug level to 2, where "
+                               "both bb.debug(1, ...) and bb.debug(2, ...) "
+                               "messages are printed; etc. Without -D, no debug "
+                               "messages are printed. Note that -D only affects "
+                               "output to stdout. All debug messages are written "
+                               "to ${T}/log.do_taskname, regardless of the debug "
+                               "level.")
 
-        parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False,
-                          help="Output less log message data to the terminal.")
+        parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
+                          help="Output less log message data to the terminal. You can specify this more than once.")
 
         parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
                           help="Don't execute, just go through the motions.")
@@ -287,6 +298,9 @@
                           help="Writes the event log of the build to a bitbake event json file. "
                                "Use '' (empty string) to assign the name automatically.")
 
+        parser.add_option("", "--runall", action="store", dest="runall",
+                          help="Run the specified task for all build targets and their dependencies.")
+
         options, targets = parser.parse_args(argv)
 
         if options.quiet and options.verbose:
@@ -367,6 +381,7 @@
         raise
     if not configParams.foreground:
         server.detach()
+        cooker.shutdown()
     cooker.lock.close()
     return server
 
@@ -389,12 +404,8 @@
     except:
         pass
 
-
     configuration.setConfigParameters(configParams)
 
-    ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
-    servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')
-
     if configParams.server_only:
         if configParams.servertype != "xmlrpc":
             raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
@@ -442,66 +453,11 @@
     bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                           configuration.debug_domains)
 
-    # Ensure logging messages get sent to the UI as events
-    handler = bb.event.LogHandler()
-    if not configParams.status_only:
-        # In status only mode there are no logs and no UI
-        logger.addHandler(handler)
-
-    # Clear away any spurious environment variables while we stoke up the cooker
-    cleanedvars = bb.utils.clean_environment()
-
-    featureset = []
-    if not configParams.server_only:
-        # Collect the feature set for the UI
-        featureset = getattr(ui_module, "featureSet", [])
-
-    if configParams.server_only:
-        for param in ('prefile', 'postfile'):
-            value = getattr(configParams, param)
-            if value:
-                setattr(configuration, "%s_server" % param, value)
-                param = "%s_server" % param
-
-    if not configParams.remote_server:
-        # we start a server with a given configuration
-        server = start_server(servermodule, configParams, configuration, featureset)
-        bb.event.ui_queue = []
-    else:
-        if os.getenv('BBSERVER') == 'autostart':
-            if configParams.remote_server == 'autostart' or \
-               not servermodule.check_connection(configParams.remote_server, timeout=2):
-                configParams.bind = 'localhost:0'
-                srv = start_server(servermodule, configParams, configuration, featureset)
-                configParams.remote_server = '%s:%d' % tuple(configuration.interface)
-                bb.event.ui_queue = []
-
-        # we start a stub server that is actually a XMLRPClient that connects to a real server
-        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
-                                                  configParams.xmlrpctoken)
-        server.saveConnectionDetails(configParams.remote_server)
-
+    server, server_connection, ui_module = setup_bitbake(configParams, configuration)
+    if server_connection is None and configParams.kill_server:
+        return 0
 
     if not configParams.server_only:
-        try:
-            server_connection = server.establishConnection(featureset)
-        except Exception as e:
-            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
-
-        if configParams.kill_server:
-            server_connection.connection.terminateServer()
-            bb.event.ui_queue = []
-            return 0
-
-        server_connection.setupEventQueue()
-
-        # Restore the environment in case the UI needs it
-        for k in cleanedvars:
-            os.environ[k] = cleanedvars[k]
-
-        logger.removeHandler(handler)
-
-
         if configParams.status_only:
             server_connection.terminate()
             return 0
@@ -520,3 +476,77 @@
         return 0
 
     return 1
+
+def setup_bitbake(configParams, configuration, extrafeatures=None):
+    # Ensure logging messages get sent to the UI as events
+    handler = bb.event.LogHandler()
+    if not configParams.status_only:
+        # In status only mode there are no logs and no UI
+        logger.addHandler(handler)
+
+    # Clear away any spurious environment variables while we stoke up the cooker
+    cleanedvars = bb.utils.clean_environment()
+
+    if configParams.server_only:
+        featureset = []
+        ui_module = None
+    else:
+        ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
+        # Collect the feature set for the UI
+        featureset = getattr(ui_module, "featureSet", [])
+
+    if configParams.server_only:
+        for param in ('prefile', 'postfile'):
+            value = getattr(configParams, param)
+            if value:
+                setattr(configuration, "%s_server" % param, value)
+                param = "%s_server" % param
+
+    if extrafeatures:
+        for feature in extrafeatures:
+            if not feature in featureset:
+                featureset.append(feature)
+
+    servermodule = import_extension_module(bb.server,
+                                            configParams.servertype,
+                                            'BitBakeServer')
+    if configParams.remote_server:
+        if os.getenv('BBSERVER') == 'autostart':
+            if configParams.remote_server == 'autostart' or \
+               not servermodule.check_connection(configParams.remote_server, timeout=2):
+                configParams.bind = 'localhost:0'
+                srv = start_server(servermodule, configParams, configuration, featureset)
+                configParams.remote_server = '%s:%d' % tuple(configuration.interface)
+                bb.event.ui_queue = []
+        # we start a stub server that is actually a XMLRPClient that connects to a real server
+        from bb.server.xmlrpc import BitBakeXMLRPCClient
+        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
+                                                  configParams.xmlrpctoken)
+        server.saveConnectionDetails(configParams.remote_server)
+    else:
+        # we start a server with a given configuration
+        server = start_server(servermodule, configParams, configuration, featureset)
+        bb.event.ui_queue = []
+
+    if configParams.server_only:
+        server_connection = None
+    else:
+        try:
+            server_connection = server.establishConnection(featureset)
+        except Exception as e:
+            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
+
+        if configParams.kill_server:
+            server_connection.connection.terminateServer()
+            bb.event.ui_queue = []
+            return None, None, None
+
+        server_connection.setupEventQueue()
+
+        # Restore the environment in case the UI needs it
+        for k in cleanedvars:
+            os.environ[k] = cleanedvars[k]
+
+        logger.removeHandler(handler)
+
+    return server, server_connection, ui_module
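
Note: setup_bitbake() now owns server startup, connection establishment and the kill-server path, so bitbake_main() shrinks to the call shown above and other front ends can reuse the helper (the extrafeatures parameter lets them request extra UI event features). An illustrative caller following that pattern; the ui_module.main() signature is assumed from surrounding bitbake code rather than shown in this hunk:

    def run(configParams, configuration):
        server, server_connection, ui_module = setup_bitbake(configParams,
                                                             configuration)
        if server_connection is None and configParams.kill_server:
            return 0  # --kill-server was fully handled inside the helper
        if configParams.server_only:
            return 0  # detached server keeps running; nothing more to do
        try:
            return ui_module.main(server_connection.connection,
                                  server_connection.events, configParams)
        finally:
            server_connection.terminate()
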
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
index 203c405..833cd3d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py
@@ -129,7 +129,7 @@
             bb.utils.mkdirhier(path)
         dev = getMountedDev(path)
         # Use path/action as the key
-        devDict[os.path.join(path, action)] = [dev, minSpace, minInode]
+        devDict[(path, action)] = [dev, minSpace, minInode]
 
     return devDict
 
@@ -141,7 +141,7 @@
     spaceDefault = 50 * 1024 * 1024
     inodeDefault = 5 * 1024
 
-    interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
+    interval = configuration.getVar("BB_DISKMON_WARNINTERVAL")
     if not interval:
         return spaceDefault, inodeDefault
     else:
@@ -179,7 +179,7 @@
         self.enableMonitor = False
         self.configuration = configuration
 
-        BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
+        BBDirs = configuration.getVar("BB_DISKMON_DIRS") or None
         if BBDirs:
             self.devDict = getDiskData(BBDirs, configuration)
             if self.devDict:
@@ -205,18 +205,21 @@
         """ Take action for the monitor """
 
         if self.enableMonitor:
-            for k in self.devDict:
-                path = os.path.dirname(k)
-                action = os.path.basename(k)
-                dev = self.devDict[k][0]
-                minSpace = self.devDict[k][1]
-                minInode = self.devDict[k][2]
+            diskUsage = {}
+            for k, attributes in self.devDict.items():
+                path, action = k
+                dev, minSpace, minInode = attributes
 
                 st = os.statvfs(path)
 
-                # The free space, float point number
+                # The available free space, integer number
                 freeSpace = st.f_bavail * st.f_frsize
 
+                # Send all relevant information in the event.
+                freeSpaceRoot = st.f_bfree * st.f_frsize
+                totalSpace = st.f_blocks * st.f_frsize
+                diskUsage[dev] = bb.event.DiskUsageSample(freeSpace, freeSpaceRoot, totalSpace)
+
                 if minSpace and freeSpace < minSpace:
                     # Always show warning, the self.checked would always be False if the action is WARN
                     if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
@@ -235,7 +238,7 @@
                         rq.finish_runqueue(True)
                         bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
 
-                # The free inodes, float point number
+                # The free inodes, integer number
                 freeInode = st.f_favail
 
                 if minInode and freeInode < minInode:
@@ -260,4 +263,6 @@
                         self.checked[k] = True
                         rq.finish_runqueue(True)
                         bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
+
+            bb.event.fire(bb.event.MonitorDiskEvent(diskUsage), self.configuration)
         return
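
Note: two changes here: devDict is now keyed by (path, action) tuples instead of error-prone os.path.join(path, action) strings, and every poll fires a MonitorDiskEvent carrying DiskUsageSample values so UIs can track disk usage over a build. A runnable sketch of the statvfs arithmetic behind those samples:

    import os

    st = os.statvfs('/tmp')
    free_space = st.f_bavail * st.f_frsize       # available to the build user
    free_space_root = st.f_bfree * st.f_frsize   # includes root's reserve
    total_space = st.f_blocks * st.f_frsize
    free_inodes = st.f_favail
    print(free_space, free_space_root, total_space, free_inodes)
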
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/msg.py b/import-layers/yocto-poky/bitbake/lib/bb/msg.py
index b7c39fa..90b1582 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/msg.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/msg.py
@@ -201,3 +201,18 @@
         logger = logging.getLogger("BitBake")
     logger.critical(msg)
     sys.exit(1)
+
+def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers=False, color='auto'):
+    """Standalone logger creation function"""
+    logger = logging.getLogger(name)
+    console = logging.StreamHandler(output)
+    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+    if color == 'always' or (color == 'auto' and output.isatty()):
+        format.enable_color()
+    console.setFormatter(format)
+    if preserve_handlers:
+        logger.addHandler(console)
+    else:
+        logger.handlers = [console]
+    logger.setLevel(level)
+    return logger
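
Note: logger_create() gives standalone scripts the same colourised console logging the main tools use, without each of them re-implementing handler setup. A hypothetical use ('my-tool' is a made-up name, and bb must be importable, e.g. with bitbake/lib on sys.path):

    import logging
    import sys

    import bb.msg

    logger = bb.msg.logger_create('my-tool', output=sys.stdout,
                                  level=logging.INFO, color='auto')
    logger.info('colourised only when stdout is a tty')
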
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
index 26ae7ea..a2952ec 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
@@ -123,7 +123,7 @@
 
 def resolve_file(fn, d):
     if not os.path.isabs(fn):
-        bbpath = d.getVar("BBPATH", True)
+        bbpath = d.getVar("BBPATH")
         newfn, attempts = bb.utils.which(bbpath, fn, history=True)
         for af in attempts:
             mark_dependency(d, af)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
index fa83b18..dba4540 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
@@ -30,8 +30,6 @@
 from bb import methodpool
 from bb.parse import logger
 
-_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
-
 class StatementGroup(list):
     def eval(self, data):
         for statement in self:
@@ -132,7 +130,6 @@
                 val = groupd["value"]
         elif "colon" in groupd and groupd["colon"] != None:
             e = data.createCopy()
-            bb.data.update_data(e)
             op = "immediate"
             val = e.expand(groupd["value"], key + "[:=]")
         elif "append" in groupd and groupd["append"] != None:
@@ -347,19 +344,18 @@
         if not handlerfn:
             bb.fatal("Undefined event handler function '%s'" % var)
         handlerln = int(d.getVarFlag(var, "lineno", False))
-        bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
+        bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)
 
     bb.event.fire(bb.event.RecipePreFinalise(fn), d)
 
     bb.data.expandKeys(d)
-    bb.data.update_data(d)
     code = []
     for funcname in d.getVar("__BBANONFUNCS", False) or []:
         code.append("%s(d)" % funcname)
     bb.utils.better_exec("\n".join(code), {"d": d})
-    bb.data.update_data(d)
 
     tasklist = d.getVar('__BBTASKS', False) or []
+    bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
     bb.build.add_tasks(tasklist, d)
 
     bb.parse.siggen.finalise(fn, d, variant)
@@ -385,29 +381,8 @@
             else:
                 create_variant("%s-%s" % (variant, name), datastores[variant], name)
 
-def _expand_versions(versions):
-    def expand_one(version, start, end):
-        for i in range(start, end + 1):
-            ver = _bbversions_re.sub(str(i), version, 1)
-            yield ver
-
-    versions = iter(versions)
-    while True:
-        try:
-            version = next(versions)
-        except StopIteration:
-            break
-
-        range_ver = _bbversions_re.search(version)
-        if not range_ver:
-            yield version
-        else:
-            newversions = expand_one(version, int(range_ver.group("from")),
-                                     int(range_ver.group("to")))
-            versions = itertools.chain(newversions, versions)
-
 def multi_finalize(fn, d):
-    appends = (d.getVar("__BBAPPEND", True) or "").split()
+    appends = (d.getVar("__BBAPPEND") or "").split()
     for append in appends:
         logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
         bb.parse.BBHandler.handle(append, d, True)
@@ -422,51 +397,7 @@
         d.setVar("__SKIPPED", e.args[0])
     datastores = {"": safe_d}
 
-    versions = (d.getVar("BBVERSIONS", True) or "").split()
-    if versions:
-        pv = orig_pv = d.getVar("PV", True)
-        baseversions = {}
-
-        def verfunc(ver, d, pv_d = None):
-            if pv_d is None:
-                pv_d = d
-
-            overrides = d.getVar("OVERRIDES", True).split(":")
-            pv_d.setVar("PV", ver)
-            overrides.append(ver)
-            bpv = baseversions.get(ver) or orig_pv
-            pv_d.setVar("BPV", bpv)
-            overrides.append(bpv)
-            d.setVar("OVERRIDES", ":".join(overrides))
-
-        versions = list(_expand_versions(versions))
-        for pos, version in enumerate(list(versions)):
-            try:
-                pv, bpv = version.split(":", 2)
-            except ValueError:
-                pass
-            else:
-                versions[pos] = pv
-                baseversions[pv] = bpv
-
-        if pv in versions and not baseversions.get(pv):
-            versions.remove(pv)
-        else:
-            pv = versions.pop()
-
-            # This is necessary because our existing main datastore
-            # has already been finalized with the old PV, we need one
-            # that's been finalized with the new PV.
-            d = bb.data.createCopy(safe_d)
-            verfunc(pv, d, safe_d)
-            try:
-                finalize(fn, d)
-            except bb.parse.SkipRecipe as e:
-                d.setVar("__SKIPPED", e.args[0])
-
-        _create_variants(datastores, versions, verfunc, onlyfinalise)
-
-    extended = d.getVar("BBCLASSEXTEND", True) or ""
+    extended = d.getVar("BBCLASSEXTEND") or ""
     if extended:
         # the following is to support bbextends with arguments, for e.g. multilib
         # an example is as follows:
@@ -484,7 +415,7 @@
             else:
                 extendedmap[ext] = ext
 
-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")
         def extendfunc(name, d):
             if name != extendedmap[name]:
                 d.setVar("BBEXTENDCURR", extendedmap[name])
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index c54a079..fe918a4 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -66,7 +66,7 @@
             file = os.path.join('classes', '%s.bbclass' % file)
 
         if not os.path.isabs(file):
-            bbpath = d.getVar("BBPATH", True)
+            bbpath = d.getVar("BBPATH")
             abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
             for af in attempts:
                 if af != abs_fn:
@@ -87,17 +87,17 @@
     try:
         return cached_statements[absolute_filename]
     except KeyError:
-        file = open(absolute_filename, 'r')
-        statements = ast.StatementGroup()
+        with open(absolute_filename, 'r') as f:
+            statements = ast.StatementGroup()
 
-        lineno = 0
-        while True:
-            lineno = lineno + 1
-            s = file.readline()
-            if not s: break
-            s = s.rstrip()
-            feeder(lineno, s, filename, base_name, statements)
-        file.close()
+            lineno = 0
+            while True:
+                lineno = lineno + 1
+                s = f.readline()
+                if not s: break
+                s = s.rstrip()
+                feeder(lineno, s, filename, base_name, statements)
+
         if __inpython__:
             # add a blank line to close out any python definition
             feeder(lineno, "", filename, base_name, statements, eof=True)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 875250d..f7d0cf7 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -33,7 +33,7 @@
 __config_regexp__  = re.compile( r"""
     ^
     (?P<exp>export\s*)?
-    (?P<var>[a-zA-Z0-9\-~_+.${}/]+?)
+    (?P<var>[a-zA-Z0-9\-_+.${}/~]+?)
     (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
 
     \s* (
@@ -56,9 +56,9 @@
     """, re.X)
 __include_regexp__ = re.compile( r"include\s+(.+)" )
 __require_regexp__ = re.compile( r"require\s+(.+)" )
-__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
-__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)$" )
-__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)\[([a-zA-Z0-9\-_+.${}/]+)\]$" )
+__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
+__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
+__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" )
 
 def init(data):
     topdir = data.getVar('TOPDIR', False)
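
The regexp changes above move `~` to the end of the variable-name class (no behavioural change there) and add it to the export/unset patterns, while narrowing the flag-name class in __unset_flag_regexp__ to plain identifier characters. A small check of the patched patterns, copied here for illustration:

import re

export_regexp = re.compile(r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$")
unset_flag_regexp = re.compile(r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$")

assert export_regexp.match("export MYVAR~tilde")        # '~' now accepted in names
m = unset_flag_regexp.match("unset MYVAR~tilde[someflag]")
assert m and m.group(2) == "someflag"  # flag names no longer allow ${}/ characters
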
@@ -83,16 +83,16 @@
 
     if not os.path.isabs(fn):
         dname = os.path.dirname(parentfn)
-        bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
+        bbpath = "%s:%s" % (dname, data.getVar("BBPATH"))
         abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
         if abs_fn and bb.parse.check_dependency(data, abs_fn):
-            logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
+            logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE')))
         for af in attempts:
             bb.parse.mark_dependency(data, af)
         if abs_fn:
             fn = abs_fn
     elif bb.parse.check_dependency(data, fn):
-        logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
+        logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE')))
 
     try:
         bb.parse.handle(fn, data, True)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
index bb6deca..bef7018 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py
@@ -28,11 +28,7 @@
 import warnings
 from bb.compat import total_ordering
 from collections import Mapping
-
-try:
-    import sqlite3
-except ImportError:
-    from pysqlite2 import dbapi2 as sqlite3
+import sqlite3
 
 sqlversion = sqlite3.sqlite_version_info
 if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -207,8 +203,8 @@
 def persist(domain, d):
     """Convenience factory for SQLTable objects based upon metadata"""
     import bb.utils
-    cachedir = (d.getVar("PERSISTENT_DIR", True) or
-                d.getVar("CACHE", True))
+    cachedir = (d.getVar("PERSISTENT_DIR") or
+                d.getVar("CACHE"))
     if not cachedir:
         logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
         sys.exit(1)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/process.py b/import-layers/yocto-poky/bitbake/lib/bb/process.py
index c62d7bc..a4a5599 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/process.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/process.py
@@ -162,9 +162,9 @@
         stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
     else:
         stdout, stderr = pipe.communicate(input)
-        if stdout:
+        if stdout is not None:
             stdout = stdout.decode("utf-8")
-        if stderr:
+        if stderr is not None:
             stderr = stderr.decode("utf-8")
 
     if pipe.returncode != 0:
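
The truthiness-to-None change matters for commands that succeed with empty output: communicate() returns b"", which is falsy, so the old test skipped the decode and handed bytes to callers expecting str. A short demonstration (POSIX true(1) assumed):

import subprocess

pipe = subprocess.Popen(["true"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = pipe.communicate()   # both are b"" here, which is falsy

if stdout is not None:                # the corrected test still decodes b""
    stdout = stdout.decode("utf-8")
if stderr is not None:
    stderr = stderr.decode("utf-8")

assert stdout == "" and isinstance(stdout, str)
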
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/providers.py b/import-layers/yocto-poky/bitbake/lib/bb/providers.py
index db02a0b..443187e 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/providers.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/providers.py
@@ -48,7 +48,6 @@
 
     # Need to ensure data store is expanded
     localdata = data.createCopy(cfgData)
-    bb.data.update_data(localdata)
     bb.data.expandKeys(localdata)
 
     preferred_versions = {}
@@ -123,11 +122,11 @@
 
     # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
     # hence we do this manually rather than use OVERRIDES
-    preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True)
+    preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn)
     if not preferred_v:
-        preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True)
+        preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn)
     if not preferred_v:
-        preferred_v = cfgData.getVar("PREFERRED_VERSION", True)
+        preferred_v = cfgData.getVar("PREFERRED_VERSION")
 
     if preferred_v:
         m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
@@ -289,7 +288,7 @@
 
     eligible = _filterProviders(providers, item, cfgData, dataCache)
 
-    prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True)
+    prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item)
     if prefervar:
         dataCache.preferred[item] = prefervar
 
@@ -318,7 +317,7 @@
     eligible = _filterProviders(providers, item, cfgData, dataCache)
 
     # First try and match any PREFERRED_RPROVIDER entry
-    prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True)
+    prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item)
     foundUnique = False
     if prefervar:
         for p in eligible:
@@ -345,7 +344,7 @@
             pn = dataCache.pkg_fn[p]
             provides = dataCache.pn_provides[pn]
             for provide in provides:
-                prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
+                prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide)
                 #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
                 if prefervar in pns and pns[prefervar] not in preferred:
                     var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py b/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py
new file mode 100644
index 0000000..68ecffc
--- /dev/null
+++ b/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py
@@ -0,0 +1,116 @@
+"""
+BitBake 'remotedata' module
+
+Provides support for using a datastore from the bitbake client
+"""
+
+# Copyright (C) 2016  Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import bb.data
+
+class RemoteDatastores:
+    """Used on the server side to manage references to server-side datastores"""
+    def __init__(self, cooker):
+        self.cooker = cooker
+        self.datastores = {}
+        self.locked = []
+        self.nextindex = 1
+
+    def __len__(self):
+        return len(self.datastores)
+
+    def __getitem__(self, key):
+        if key is None:
+            return self.cooker.data
+        else:
+            return self.datastores[key]
+
+    def items(self):
+        return self.datastores.items()
+
+    def store(self, d, locked=False):
+        """
+        Put a datastore into the collection. If locked=True then the datastore
+        is understood to be managed externally and cannot be released by calling
+        release().
+        """
+        idx = self.nextindex
+        self.datastores[idx] = d
+        if locked:
+            self.locked.append(idx)
+        self.nextindex += 1
+        return idx
+
+    def check_store(self, d, locked=False):
+        """
+        Put a datastore into the collection if it's not already in there;
+        in either case return the index
+        """
+        for key, val in self.datastores.items():
+            if val is d:
+                idx = key
+                break
+        else:
+            idx = self.store(d, locked)
+        return idx
+
+    def release(self, idx):
+        """Discard a datastore in the collection"""
+        if idx in self.locked:
+            raise Exception('Tried to release locked datastore %d' % idx)
+        del self.datastores[idx]
+
+    def receive_datastore(self, remote_data):
+        """Receive a datastore object sent from the client (as prepared by transmit_datastore())"""
+        dct = dict(remote_data)
+        d = bb.data_smart.DataSmart()
+        d.dict = dct
+        while True:
+            if '_remote_data' in dct:
+                dsindex = dct['_remote_data']['_content']
+                del dct['_remote_data']
+                if dsindex is None:
+                    dct['_data'] = self.cooker.data.dict
+                else:
+                    dct['_data'] = self.datastores[dsindex].dict
+                break
+            elif '_data' in dct:
+                idct = dict(dct['_data'])
+                dct['_data'] = idct
+                dct = idct
+            else:
+                break
+        return d
+
+    @staticmethod
+    def transmit_datastore(d):
+        """Prepare a datastore object for sending over IPC from the client end"""
+        # FIXME content might be a dict, need to turn that into a list as well
+        def copy_dicts(dct):
+            if '_remote_data' in dct:
+                dsindex = dct['_remote_data']['_content'].dsindex
+                newdct = dct.copy()
+                newdct['_remote_data'] = {'_content': dsindex}
+                return list(newdct.items())
+            elif '_data' in dct:
+                newdct = dct.copy()
+                newdata = copy_dicts(dct['_data'])
+                if newdata:
+                    newdct['_data'] = newdata
+                return list(newdct.items())
+            return None
+        main_dict = copy_dicts(d.dict)
+        return main_dict
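
The transmit/receive pair flattens the datastore's copy-on-write dict chain (linked through the '_data' key) into nested lists of items so it can cross the IPC boundary, rebuilding dicts on the server side. A simplified standalone sketch of the round trip, using plain dicts and ignoring the '_remote_data' indirection:

def transmit(dct):
    # Flatten a '_data'-linked dict chain into nested lists of items.
    if '_data' in dct:
        newdct = dct.copy()
        newdct['_data'] = transmit(dct['_data'])
        return list(newdct.items())
    return list(dct.items())

def receive(remote_data):
    # Rebuild the dict chain from the nested item lists.
    dct = dict(remote_data)
    if '_data' in dct:
        dct['_data'] = receive(dct['_data'])
    return dct

layered = {'PN': 'foo', '_data': {'PV': '1.0', '_data': {'PR': 'r0'}}}
assert receive(transmit(layered)) == layered
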
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
index 9384c72..7d2ff81 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
@@ -36,6 +36,7 @@
 from bb import monitordisk
 import subprocess
 import pickle
+from multiprocessing import Process
 
 bblogger = logging.getLogger("BitBake")
 logger = logging.getLogger("BitBake.RunQueue")
@@ -183,6 +184,18 @@
     def newbuilable(self, task):
         self.buildable.append(task)
 
+    def describe_task(self, taskid):
+        result = 'ID %s' % taskid
+        if self.rev_prio_map:
+            result = result + (' pri %d' % self.rev_prio_map[taskid])
+        return result
+
+    def dump_prio(self, comment):
+        bb.debug(3, '%s (most important first):\n%s' %
+                 (comment,
+                  '\n'.join(['%d. %s' % (index + 1, self.describe_task(taskid)) for
+                             index, taskid in enumerate(self.prio_map)])))
+
 class RunQueueSchedulerSpeed(RunQueueScheduler):
     """
     A scheduler optimised for speed. The priority map is sorted by task weight,
@@ -212,35 +225,100 @@
 
 class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
     """
-    A scheduler optimised to complete .bb files are quickly as possible. The
+    A scheduler optimised to complete .bb files as quickly as possible. The
     priority map is sorted by task weight, but then reordered so once a given
-    .bb file starts to build, it's completed as quickly as possible. This works
-    well where disk space is at a premium and classes like OE's rm_work are in
-    force.
+    .bb file starts to build, it's completed as quickly as possible by
+    running all tasks related to the same .bb file one after the other.
+    This works well where disk space is at a premium and classes like OE's
+    rm_work are in force.
     """
     name = "completion"
 
     def __init__(self, runqueue, rqdata):
-        RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
+        super(RunQueueSchedulerCompletion, self).__init__(runqueue, rqdata)
 
-        #FIXME - whilst this groups all fns together it does not reorder the
-        #fn groups optimally.
+        # Extract list of tasks for each recipe, with tasks sorted
+        # ascending from "must run first" (typically do_fetch) to
+        # "runs last" (do_build). The speed scheduler prioritizes
+        # tasks that must run first before the ones that run later;
+        # this is what we depend on here.
+        task_lists = {}
+        for taskid in self.prio_map:
+            fn, taskname = taskid.rsplit(':', 1)
+            task_lists.setdefault(fn, []).append(taskname)
 
-        basemap = copy.deepcopy(self.prio_map)
-        self.prio_map = []
-        while (len(basemap) > 0):
-            entry = basemap.pop(0)
-            self.prio_map.append(entry)
-            fn = fn_from_tid(entry)
-            todel = []
-            for entry in basemap:
-                entry_fn = fn_from_tid(entry)
-                if entry_fn == fn:
-                    todel.append(basemap.index(entry))
-                    self.prio_map.append(entry)
-            todel.reverse()
-            for idx in todel:
-                del basemap[idx]
+        # Now unify the different task lists. The strategy is that
+        # common tasks get skipped and new ones get inserted after the
+        # preceding common one(s) as they are found. Because task
+        # lists should differ only in their number of tasks, not in
+        # the ordering of the common tasks, this should result in a
+        # deterministic result that is a superset of the individual
+        # task ordering.
+        all_tasks = []
+        for recipe, new_tasks in task_lists.items():
+            index = 0
+            old_task = all_tasks[index] if index < len(all_tasks) else None
+            for new_task in new_tasks:
+                if old_task == new_task:
+                    # Common task, skip it. This is the fast-path which
+                    # avoids a full search.
+                    index += 1
+                    old_task = all_tasks[index] if index < len(all_tasks) else None
+                else:
+                    try:
+                        index = all_tasks.index(new_task)
+                        # Already present, just not at the current
+                        # place. We re-synchronized by changing the
+                        # index so that it matches again. Now
+                        # move on to the next existing task.
+                        index += 1
+                        old_task = all_tasks[index] if index < len(all_tasks) else None
+                    except ValueError:
+                        # Not present. Insert before old_task, which
+                        # remains the same (but gets shifted back).
+                        all_tasks.insert(index, new_task)
+                        index += 1
+        bb.debug(3, 'merged task list: %s' % all_tasks)
+
+        # Now reverse the order so that tasks that finish the work on one
+        # recipe are considered more important (= come first). The ordering
+        # is now such that do_build is most important.
+        all_tasks.reverse()
+
+        # Group tasks of the same kind before tasks of less important
+        # kinds at the head of the queue (because earlier = lower
+        # priority number = runs earlier), while preserving the
+        # ordering by recipe. If recipe foo is more important than
+        # bar, then the goal is to work on foo's do_populate_sysroot
+        # before bar's do_populate_sysroot and on the more important
+        # tasks of foo before any of the less important tasks in any
+        # other recipe (if those other recipes are more important than
+        # foo).
+        #
+        # All of this only applies when tasks are runnable. Explicit
+        # dependencies still override this ordering by priority.
+        #
+        # Here's an example why this priority re-ordering helps with
+        # minimizing disk usage. Consider a recipe foo with a higher
+        # priority than bar where foo DEPENDS on bar. Then the
+        # implicit rule (from base.bbclass) is that foo's do_configure
+        # depends on bar's do_populate_sysroot. This ensures that
+        # bar's do_populate_sysroot gets done first. Normally the
+        # tasks from foo would continue to run once that is done, and
+        # bar only gets completed and cleaned up later. By ordering
+        # bar's tasks that depend on bar's do_populate_sysroot before foo's
+        # do_configure, that problem gets avoided.
+        task_index = 0
+        self.dump_prio('original priorities')
+        for task in all_tasks:
+            for index in range(task_index, self.numTasks):
+                taskid = self.prio_map[index]
+                taskname = taskid.rsplit(':', 1)[1]
+                if taskname == task:
+                    del self.prio_map[index]
+                    self.prio_map.insert(task_index, taskid)
+                    task_index += 1
+        self.dump_prio('completion priorities')
 
 class RunTaskEntry(object):
     def __init__(self):
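
The task-list unification described in the comments above can be sketched as a standalone function (hypothetical merge_task_lists helper; the real logic lives inline in the scheduler's __init__):

def merge_task_lists(task_lists):
    # Merge per-recipe task-name lists into one ordered superset:
    # common tasks are matched up, new tasks are inserted after the
    # preceding common ones, mirroring the scheduler code above.
    all_tasks = []
    for new_tasks in task_lists:
        index = 0
        for new_task in new_tasks:
            if index < len(all_tasks) and all_tasks[index] == new_task:
                index += 1                                # common task, fast path
            else:
                try:
                    index = all_tasks.index(new_task) + 1  # re-synchronise
                except ValueError:
                    all_tasks.insert(index, new_task)      # genuinely new task
                    index += 1
    return all_tasks

merged = merge_task_lists([
    ['do_fetch', 'do_compile', 'do_build'],
    ['do_fetch', 'do_patch', 'do_compile', 'do_build'],
])
assert merged == ['do_fetch', 'do_patch', 'do_compile', 'do_build']
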
@@ -262,10 +340,11 @@
         self.rq = rq
         self.warn_multi_bb = False
 
-        self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
-        self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+        self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
+        self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
         self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData)
         self.setscenewhitelist_checked = False
+        self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
         self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
 
         self.reset()
@@ -565,6 +644,8 @@
                 for (depname, idependtask) in irdepends:
                     if depname in taskData[mc].run_targets:
                         # Won't be in run_targets if ASSUME_PROVIDED
+                        if not taskData[mc].run_targets[depname]:
+                            continue
                         depdata = taskData[mc].run_targets[depname][0]
                         if depdata is not None:
                             t = depdata + ":" + idependtask
@@ -616,6 +697,9 @@
                 seendeps.add(t)
                 newdeps.add(t)
                 for i in newdeps:
+                    if i not in self.runtaskentries:
+                        # Not all recipes might have the recrdeptask task as a task
+                        continue
                     task = self.runtaskentries[i].task
                     for n in self.runtaskentries[i].depends:
                         if n not in seendeps:
@@ -722,6 +806,23 @@
 
         self.init_progress_reporter.next_stage()
 
+        if self.cooker.configuration.runall is not None:
+            runall = "do_%s" % self.cooker.configuration.runall
+            runall_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == runall }
+
+            # re-run the mark_active and then drop unused tasks from new list
+            runq_build = {}
+            for tid in list(runall_tids):
+                mark_active(tid, 1)
+
+            for tid in list(self.runtaskentries.keys()):
+                if tid not in runq_build:
+                    del self.runtaskentries[tid]
+                    delcount += 1
+
+            if len(self.runtaskentries) == 0:
+                bb.msg.fatal("RunQueue", "No remaining tasks to run for build target %s with runall %s" % (target, runall))
+
         #
         # Step D - Sanity checks and computation
         #
@@ -976,16 +1077,22 @@
         self.cfgData = cfgData
         self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
 
-        self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
-        self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
-        self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2", True) or None
-        self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None
+        self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
+        self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
+        self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2") or None
+        self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
 
         self.state = runQueuePrepare
 
         # For disk space monitor
+        # Invoked at regular time intervals via the bitbake heartbeat event
+        # while the build is running. We generate a unique name for the handler
+        # here, just in case there is ever more than one RunQueue instance;
+        # start the handler when reaching runQueueSceneRun, and stop it when
+        # done with the build.
         self.dm = monitordisk.diskMonitor(cfgData)
-
+        self.dm_event_handler_name = '_bb_diskmonitor_' + str(id(self))
+        self.dm_event_handler_registered = False
         self.rqexe = None
         self.worker = {}
         self.fakeworker = {}
@@ -998,8 +1105,8 @@
         if fakeroot:
             magic = magic + "beef"
             mcdata = self.cooker.databuilder.mcdata[mc]
-            fakerootcmd = mcdata.getVar("FAKEROOTCMD", True)
-            fakerootenv = (mcdata.getVar("FAKEROOTBASEENV", True) or "").split()
+            fakerootcmd = mcdata.getVar("FAKEROOTCMD")
+            fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
             env = os.environ.copy()
             for key, value in (var.split('=') for var in fakerootenv):
                 env[key] = value
@@ -1025,12 +1132,13 @@
             "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
             "logdefaultdomain" : bb.msg.loggerDefaultDomains,
             "prhost" : self.cooker.prhost,
-            "buildname" : self.cfgData.getVar("BUILDNAME", True),
-            "date" : self.cfgData.getVar("DATE", True),
-            "time" : self.cfgData.getVar("TIME", True),
+            "buildname" : self.cfgData.getVar("BUILDNAME"),
+            "date" : self.cfgData.getVar("DATE"),
+            "time" : self.cfgData.getVar("TIME"),
         }
 
         worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
+        worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>")
         worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
         worker.stdin.flush()
 
@@ -1208,10 +1316,12 @@
                 self.rqdata.init_progress_reporter.next_stage()
                 self.rqexe = RunQueueExecuteScenequeue(self)
 
-        if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
-            self.dm.check(self)
-
         if self.state is runQueueSceneRun:
+            if not self.dm_event_handler_registered:
+                res = bb.event.register(self.dm_event_handler_name,
+                                        lambda x: self.dm.check(self) if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp] else False,
+                                        ('bb.event.HeartbeatEvent',))
+                self.dm_event_handler_registered = True
             retval = self.rqexe.execute()
 
         if self.state is runQueueRunInit:
@@ -1230,7 +1340,13 @@
         if self.state is runQueueCleanUp:
             retval = self.rqexe.finish()
 
-        if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
+        build_done = self.state is runQueueComplete or self.state is runQueueFailed
+
+        if build_done and self.dm_event_handler_registered:
+            bb.event.remove(self.dm_event_handler_name, None)
+            self.dm_event_handler_registered = False
+
+        if build_done and self.rqexe:
             self.teardown_workers()
             if self.rqexe.stats.failed:
                 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
@@ -1287,15 +1403,36 @@
         else:
             self.rqexe.finish()
 
-    def dump_signatures(self, options):
-        done = set()
-        bb.note("Reparsing files to collect dependency data")
+    def rq_dump_sigfn(self, fn, options):
         bb_cache = bb.cache.NoCache(self.cooker.databuilder)
+        the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn))
+        siggen = bb.parse.siggen
+        dataCaches = self.rqdata.dataCaches
+        siggen.dump_sigfn(fn, dataCaches, options)
+
+    def dump_signatures(self, options):
+        fns = set()
+        bb.note("Reparsing files to collect dependency data")
+
         for tid in self.rqdata.runtaskentries:
             fn = fn_from_tid(tid)
-            if fn not in done:
-                the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn))
-                done.add(fn)
+            fns.add(fn)
+
+        max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+        # We cannot use the real multiprocessing.Pool easily due to some local data
+        # that can't be pickled. This is a cheap multi-process solution.
+        launched = []
+        while fns:
+            if len(launched) < max_process:
+                p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+                p.start()
+                launched.append(p)
+            for q in launched:
+                # The finished processes are joined when calling is_alive()
+                if not q.is_alive():
+                    launched.remove(q)
+        for p in launched:
+            p.join()
 
         bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
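
The "cheap multi-process solution" above avoids multiprocessing.Pool because the worker closes over state that cannot be pickled; it launches at most max_process children and reaps finished ones by polling is_alive() (which joins exited processes as a side effect). A standalone sketch of the pattern, iterating over a copy of the list so it can be mutated safely mid-loop:

from multiprocessing import Process

def work(item):
    print('processed', item)

def run_bounded(items, max_procs):
    # Launch up to max_procs workers, reap finished ones by polling
    # is_alive(), then join whatever is still running at the end.
    launched = []
    pending = list(items)
    while pending:
        if len(launched) < max_procs:
            p = Process(target=work, args=(pending.pop(),))
            p.start()
            launched.append(p)
        for q in launched[:]:          # copy: we remove while iterating
            if not q.is_alive():
                launched.remove(q)
    for p in launched:
        p.join()

if __name__ == '__main__':
    run_bounded(range(8), max_procs=3)
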
 
@@ -1326,7 +1463,7 @@
             sq_hash.append(self.rqdata.runtaskentries[tid].hash)
             sq_taskname.append(taskname)
             sq_task.append(tid)
-        locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
+        locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
         try:
             call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
             valid = bb.utils.better_eval(call, locs)
@@ -1427,8 +1564,8 @@
         self.cfgData = rq.cfgData
         self.rqdata = rq.rqdata
 
-        self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
-        self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
+        self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
+        self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
 
         self.runq_buildable = set()
         self.runq_running = set()
@@ -1510,7 +1647,7 @@
             pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
             taskdata[dep] = [pn, taskname, fn]
         call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
-        locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
+        locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
         valid = bb.utils.better_eval(call, locs)
         return valid
 
@@ -1578,7 +1715,7 @@
                 invalidtasks.append(tid)
 
             call = self.rq.setsceneverify + "(covered, tasknames, fns, d, invalidtasks=invalidtasks)"
-            locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : tasknames, "fns" : fns, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
+            locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : tasknames, "fns" : fns, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
             covered_remove = bb.utils.better_eval(call, locs)
 
         def removecoveredtask(tid):
@@ -1630,7 +1767,7 @@
                              if type(obj) is type and
                                 issubclass(obj, RunQueueScheduler))
 
-        user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
+        user_schedulers = self.cfgData.getVar("BB_SCHEDULERS")
         if user_schedulers:
             for sched in user_schedulers.split():
                 if not "." in sched:
@@ -1775,7 +1912,7 @@
                 bb.event.fire(startevent, self.cfgData)
                 self.runq_running.add(task)
                 self.stats.taskActive()
-                if not self.cooker.configuration.dry_run:
+                if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                     bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
                 self.task_complete(task)
                 return True
@@ -1786,7 +1923,7 @@
             taskdepdata = self.build_taskdepdata(task)
 
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
+            if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                 if not mc in self.rq.fakeworker:
                     try:
                         self.rq.start_fakeworker(self, mc)
@@ -1795,10 +1932,10 @@
                         self.rq.state = runQueueFailed
                         self.stats.taskFailed()
                         return True
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -1839,7 +1976,8 @@
                 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
                 deps = self.rqdata.runtaskentries[revdep].depends
                 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-                taskdepdata[revdep] = [pn, taskname, fn, deps, provides]
+                taskhash = self.rqdata.runtaskentries[revdep].hash
+                taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
                 for revdep2 in deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
@@ -1892,6 +2030,8 @@
         for tid in self.rqdata.runq_setscene_tids:
             #bb.warn("Added endpoint 2 %s" % (tid))
             for dep in self.rqdata.runtaskentries[tid].depends:
+                    if tid in sq_revdeps[dep]:
+                        sq_revdeps[dep].remove(tid)
                     if dep not in endpoints:
                         endpoints[dep] = set()
                     #bb.warn("  Added endpoint 3 %s" % (dep))
@@ -1911,12 +2051,13 @@
                 if point in self.rqdata.runq_setscene_tids:
                     sq_revdeps_new[point] = tasks
                     tasks = set()
+                    continue
                 for dep in self.rqdata.runtaskentries[point].depends:
                     if point in sq_revdeps[dep]:
                         sq_revdeps[dep].remove(point)
                     if tasks:
                         sq_revdeps_new[dep] |= tasks
-                    if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene_tids:
+                    if len(sq_revdeps[dep]) == 0 and dep not in self.rqdata.runq_setscene_tids:
                         newendpoints[dep] = task
             if len(newendpoints) != 0:
                 process_endpoints(newendpoints)
@@ -2072,7 +2213,7 @@
                 sq_taskname.append(taskname)
                 sq_task.append(tid)
             call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
-            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
+            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
             valid = bb.utils.better_eval(call, locs)
 
             valid_new = stamppresent
@@ -2199,14 +2340,16 @@
             startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
             bb.event.fire(startevent, self.cfgData)
 
+            taskdepdata = self.build_taskdepdata(task)
+
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
                 if not mc in self.rq.fakeworker:
                     self.rq.start_fakeworker(self, mc)
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2241,6 +2384,44 @@
     def runqueue_process_waitpid(self, task, status):
         RunQueueExecute.runqueue_process_waitpid(self, task, status)
 
+
+    def build_taskdepdata(self, task):
+        def getsetscenedeps(tid):
+            deps = set()
+            (mc, fn, taskname, _) = split_tid_mcfn(tid)
+            realtid = tid + "_setscene"
+            idepends = self.rqdata.taskData[mc].taskentries[realtid].idepends
+            for (depname, idependtask) in idepends:
+                if depname not in self.rqdata.taskData[mc].build_targets:
+                    continue
+
+                depfn = self.rqdata.taskData[mc].build_targets[depname][0]
+                if depfn is None:
+                    continue
+                deptid = depfn + ":" + idependtask.replace("_setscene", "")
+                deps.add(deptid)
+            return deps
+
+        taskdepdata = {}
+        next = getsetscenedeps(task)
+        next.add(task)
+        while next:
+            additional = []
+            for revdep in next:
+                (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
+                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
+                deps = getsetscenedeps(revdep)
+                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
+                taskhash = self.rqdata.runtaskentries[revdep].hash
+                taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash]
+                for revdep2 in deps:
+                    if revdep2 not in taskdepdata:
+                        additional.append(revdep2)
+            next = additional
+
+        #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
+        return taskdepdata
+
 class TaskFailure(Exception):
     """
     Exception raised when a task in a runqueue fails
@@ -2406,9 +2587,9 @@
         self.input.close()
 
 def get_setscene_enforce_whitelist(d):
-    if d.getVar('BB_SETSCENE_ENFORCE', True) != '1':
+    if d.getVar('BB_SETSCENE_ENFORCE') != '1':
         return None
-    whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST", True) or "").split()
+    whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split()
     outlist = []
     for item in whitelist[:]:
         if item.startswith('%:'):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
index 982fcf7..c3c1450 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
@@ -92,6 +92,8 @@
         self.event = EventAdapter(event_queue)
         self.featurelist = featurelist
         self.quit = False
+        self.heartbeat_seconds = 1 # default, BB_HEARTBEAT_EVENT will be checked once we have a datastore.
+        self.next_heartbeat = time.time()
 
         self.quitin, self.quitout = Pipe()
         self.event_handle = multiprocessing.Value("i")
@@ -101,6 +103,14 @@
             self.event_queue.put(event)
         self.event_handle.value = bb.event.register_UIHhandler(self, True)
 
+        heartbeat_event = self.cooker.data.getVar('BB_HEARTBEAT_EVENT')
+        if heartbeat_event:
+            try:
+                self.heartbeat_seconds = float(heartbeat_event)
+            except:
+                # Throwing an exception here causes bitbake to hang.
+                # Just warn about the invalid setting and continue
+                bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat_event)
         bb.cooker.server_main(self.cooker, self.main)
 
     def main(self):
@@ -160,6 +170,21 @@
                 del self._idlefuns[function]
                 self.quit = True
 
+        # Create new heartbeat event?
+        now = time.time()
+        if now >= self.next_heartbeat:
+            # We might have missed heartbeats. Just trigger once in
+            # that case and continue after the usual delay.
+            self.next_heartbeat += self.heartbeat_seconds
+            if self.next_heartbeat <= now:
+                self.next_heartbeat = now + self.heartbeat_seconds
+            heartbeat = bb.event.HeartbeatEvent(now)
+            bb.event.fire(heartbeat, self.cooker.data)
+        if nextsleep and now + nextsleep > self.next_heartbeat:
+            # Shorten timeout so that we wake up in time for
+            # the heartbeat.
+            nextsleep = self.next_heartbeat - now
+
         if nextsleep is not None:
             select.select(fds,[],[],nextsleep)
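
The heartbeat arithmetic above fires at most one (possibly late) event per pass, resynchronises after missed beats, and clamps the idle sleep so the server wakes in time for the next beat. A standalone sketch of the same logic (hypothetical HeartbeatClock, not a bitbake class):

import time

class HeartbeatClock:
    def __init__(self, interval):
        self.interval = interval
        self.next_heartbeat = time.time()

    def check(self, nextsleep):
        now = time.time()
        if now >= self.next_heartbeat:
            self.next_heartbeat += self.interval
            if self.next_heartbeat <= now:
                # Missed one or more beats: fire once, then resynchronise.
                self.next_heartbeat = now + self.interval
            print("heartbeat at", now)
        if nextsleep and now + nextsleep > self.next_heartbeat:
            nextsleep = self.next_heartbeat - now  # don't oversleep the beat
        return nextsleep

clock = HeartbeatClock(1.0)
for _ in range(3):
    time.sleep(clock.check(10) or 0)
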
 
@@ -199,7 +224,6 @@
                 if isinstance(event, logging.LogRecord):
                     logger.handle(event)
 
-        signal.signal(signal.SIGINT, signal.SIG_IGN)
         self.procserver.stop()
 
         while self.procserver.is_alive():
@@ -209,6 +233,9 @@
         self.ui_channel.close()
         self.event_queue.close()
         self.event_queue.setexit()
+        # XXX: Explicitly call close() on _writer to avoid an fd leak,
+        # because Queue.close() does not close it for us.
+        self.event_queue._writer.close()
 
 # Wrap Queue to provide API which isn't server implementation specific
 class ProcessEventQueue(multiprocessing.queues.Queue):
@@ -240,7 +267,6 @@
                 sys.exit(1)
             return None
 
-
 class BitBakeServer(BitBakeBaseServer):
     def initServer(self, single_use=True):
         # establish communication channels.  We use bidirectional pipes for
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
index 452f14b..a06007f 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py
@@ -190,7 +190,7 @@
         self.send_header("Content-type", "text/plain")
         self.send_header("Content-length", str(len(response)))
         self.end_headers()
-        self.wfile.write(response)
+        self.wfile.write(bytes(response, 'utf-8'))
 
 
 class XMLRPCProxyServer(BaseImplServer):
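
Under Python 3 the handler's wfile is a binary stream, so str responses must be encoded before writing; that is all the one-line change above does. A minimal hypothetical handler showing the same pattern outside bitbake:

from http.server import BaseHTTPRequestHandler, HTTPServer

class PlainTextHandler(BaseHTTPRequestHandler):
    # Hypothetical handler; mirrors the send path in the hunk above.
    def do_GET(self):
        response = "hello"
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(bytes(response, "utf-8"))  # bytes, not str

if __name__ == "__main__":
    HTTPServer(("localhost", 8000), PlainTextHandler).serve_forever()
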
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
index 542bbb9..f71190a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py
@@ -5,6 +5,8 @@
 import tempfile
 import pickle
 import bb.data
+import difflib
+import simplediff
 from bb.checksum import FileChecksumCache
 
 logger = logging.getLogger('BitBake.SigGen')
@@ -13,7 +15,7 @@
     siggens = [obj for obj in globals().values()
                       if type(obj) is type and issubclass(obj, SignatureGenerator)]
 
-    desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
+    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
     for sg in siggens:
         if desired == sg.name:
             return sg(d)
@@ -82,10 +84,10 @@
         self.gendeps = {}
         self.lookupcache = {}
         self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
-        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
+        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
         self.taskwhitelist = None
         self.init_rundepcheck(data)
-        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True)
+        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
         if checksum_cache_file:
             self.checksum_cache = FileChecksumCache()
             self.checksum_cache.init_cache(data, checksum_cache_file)
@@ -93,7 +95,7 @@
             self.checksum_cache = None
 
     def init_rundepcheck(self, data):
-        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
+        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
         if self.taskwhitelist:
             self.twl = re.compile(self.taskwhitelist)
         else:
@@ -101,6 +103,7 @@
 
     def _build_data(self, fn, d):
 
+        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
         tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
 
         taskdeps = {}
@@ -135,7 +138,7 @@
                     data = data + str(var)
             datahash = hashlib.md5(data.encode("utf-8")).hexdigest()
             k = fn + "." + task
-            if k in self.basehash and self.basehash[k] != datahash:
+            if not ignore_mismatch and k in self.basehash and self.basehash[k] != datahash:
                 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], datahash))
             self.basehash[k] = datahash
             taskdeps[task] = alldeps
@@ -154,13 +157,15 @@
 
         try:
             taskdeps = self._build_data(fn, d)
+        except bb.parse.SkipRecipe:
+            raise
         except:
             bb.warn("Error during finalise of %s" % fn)
             raise
 
         #Slow but can be useful for debugging mismatched basehashes
         #for task in self.taskdeps[fn]:
-        #    self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
+        #    self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
 
         for task in taskdeps:
             d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
@@ -306,8 +311,8 @@
                 pass
             raise err
 
-    def dump_sigs(self, dataCaches, options):
-        for fn in self.taskdeps:
+    def dump_sigfn(self, fn, dataCaches, options):
+        if fn in self.taskdeps:
             for task in self.taskdeps[fn]:
                 tid = fn + ":" + task
                 (mc, _, _) = bb.runqueue.split_tid(tid)
@@ -345,16 +350,67 @@
 
 def dump_this_task(outfile, d):
     import bb.parse
-    fn = d.getVar("BB_FILENAME", True)
-    task = "do_" + d.getVar("BB_CURRENTTASK", True)
+    fn = d.getVar("BB_FILENAME")
+    task = "do_" + d.getVar("BB_CURRENTTASK")
     referencestamp = bb.build.stamp_internal(task, d, None, True)
     bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
 
+def init_colors(enable_color):
+    """Initialise colour dict for passing to compare_sigfiles()"""
+    # First set up the colours
+    colors = {'color_title':   '\033[1;37;40m',
+              'color_default': '\033[0;37;40m',
+              'color_add':     '\033[1;32;40m',
+              'color_remove':  '\033[1;31;40m',
+             }
+    # Leave all keys present but clear the values
+    if not enable_color:
+        for k in colors.keys():
+            colors[k] = ''
+    return colors
+
+def worddiff_str(oldstr, newstr, colors=None):
+    if not colors:
+        colors = init_colors(False)
+    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
+    ret = []
+    for change, value in diff:
+        value = ' '.join(value)
+        if change == '=':
+            ret.append(value)
+        elif change == '+':
+            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
+            ret.append(item)
+        elif change == '-':
+            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
+            ret.append(item)
+    whitespace_note = ''
+    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
+        whitespace_note = ' (whitespace changed)'
+    return '"%s"%s' % (' '.join(ret), whitespace_note)
+
+def list_inline_diff(oldlist, newlist, colors=None):
+    if not colors:
+        colors = init_colors(False)
+    diff = simplediff.diff(oldlist, newlist)
+    ret = []
+    for change, value in diff:
+        value = ' '.join(value)
+        if change == '=':
+            ret.append("'%s'" % value)
+        elif change == '+':
+            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
+            ret.append(item)
+        elif change == '-':
+            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
+            ret.append(item)
+    return '[%s]' % (', '.join(ret))
+
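
A usage sketch for the two helpers above (assumes bitbake's lib directory is on sys.path so that bb.siggen and the vendored simplediff module import cleanly; colours disabled for plain output):

from bb.siggen import init_colors, worddiff_str, list_inline_diff

colors = init_colors(False)  # keys present, colour codes cleared

print(worddiff_str("make all", "make install", colors))
# "make [-all-] {+install+}"

print(list_inline_diff(["do_fetch", "do_compile"],
                       ["do_fetch", "do_patch", "do_compile"], colors))
# ['do_fetch', +do_patch, 'do_compile']
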
 def clean_basepath(a):
     mc = None
     if a.startswith("multiconfig:"):
         _, mc, a = a.split(":", 2)
-    b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
+    b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2]
     if a.startswith("virtual:"):
         b = b + ":" + a.rsplit(":", 1)[0]
     if mc:
@@ -373,9 +429,26 @@
         b.append(clean_basepath(x))
     return b
 
-def compare_sigfiles(a, b, recursecb = None):
+def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
     output = []
 
+    colors = init_colors(color)
+    def color_format(formatstr, **values):
+        """
+        Return colour formatted string.
+        NOTE: call with the format string, not an already formatted string
+        containing values (otherwise you could have trouble with { and }
+        characters)
+        """
+        if not formatstr.endswith('{color_default}'):
+            formatstr += '{color_default}'
+        # In newer python 3 versions you can pass both of these directly,
+        # but we only require 3.4 at the moment
+        formatparams = {}
+        formatparams.update(colors)
+        formatparams.update(values)
+        return formatstr.format(**formatparams)
+
     with open(a, 'rb') as f:
         p1 = pickle.Unpickler(f)
         a_data = p1.load()
@@ -429,39 +502,59 @@
         return changed, added, removed
 
     if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
-        output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
+        output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist']))
         if a_data['basewhitelist'] and b_data['basewhitelist']:
             output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
 
     if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
-        output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
+        output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist']))
         if a_data['taskwhitelist'] and b_data['taskwhitelist']:
             output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
 
     if a_data['taskdeps'] != b_data['taskdeps']:
-        output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
+        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
 
-    if a_data['basehash'] != b_data['basehash']:
-        output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
+    if a_data['basehash'] != b_data['basehash'] and not collapsed:
+        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
 
     changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
     if changed:
         for dep in changed:
-            output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
+            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
             if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
     if added:
         for dep in added:
-            output.append("Dependency on variable %s was added" % (dep))
+            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
     if removed:
         for dep in removed:
-            output.append("Dependency on Variable %s was removed" % (dep))
+            output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
 
 
     changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
     if changed:
         for dep in changed:
-            output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
+            oldval = a_data['varvals'][dep]
+            newval = b_data['varvals'][dep]
+            if newval and oldval and ('\n' in oldval or '\n' in newval):
+                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
+                # Cut off the first two lines, since we aren't interested in
+                # the old/new filename (they are blank anyway in this case)
+                difflines = list(diff)[2:]
+                if color:
+                    # Add colour to diff output
+                    for i, line in enumerate(difflines):
+                        if line.startswith('+'):
+                            line = color_format('{color_add}{line}', line=line)
+                            difflines[i] = line
+                        elif line.startswith('-'):
+                            line = color_format('{color_remove}{line}', line=line)
+                            difflines[i] = line
+                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
+            elif newval and oldval and (' ' in oldval or ' ' in newval):
+                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
+            else:
+                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
 
     if not 'file_checksum_values' in a_data:
          a_data['file_checksum_values'] = {}
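
For multi-line values the code above leans on difflib.unified_diff and drops the first two lines of output, which are the (empty) old/new file headers. The same recipe in isolation:

import difflib

oldval = "CFLAGS = -O2\nLDFLAGS ="
newval = "CFLAGS = -O3\nLDFLAGS ="

diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm="")
difflines = list(diff)[2:]  # cut the '--- ' and '+++ ' header lines
print("\n".join(difflines))
# @@ -1,2 +1,2 @@
# -CFLAGS = -O2
# +CFLAGS = -O3
#  LDFLAGS =
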
@@ -471,32 +564,38 @@
     changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
     if changed:
         for f, old, new in changed:
-            output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
+            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
     if added:
         for f in added:
-            output.append("Dependency on checksum of file %s was added" % (f))
+            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
     if removed:
         for f in removed:
-            output.append("Dependency on checksum of file %s was removed" % (f))
+            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))
 
     if not 'runtaskdeps' in a_data:
          a_data['runtaskdeps'] = {}
     if not 'runtaskdeps' in b_data:
          b_data['runtaskdeps'] = {}
 
-    if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
-        changed = ["Number of task dependencies changed"]
-    else:
-        changed = []
-        for idx, task in enumerate(a_data['runtaskdeps']):
-            a = a_data['runtaskdeps'][idx]
-            b = b_data['runtaskdeps'][idx]
-            if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
-                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
+    if not collapsed:
+        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
+            changed = ["Number of task dependencies changed"]
+        else:
+            changed = []
+            for idx, task in enumerate(a_data['runtaskdeps']):
+                a = a_data['runtaskdeps'][idx]
+                b = b_data['runtaskdeps'][idx]
+                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
+                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))
 
-    if changed:
-        output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
-        output.append("\n".join(changed))
+        if changed:
+            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
+            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
+            if clean_a != clean_b:
+                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
+            else:
+                output.append(color_format("{color_title}runtaskdeps changed:"))
+            output.append("\n".join(changed))
 
 
     if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
@@ -512,7 +611,7 @@
                             #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                             bdep_found = True
                 if not bdep_found:
-                    output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
+                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep]))
         if removed:
             for dep in removed:
                 adep_found = False
@@ -522,21 +621,25 @@
                             #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                             adep_found = True
                 if not adep_found:
-                    output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
+                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep]))
         if changed:
             for dep in changed:
-                output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
+                if not collapsed:
+                    output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep]))
                 if callable(recursecb):
-                    # If a dependent hash changed, might as well print the line above and then defer to the changes in 
-                    # that hash since in all likelyhood, they're the same changes this task also saw.
                     recout = recursecb(dep, a[dep], b[dep])
                     if recout:
-                        output = [output[-1]] + recout
+                        if collapsed:
+                            output.extend(recout)
+                        else:
+                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
+                            # that hash since in all likelihood, they're the same changes this task also saw.
+                            output = [output[-1]] + recout
 
     a_taint = a_data.get('taint', None)
     b_taint = b_data.get('taint', None)
     if a_taint != b_taint:
-        output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))
+        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))
 
     return output
 
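For context, the multi-line branch added above renders changed variable values as a unified diff instead of printing both values whole. A minimal standalone sketch of that path (illustrative only, not part of the patch; the values are hypothetical and colour handling is omitted):

    import difflib

    oldval = "line one\nline two"
    newval = "line one\nline 2"
    diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
    # Drop the first two lines (the blank ---/+++ filename headers)
    difflines = list(diff)[2:]
    print("Variable FOO value changed:\n%s" % '\n'.join(difflines))
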
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
index d8bdbca..8c96a56 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py
@@ -89,6 +89,19 @@
 
         self.add_extra_deps(fn, dataCache)
 
+        # Common code for the (dep_name, depends) parameter pairs ('depends', idepends) and ('rdepends', irdepends)
+        def handle_deps(task, dep_name, depends, seen):
+            if dep_name in task_deps and task in task_deps[dep_name]:
+                ids = []
+                for dep in task_deps[dep_name][task].split():
+                    if dep:
+                        parts = dep.split(":")
+                        if len(parts) != 2:
+                            bb.msg.fatal("TaskData", "Error for %s:%s[%s], dependency %s in '%s' does not contain exactly one ':' character.\n Task '%s' should be specified in the form 'packagename:task'" % (fn, task, dep_name, dep, task_deps[dep_name][task], dep_name))
+                        ids.append((parts[0], parts[1]))
+                        seen(parts[0])
+                depends.extend(ids)
+
         for task in task_deps['tasks']:
 
             tid = "%s:%s" % (fn, task)
@@ -105,24 +118,8 @@
             self.taskentries[tid].tdepends.extend(parentids)
 
             # Touch all intertask dependencies
-            if 'depends' in task_deps and task in task_deps['depends']:
-                ids = []
-                for dep in task_deps['depends'][task].split():
-                    if dep:
-                        if ":" not in dep:
-                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
-                        ids.append(((dep.split(":")[0]), dep.split(":")[1]))
-                        self.seen_build_target(dep.split(":")[0])
-                self.taskentries[tid].idepends.extend(ids)
-            if 'rdepends' in task_deps and task in task_deps['rdepends']:
-                ids = []
-                for dep in task_deps['rdepends'][task].split():
-                    if dep:
-                        if ":" not in dep:
-                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
-                        ids.append(((dep.split(":")[0]), dep.split(":")[1]))
-                        self.seen_run_target(dep.split(":")[0])
-                self.taskentries[tid].irdepends.extend(ids)
+            handle_deps(task, 'depends', self.taskentries[tid].idepends, self.seen_build_target)
+            handle_deps(task, 'rdepends', self.taskentries[tid].irdepends, self.seen_run_target)
 
         # Work out build dependencies
         if not fn in self.depids:
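The handle_deps() helper introduced above validates intertask dependencies of the form 'packagename:task'. A minimal sketch of the parsing it performs (illustrative only, not part of the patch; the dependency string is hypothetical):

    dep = "quilt-native:do_populate_sysroot"
    parts = dep.split(":")
    if len(parts) != 2:
        raise ValueError("dependency should be specified in the form 'packagename:task'")
    ids = [(parts[0], parts[1])]  # -> [('quilt-native', 'do_populate_sysroot')]
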
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
index 14f0e25..e30e78c 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py
@@ -49,6 +49,9 @@
     def assertExecs(self, execs):
         self.assertEqual(self.execs, execs)
 
+    def assertContains(self, contains):
+        self.assertEqual(self.contains, contains)
+
 class VariableReferenceTest(ReferenceTest):
 
     def parseExpression(self, exp):
@@ -68,7 +71,7 @@
 
     def test_python_reference(self):
         self.setEmptyVars(["BAR"])
-        self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
+        self.parseExpression("${@d.getVar('BAR') + 'foo'}")
         self.assertReferences(set(["BAR"]))
 
 class ShellReferenceTest(ReferenceTest):
@@ -201,6 +204,7 @@
 
         self.references = parsedvar.references | parser.references
         self.execs = parser.execs
+        self.contains = parser.contains
 
     @staticmethod
     def indent(value):
@@ -209,17 +213,17 @@
         return " " + value
 
     def test_getvar_reference(self):
-        self.parseExpression("bb.data.getVar('foo', d, True)")
+        self.parseExpression("d.getVar('foo')")
         self.assertReferences(set(["foo"]))
         self.assertExecs(set())
 
     def test_getvar_computed_reference(self):
-        self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
+        self.parseExpression("d.getVar('f' + 'o' + 'o')")
         self.assertReferences(set())
         self.assertExecs(set())
 
     def test_getvar_exec_reference(self):
-        self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
+        self.parseExpression("eval('d.getVar(\"foo\")')")
         self.assertReferences(set())
         self.assertExecs(set(["eval"]))
 
@@ -265,15 +269,35 @@
         self.assertExecs(set(["testget"]))
         del self.context["testget"]
 
+    def test_contains(self):
+        self.parseExpression('bb.utils.contains("TESTVAR", "one", "true", "false", d)')
+        self.assertContains({'TESTVAR': {'one'}})
+
+    def test_contains_multi(self):
+        self.parseExpression('bb.utils.contains("TESTVAR", "one two", "true", "false", d)')
+        self.assertContains({'TESTVAR': {'one two'}})
+
+    def test_contains_any(self):
+        self.parseExpression('bb.utils.contains_any("TESTVAR", "hello", "true", "false", d)')
+        self.assertContains({'TESTVAR': {'hello'}})
+
+    def test_contains_any_multi(self):
+        self.parseExpression('bb.utils.contains_any("TESTVAR", "one two three", "true", "false", d)')
+        self.assertContains({'TESTVAR': {'one', 'two', 'three'}})
+
+    def test_contains_filter(self):
+        self.parseExpression('bb.utils.filter("TESTVAR", "hello there world", d)')
+        self.assertContains({'TESTVAR': {'hello', 'there', 'world'}})
+
 
 class DependencyReferenceTest(ReferenceTest):
 
     pydata = """
-bb.data.getVar('somevar', d, True)
+d.getVar('somevar')
 def test(d):
     foo = 'bar %s' % 'foo'
 def test2(d):
-    d.getVar(foo, True)
+    d.getVar(foo)
     d.getVar('bar', False)
     test2(d)
 
@@ -285,9 +309,9 @@
 
 test(d)
 
-bb.data.expand(bb.data.getVar("something", False, d), d)
-bb.data.expand("${inexpand} somethingelse", d)
-bb.data.getVar(a(), d, False)
+d.expand(d.getVar("something", False))
+d.expand("${inexpand} somethingelse")
+d.getVar(a(), False)
 """
 
     def test_python(self):
@@ -370,6 +394,30 @@
 
         self.assertEqual(deps, set(["oe_libinstall"]))
 
+    def test_contains_vardeps(self):
+        expr = '${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)} \
+                ${@bb.utils.contains("TESTVAR", "testval testval2", "yetanothervalue", "", d)} \
+                ${@bb.utils.contains("TESTVAR", "testval2 testval3", "blah", "", d)} \
+                ${@bb.utils.contains_any("TESTVAR", "testval2 testval3", "lastone", "", d)}'
+        parsedvar = self.d.expandWithRefs(expr, None)
+        # Check contains
+        self.assertEqual(parsedvar.contains, {'TESTVAR': {'testval2 testval3', 'anothervalue', 'somevalue', 'testval testval2', 'testval2', 'testval3'}})
+        # Check dependencies
+        self.d.setVar('ANOTHERVAR', expr)
+        self.d.setVar('TESTVAR', 'anothervalue testval testval2')
+        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
+        self.assertEqual(sorted(values.splitlines()),
+                         sorted([expr,
+                          'TESTVAR{anothervalue} = Set',
+                          'TESTVAR{somevalue} = Unset',
+                          'TESTVAR{testval testval2} = Set',
+                          'TESTVAR{testval2 testval3} = Unset',
+                          'TESTVAR{testval2} = Set',
+                          'TESTVAR{testval3} = Unset'
+                          ]))
+        # Check final value
+        self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
+
     #Currently no wildcard support
     #def test_vardeps_wildcards(self):
     #    self.d.setVar("oe_libinstall", "echo test")
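The new contains-tracking tests above rely on the all-vs-any semantics of bb.utils.contains() and bb.utils.contains_any(). A standalone re-implementation of the membership check, for illustration only (not part of the patch):

    def contains_all(value, checkvalues):
        # bb.utils.contains() picks the true value only if *every* item is present
        return set(checkvalues.split()).issubset(value.split())

    testvar = "anothervalue testval testval2"
    assert contains_all(testvar, "testval testval2")       # -> Set
    assert not contains_all(testvar, "testval2 testval3")  # -> Unset (testval3 missing)
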
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
index b54eb06..a4a9dd3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py
@@ -77,13 +77,13 @@
         self.assertEqual(str(val), "boo value_of_foo")
 
     def test_python_snippet_getvar(self):
-        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
+        val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
         self.assertEqual(str(val), "value_of_foo value_of_bar")
 
     def test_python_unexpanded(self):
         self.d.setVar("bar", "${unsetvar}")
-        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
-        self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}")
+        val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
+        self.assertEqual(str(val), "${@d.getVar('foo') + ' ${unsetvar}'}")
 
     def test_python_snippet_syntax_error(self):
         self.d.setVar("FOO", "${@foo = 5}")
@@ -99,7 +99,7 @@
         self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
 
     def test_value_containing_value(self):
-        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
+        val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
         self.assertEqual(str(val), "value_of_foo value_of_bar")
 
     def test_reference_undefined_var(self):
@@ -109,7 +109,7 @@
     def test_double_reference(self):
         self.d.setVar("BAR", "bar value")
         self.d.setVar("FOO", "${BAR} foo ${BAR}")
-        val = self.d.getVar("FOO", True)
+        val = self.d.getVar("FOO")
         self.assertEqual(str(val), "bar value foo bar value")
 
     def test_direct_recursion(self):
@@ -129,12 +129,12 @@
 
     def test_incomplete_varexp_single_quotes(self):
         self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
-        val = self.d.getVar("FOO", True)
+        val = self.d.getVar("FOO")
         self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
 
     def test_nonstring(self):
         self.d.setVar("TEST", 5)
-        val = self.d.getVar("TEST", True)
+        val = self.d.getVar("TEST")
         self.assertEqual(str(val), "5")
 
     def test_rename(self):
@@ -234,19 +234,19 @@
     def test_prepend(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.prependVar("TEST", "${FOO}:")
-        self.assertEqual(self.d.getVar("TEST", True), "foo:val")
+        self.assertEqual(self.d.getVar("TEST"), "foo:val")
 
     def test_append(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.appendVar("TEST", ":${BAR}")
-        self.assertEqual(self.d.getVar("TEST", True), "val:bar")
+        self.assertEqual(self.d.getVar("TEST"), "val:bar")
 
     def test_multiple_append(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.prependVar("TEST", "${FOO}:")
         self.d.appendVar("TEST", ":val2")
         self.d.appendVar("TEST", ":${BAR}")
-        self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
+        self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
 
 class TestConcatOverride(unittest.TestCase):
     def setUp(self):
@@ -258,62 +258,66 @@
     def test_prepend(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.setVar("TEST_prepend", "${FOO}:")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "foo:val")
+        self.assertEqual(self.d.getVar("TEST"), "foo:val")
 
     def test_append(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.setVar("TEST_append", ":${BAR}")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "val:bar")
+        self.assertEqual(self.d.getVar("TEST"), "val:bar")
 
     def test_multiple_append(self):
         self.d.setVar("TEST", "${VAL}")
         self.d.setVar("TEST_prepend", "${FOO}:")
         self.d.setVar("TEST_append", ":val2")
         self.d.setVar("TEST_append", ":${BAR}")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
+        self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
 
     def test_append_unset(self):
         self.d.setVar("TEST_prepend", "${FOO}:")
         self.d.setVar("TEST_append", ":val2")
         self.d.setVar("TEST_append", ":${BAR}")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")
+        self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
 
     def test_remove(self):
         self.d.setVar("TEST", "${VAL} ${BAR}")
         self.d.setVar("TEST_remove", "val")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "bar")
+        self.assertEqual(self.d.getVar("TEST"), "bar")
+
+    def test_remove_cleared(self):
+        self.d.setVar("TEST", "${VAL} ${BAR}")
+        self.d.setVar("TEST_remove", "val")
+        self.d.setVar("TEST", "${VAL} ${BAR}")
+        self.assertEqual(self.d.getVar("TEST"), "val bar")
+
+    # Ensure the value is unchanged if we have an inactive remove override
+    # (including that whitespace is preserved)
+    def test_remove_inactive_override(self):
+        self.d.setVar("TEST", "${VAL} ${BAR}    123")
+        self.d.setVar("TEST_remove_inactiveoverride", "val")
+        self.assertEqual(self.d.getVar("TEST"), "val bar    123")
 
     def test_doubleref_remove(self):
         self.d.setVar("TEST", "${VAL} ${BAR}")
         self.d.setVar("TEST_remove", "val")
         self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar")
+        self.assertEqual(self.d.getVar("TEST_TEST"), "bar bar")
 
     def test_empty_remove(self):
         self.d.setVar("TEST", "")
         self.d.setVar("TEST_remove", "val")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "")
+        self.assertEqual(self.d.getVar("TEST"), "")
 
     def test_remove_expansion(self):
         self.d.setVar("BAR", "Z")
         self.d.setVar("TEST", "${BAR}/X Y")
         self.d.setVar("TEST_remove", "${BAR}/X")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "Y")
+        self.assertEqual(self.d.getVar("TEST"), "Y")
 
     def test_remove_expansion_items(self):
         self.d.setVar("TEST", "A B C D")
         self.d.setVar("BAR", "B D")
         self.d.setVar("TEST_remove", "${BAR}")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "A C")
+        self.assertEqual(self.d.getVar("TEST"), "A C")
 
 class TestOverrides(unittest.TestCase):
     def setUp(self):
@@ -322,60 +326,53 @@
         self.d.setVar("TEST", "testvalue")
 
     def test_no_override(self):
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue")
 
     def test_one_override(self):
         self.d.setVar("TEST_bar", "testvalue2")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue2")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue2")
 
     def test_one_override_unset(self):
         self.d.setVar("TEST2_bar", "testvalue2")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
+
+        self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
         self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
 
     def test_multiple_override(self):
         self.d.setVar("TEST_bar", "testvalue2")
         self.d.setVar("TEST_local", "testvalue3")
         self.d.setVar("TEST_foo", "testvalue4")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue3")
         self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
 
     def test_multiple_combined_overrides(self):
         self.d.setVar("TEST_local_foo_bar", "testvalue3")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue3")
 
     def test_multiple_overrides_unset(self):
         self.d.setVar("TEST2_local_foo_bar", "testvalue3")
-        bb.data.update_data(self.d)
-        self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")
+        self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
 
     def test_keyexpansion_override(self):
         self.d.setVar("LOCAL", "local")
         self.d.setVar("TEST_bar", "testvalue2")
         self.d.setVar("TEST_${LOCAL}", "testvalue3")
         self.d.setVar("TEST_foo", "testvalue4")
-        bb.data.update_data(self.d)
         bb.data.expandKeys(self.d)
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue3")
 
     def test_rename_override(self):
         self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
         self.d.setVar("OVERRIDES", "class-target")
-        bb.data.update_data(self.d)
         self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
-        self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")
+        self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a")
 
     def test_underscore_override(self):
         self.d.setVar("TEST_bar", "testvalue2")
         self.d.setVar("TEST_some_val", "testvalue3")
         self.d.setVar("TEST_foo", "testvalue4")
         self.d.setVar("OVERRIDES", "foo:bar:some_val")
-        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
+        self.assertEqual(self.d.getVar("TEST"), "testvalue3")
 
 class TestKeyExpansion(unittest.TestCase):
     def setUp(self):
@@ -389,7 +386,7 @@
         with LogRecord() as logs:
             bb.data.expandKeys(self.d)
             self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
-        self.assertEqual(self.d.getVar("VAL_foo", True), "A")
+        self.assertEqual(self.d.getVar("VAL_foo"), "A")
 
 class TestFlags(unittest.TestCase):
     def setUp(self):
@@ -444,3 +441,167 @@
 
         self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
         self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))
+
+
+class Serialize(unittest.TestCase):
+
+    def test_serialize(self):
+        import tempfile
+        import pickle
+        d = bb.data.init()
+        d.enableTracking()
+        d.setVar('HELLO', 'world')
+        d.setVarFlag('HELLO', 'other', 'planet')
+        with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
+            tmpfilename = tmpfile.name
+            pickle.dump(d, tmpfile)
+
+        with open(tmpfilename, 'rb') as f:
+            newd = pickle.load(f)
+
+        os.remove(tmpfilename)
+
+        self.assertEqual(d, newd)
+        self.assertEqual(newd.getVar('HELLO'), 'world')
+        self.assertEqual(newd.getVarFlag('HELLO', 'other'), 'planet')
+
+
+# Remote datastore tests
+# These really only test the interface, since in actual usage we have a
+# tinfoil connector that does everything over RPC, and this doesn't test
+# that.
+
+class TestConnector:
+    d = None
+    def __init__(self, d):
+        self.d = d
+    def getVar(self, name):
+        return self.d._findVar(name)
+    def getKeys(self):
+        return set(self.d.keys())
+    def getVarHistory(self, name):
+        return self.d.varhistory.variable(name)
+    def expandPythonRef(self, varname, expr, d):
+        localdata = self.d.createCopy()
+        for key in d.localkeys():
+            localdata.setVar(key, d.getVar(key))
+        varparse = bb.data_smart.VariableParse(varname, localdata)
+        return varparse.python_sub(expr)
+    def setVar(self, name, value):
+        self.d.setVar(name, value)
+    def setVarFlag(self, name, flag, value):
+        self.d.setVarFlag(name, flag, value)
+    def delVar(self, name):
+        self.d.delVar(name)
+        return False
+    def delVarFlag(self, name, flag):
+        self.d.delVarFlag(name, flag)
+        return False
+    def renameVar(self, name, newname):
+        self.d.renameVar(name, newname)
+        return False
+
+class Remote(unittest.TestCase):
+    def test_remote(self):
+
+        d1 = bb.data.init()
+        d1.enableTracking()
+        d2 = bb.data.init()
+        d2.enableTracking()
+        connector = TestConnector(d1)
+
+        d2.setVar('_remote_data', connector)
+
+        d1.setVar('HELLO', 'world')
+        d1.setVarFlag('OTHER', 'flagname', 'flagvalue')
+        self.assertEqual(d2.getVar('HELLO'), 'world')
+        self.assertEqual(d2.expand('${HELLO}'), 'world')
+        self.assertEqual(d2.expand('${@d.getVar("HELLO")}'), 'world')
+        self.assertIn('flagname', d2.getVarFlags('OTHER'))
+        self.assertEqual(d2.getVarFlag('OTHER', 'flagname'), 'flagvalue')
+        self.assertEqual(d1.varhistory.variable('HELLO'), d2.varhistory.variable('HELLO'))
+        # Test setVar on client side affects server
+        d2.setVar('HELLO', 'other-world')
+        self.assertEqual(d1.getVar('HELLO'), 'other-world')
+        # Test setVarFlag on client side affects server
+        d2.setVarFlag('HELLO', 'flagname', 'flagvalue')
+        self.assertEqual(d1.getVarFlag('HELLO', 'flagname'), 'flagvalue')
+        # Test client side data is incorporated in python expansion (which is done on server)
+        d2.setVar('FOO', 'bar')
+        self.assertEqual(d2.expand('${@d.getVar("FOO")}'), 'bar')
+        # Test overrides work
+        d1.setVar('FOO_test', 'baz')
+        d1.appendVar('OVERRIDES', ':test')
+        self.assertEqual(d2.getVar('FOO'), 'baz')
+
+
+# Remote equivalents of local test classes
+# Note that these aren't perfect since we only test in one direction
+
+class RemoteDataExpansions(DataExpansions):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1["foo"] = "value_of_foo"
+        self.d1["bar"] = "value_of_bar"
+        self.d1["value_of_foo"] = "value_of_'value_of_foo'"
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteNestedExpansions(TestNestedExpansions):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1["foo"] = "foo"
+        self.d1["bar"] = "bar"
+        self.d1["value_of_foobar"] = "187"
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteConcat(TestConcat):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1.setVar("FOO", "foo")
+        self.d1.setVar("VAL", "val")
+        self.d1.setVar("BAR", "bar")
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteConcatOverride(TestConcatOverride):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1.setVar("FOO", "foo")
+        self.d1.setVar("VAL", "val")
+        self.d1.setVar("BAR", "bar")
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteOverrides(TestOverrides):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1.setVar("OVERRIDES", "foo:bar:local")
+        self.d1.setVar("TEST", "testvalue")
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteKeyExpansion(TestKeyExpansion):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1.setVar("FOO", "foo")
+        self.d1.setVar("BAR", "foo")
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
+
+class TestRemoteFlags(TestFlags):
+    def setUp(self):
+        self.d1 = bb.data.init()
+        self.d = bb.data.init()
+        self.d1.setVar("foo", "value of foo")
+        self.d1.setVarFlag("foo", "flag1", "value of flag1")
+        self.d1.setVarFlag("foo", "flag2", "value of flag2")
+        connector = TestConnector(self.d1)
+        self.d.setVar('_remote_data', connector)
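The remote test classes above all follow the same pattern: reads on one datastore are served by another through a connector. A minimal usage sketch (illustrative only, not part of the patch; assumes the TestConnector class defined above and bb.data are importable):

    d1 = bb.data.init()
    d2 = bb.data.init()
    d2.setVar('_remote_data', TestConnector(d1))
    d1.setVar('GREETING', 'hello')
    assert d2.getVar('GREETING') == 'hello'  # read is proxied to d1
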
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
index 0fd2c02..5a8d892 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
@@ -793,6 +793,7 @@
                 ud = bb.fetch2.FetchData(k[1], self.d)
                 pupver= ud.method.latest_versionstring(ud, self.d)
                 verstring = pupver[0]
+                self.assertTrue(verstring, msg="Could not find upstream version")
                 r = bb.utils.vercmp_string(v, verstring)
                 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
 
@@ -804,6 +805,7 @@
                 ud = bb.fetch2.FetchData(k[1], self.d)
                 pupver = ud.method.latest_versionstring(ud, self.d)
                 verstring = pupver[0]
+                self.assertTrue(verstring, msg="Could not find upstream version")
                 r = bb.utils.vercmp_string(v, verstring)
                 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
 
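The strengthened assertions above guard against latest_versionstring() returning an empty result before the version comparison runs. For reference, bb.utils.vercmp_string() returns a cmp-style result, as the tests assume (illustrative only, not part of the patch):

    import bb.utils

    assert bb.utils.vercmp_string("1.0", "1.2") == -1  # known version older than upstream
    assert bb.utils.vercmp_string("1.2", "1.2") == 0   # versions equal
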
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
index 0b2706a..ab6ca90 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py
@@ -58,9 +58,9 @@
     def test_parse_simple(self):
         f = self.parsehelper(self.testfile)
         d = bb.parse.handle(f.name, self.d)['']
-        self.assertEqual(d.getVar("A", True), "1")
-        self.assertEqual(d.getVar("B", True), "2")
-        self.assertEqual(d.getVar("C", True), "3")
+        self.assertEqual(d.getVar("A"), "1")
+        self.assertEqual(d.getVar("B"), "2")
+        self.assertEqual(d.getVar("C"), "3")
 
     def test_parse_incomplete_function(self):
         testfileB = self.testfile.replace("}", "")
@@ -80,9 +80,9 @@
     def test_parse_unset(self):
         f = self.parsehelper(self.unsettest)
         d = bb.parse.handle(f.name, self.d)['']
-        self.assertEqual(d.getVar("A", True), None)
-        self.assertEqual(d.getVarFlag("A","flag", True), None)
-        self.assertEqual(d.getVar("B", True), "2")
+        self.assertEqual(d.getVar("A"), None)
+        self.assertEqual(d.getVarFlag("A","flag"), None)
+        self.assertEqual(d.getVar("B"), "2")
         
 
     overridetest = """
@@ -95,11 +95,11 @@
     def test_parse_overrides(self):
         f = self.parsehelper(self.overridetest)
         d = bb.parse.handle(f.name, self.d)['']
-        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
+        self.assertEqual(d.getVar("RRECOMMENDS"), "b")
         bb.data.expandKeys(d)
-        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
+        self.assertEqual(d.getVar("RRECOMMENDS"), "b")
         d.setVar("RRECOMMENDS_gtk+", "c")
-        self.assertEqual(d.getVar("RRECOMMENDS", True), "c")
+        self.assertEqual(d.getVar("RRECOMMENDS"), "c")
 
     overridetest2 = """
 EXTRA_OECONF = ""
@@ -112,7 +112,7 @@
         d = bb.parse.handle(f.name, self.d)['']
         d.appendVar("EXTRA_OECONF", " d")
         d.setVar("OVERRIDES", "class-target")
-        self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")
+        self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d")
 
     overridetest3 = """
 DESCRIPTION = "A"
@@ -124,11 +124,11 @@
         f = self.parsehelper(self.overridetest3)
         d = bb.parse.handle(f.name, self.d)['']
         bb.data.expandKeys(d)
-        self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
+        self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
         d.setVar("DESCRIPTION", "E")
         d.setVar("DESCRIPTION_bc-dev", "C D")
         d.setVar("OVERRIDES", "bc-dev")
-        self.assertEqual(d.getVar("DESCRIPTION", True), "C D")
+        self.assertEqual(d.getVar("DESCRIPTION"), "C D")
 
 
     classextend = """
@@ -159,6 +159,6 @@
         alldata = bb.parse.handle(f.name, self.d)
         d1 = alldata['']
         d2 = alldata[cls.name]
-        self.assertEqual(d1.getVar("VAR_var", True), "B")
-        self.assertEqual(d2.getVar("VAR_var", True), None)
+        self.assertEqual(d1.getVar("VAR_var"), "B")
+        self.assertEqual(d2.getVar("VAR_var"), None)
 
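These test updates reflect the datastore API change in this release: getVar() and getVarFlag() now expand by default, so the explicit True argument is dropped. A minimal sketch (illustrative only, not part of the patch; assumes d = bb.data.init()):

    d.setVar("A", "1")
    d.setVar("B", "${A}2")
    assert d.getVar("B") == "12"            # expanded by default
    assert d.getVar("B", False) == "${A}2"  # raw value still available on request
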
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
index 9fa5b5b..928333a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
@@ -1,6 +1,6 @@
 # tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities
 #
-# Copyright (C) 2012 Intel Corporation
+# Copyright (C) 2012-2017 Intel Corporation
 # Copyright (C) 2011 Mentor Graphics Corporation
 #
 # This program is free software; you can redistribute it and/or modify
@@ -17,50 +17,210 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 import logging
-import warnings
 import os
 import sys
+import atexit
+import re
+from collections import OrderedDict, defaultdict
 
 import bb.cache
 import bb.cooker
 import bb.providers
+import bb.taskdata
 import bb.utils
-from bb.cooker import state, BBCooker, CookerFeatures
+import bb.command
+import bb.remotedata
 from bb.cookerdata import CookerConfiguration, ConfigParameters
+from bb.main import setup_bitbake, BitBakeConfigParameters, BBMainException
 import bb.fetch2
 
+
+# We need this in order to shut down the connection to the bitbake server,
+# otherwise the process will never properly exit
+_server_connections = []
+def _terminate_connections():
+    for connection in _server_connections:
+        connection.terminate()
+atexit.register(_terminate_connections)
+
+class TinfoilUIException(Exception):
+    """Exception raised when the UI returns non-zero from its main function"""
+    def __init__(self, returncode):
+        self.returncode = returncode
+    def __repr__(self):
+        return 'UI module main returned %d' % self.returncode
+
+class TinfoilCommandFailed(Exception):
+    """Exception raised when run_command fails"""
+
+class TinfoilDataStoreConnector:
+
+    def __init__(self, tinfoil, dsindex):
+        self.tinfoil = tinfoil
+        self.dsindex = dsindex
+    def getVar(self, name):
+        value = self.tinfoil.run_command('dataStoreConnectorFindVar', self.dsindex, name)
+        overrides = None
+        if isinstance(value, dict):
+            if '_connector_origtype' in value:
+                value['_content'] = self.tinfoil._reconvert_type(value['_content'], value['_connector_origtype'])
+                del value['_connector_origtype']
+            if '_connector_overrides' in value:
+                overrides = value['_connector_overrides']
+                del value['_connector_overrides']
+        return value, overrides
+    def getKeys(self):
+        return set(self.tinfoil.run_command('dataStoreConnectorGetKeys', self.dsindex))
+    def getVarHistory(self, name):
+        return self.tinfoil.run_command('dataStoreConnectorGetVarHistory', self.dsindex, name)
+    def expandPythonRef(self, varname, expr, d):
+        ds = bb.remotedata.RemoteDatastores.transmit_datastore(d)
+        ret = self.tinfoil.run_command('dataStoreConnectorExpandPythonRef', ds, varname, expr)
+        return ret
+    def setVar(self, varname, value):
+        if self.dsindex is None:
+            self.tinfoil.run_command('setVariable', varname, value)
+        else:
+            # Not currently implemented - indicate that setting should
+            # be redirected to local side
+            return True
+    def setVarFlag(self, varname, flagname, value):
+        if self.dsindex is None:
+            self.tinfoil.run_command('dataStoreConnectorSetVarFlag', self.dsindex, varname, flagname, value)
+        else:
+            # Not currently implemented - indicate that setting should
+            # be redirected to local side
+            return True
+    def delVar(self, varname):
+        if self.dsindex is None:
+            self.tinfoil.run_command('dataStoreConnectorDelVar', self.dsindex, varname)
+        else:
+            # Not currently implemented - indicate that setting should
+            # be redirected to local side
+            return True
+    def delVarFlag(self, varname, flagname):
+        if self.dsindex is None:
+            self.tinfoil.run_command('dataStoreConnectorDelVar', self.dsindex, varname, flagname)
+        else:
+            # Not currently implemented - indicate that setting should
+            # be redirected to local side
+            return True
+    def renameVar(self, name, newname):
+        if self.dsindex is None:
+            self.tinfoil.run_command('dataStoreConnectorRenameVar', self.dsindex, name, newname)
+        else:
+            # Not currently implemented - indicate that setting should
+            # be redirected to local side
+            return True
+
+class TinfoilCookerAdapter:
+    """
+    Provide an adapter for existing code that expects to access a cooker object via Tinfoil;
+    since Tinfoil now runs on the client side, it no longer has direct access.
+    """
+
+    class TinfoilCookerCollectionAdapter:
+        """ cooker.collection adapter """
+        def __init__(self, tinfoil):
+            self.tinfoil = tinfoil
+        def get_file_appends(self, fn):
+            return self.tinfoil.get_file_appends(fn)
+        def __getattr__(self, name):
+            if name == 'overlayed':
+                return self.tinfoil.get_overlayed_recipes()
+            elif name == 'bbappends':
+                return self.tinfoil.run_command('getAllAppends')
+            else:
+                raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name))
+
+    class TinfoilRecipeCacheAdapter:
+        """ cooker.recipecache adapter """
+        def __init__(self, tinfoil):
+            self.tinfoil = tinfoil
+            self._cache = {}
+
+        def get_pkg_pn_fn(self):
+            pkg_pn = defaultdict(list, self.tinfoil.run_command('getRecipes') or [])
+            pkg_fn = {}
+            for pn, fnlist in pkg_pn.items():
+                for fn in fnlist:
+                    pkg_fn[fn] = pn
+            self._cache['pkg_pn'] = pkg_pn
+            self._cache['pkg_fn'] = pkg_fn
+
+        def __getattr__(self, name):
+            # Grab these only when they are requested since they aren't always used
+            if name in self._cache:
+                return self._cache[name]
+            elif name == 'pkg_pn':
+                self.get_pkg_pn_fn()
+                return self._cache[name]
+            elif name == 'pkg_fn':
+                self.get_pkg_pn_fn()
+                return self._cache[name]
+            elif name == 'deps':
+                attrvalue = defaultdict(list, self.tinfoil.run_command('getRecipeDepends') or [])
+            elif name == 'rundeps':
+                attrvalue = defaultdict(lambda: defaultdict(list), self.tinfoil.run_command('getRuntimeDepends') or [])
+            elif name == 'runrecs':
+                attrvalue = defaultdict(lambda: defaultdict(list), self.tinfoil.run_command('getRuntimeRecommends') or [])
+            elif name == 'pkg_pepvpr':
+                attrvalue = self.tinfoil.run_command('getRecipeVersions') or {}
+            elif name == 'inherits':
+                attrvalue = self.tinfoil.run_command('getRecipeInherits') or {}
+            elif name == 'bbfile_priority':
+                attrvalue = self.tinfoil.run_command('getBbFilePriority') or {}
+            elif name == 'pkg_dp':
+                attrvalue = self.tinfoil.run_command('getDefaultPreference') or {}
+            else:
+                raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name))
+
+            self._cache[name] = attrvalue
+            return attrvalue
+
+    def __init__(self, tinfoil):
+        self.tinfoil = tinfoil
+        self.collection = self.TinfoilCookerCollectionAdapter(tinfoil)
+        self.recipecaches = {}
+        # FIXME all machines
+        self.recipecaches[''] = self.TinfoilRecipeCacheAdapter(tinfoil)
+        self._cache = {}
+    def __getattr__(self, name):
+        # Grab these only when they are requested since they aren't always used
+        if name in self._cache:
+            return self._cache[name]
+        elif name == 'skiplist':
+            attrvalue = self.tinfoil.get_skipped_recipes()
+        elif name == 'bbfile_config_priorities':
+            ret = self.tinfoil.run_command('getLayerPriorities')
+            bbfile_config_priorities = []
+            for collection, pattern, regex, pri in ret:
+                bbfile_config_priorities.append((collection, pattern, re.compile(regex), pri))
+
+            attrvalue = bbfile_config_priorities
+        else:
+            raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name))
+
+        self._cache[name] = attrvalue
+        return attrvalue
+
+    def findBestProvider(self, pn):
+        return self.tinfoil.find_best_provider(pn)
+
+
 class Tinfoil:
-    def __init__(self, output=sys.stdout, tracking=False):
-        # Needed to avoid deprecation warnings with python 2.6
-        warnings.filterwarnings("ignore", category=DeprecationWarning)
 
-        # Set up logging
+    def __init__(self, output=sys.stdout, tracking=False, setup_logging=True):
         self.logger = logging.getLogger('BitBake')
-        self._log_hdlr = logging.StreamHandler(output)
-        bb.msg.addDefaultlogFilter(self._log_hdlr)
-        format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-        if output.isatty():
-            format.enable_color()
-        self._log_hdlr.setFormatter(format)
-        self.logger.addHandler(self._log_hdlr)
-
-        self.config = CookerConfiguration()
-        configparams = TinfoilConfigParameters(parse_only=True)
-        self.config.setConfigParameters(configparams)
-        self.config.setServerRegIdleCallback(self.register_idle_function)
-        features = []
-        if tracking:
-            features.append(CookerFeatures.BASEDATASTORE_TRACKING)
-        cleanedvars = bb.utils.clean_environment()
-        self.cooker = BBCooker(self.config, features)
-        self.config_data = self.cooker.data
-        bb.providers.logger.setLevel(logging.ERROR)
-        self.cooker_data = None
-        for k in cleanedvars:
-            os.environ[k] = cleanedvars[k]
-
-    def register_idle_function(self, function, data):
-        pass
+        self.config_data = None
+        self.cooker = None
+        self.tracking = tracking
+        self.ui_module = None
+        self.server_connection = None
+        if setup_logging:
+            # This is the *client-side* logger, nothing to do with
+            # logging messages from the server
+            bb.msg.logger_create('BitBake', output)
 
     def __enter__(self):
         return self
@@ -68,30 +228,161 @@
     def __exit__(self, type, value, traceback):
         self.shutdown()
 
-    def parseRecipes(self):
-        sys.stderr.write("Parsing recipes..")
-        self.logger.setLevel(logging.WARNING)
+    def prepare(self, config_only=False, config_params=None, quiet=0):
+        if self.tracking:
+            extrafeatures = [bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
+        else:
+            extrafeatures = []
 
-        try:
-            while self.cooker.state in (state.initial, state.parsing):
-                self.cooker.updateCache()
-        except KeyboardInterrupt:
-            self.cooker.shutdown()
-            self.cooker.updateCache()
-            sys.exit(2)
+        if not config_params:
+            config_params = TinfoilConfigParameters(config_only=config_only, quiet=quiet)
 
-        self.logger.setLevel(logging.INFO)
-        sys.stderr.write("done.\n")
+        cookerconfig = CookerConfiguration()
+        cookerconfig.setConfigParameters(config_params)
 
-        self.cooker_data = self.cooker.recipecaches['']
+        server, self.server_connection, ui_module = setup_bitbake(config_params,
+                            cookerconfig,
+                            extrafeatures)
 
-    def prepare(self, config_only = False):
-        if not self.cooker_data:
+        self.ui_module = ui_module
+
+        # Ensure the path to bitbake's bin directory is in PATH so that things like
+        # bitbake-worker can be run (usually this is the case, but it doesn't have to be)
+        path = os.getenv('PATH').split(':')
+        bitbakebinpath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'bin'))
+        for entry in path:
+            if entry.endswith(os.sep):
+                entry = entry[:-1]
+            if os.path.abspath(entry) == bitbakebinpath:
+                break
+        else:
+            path.insert(0, bitbakebinpath)
+            os.environ['PATH'] = ':'.join(path)
+
+        if self.server_connection:
+            _server_connections.append(self.server_connection)
             if config_only:
-                self.cooker.parseConfiguration()
-                self.cooker_data = self.cooker.recipecaches['']
+                config_params.updateToServer(self.server_connection.connection, os.environ.copy())
+                self.run_command('parseConfiguration')
             else:
-                self.parseRecipes()
+                self.run_actions(config_params)
+
+            self.config_data = bb.data.init()
+            connector = TinfoilDataStoreConnector(self, None)
+            self.config_data.setVar('_remote_data', connector)
+            self.cooker = TinfoilCookerAdapter(self)
+            self.cooker_data = self.cooker.recipecaches['']
+        else:
+            raise Exception('Failed to start bitbake server')
+
+    def run_actions(self, config_params):
+        """
+        Run the actions specified in config_params through the UI.
+        """
+        ret = self.ui_module.main(self.server_connection.connection, self.server_connection.events, config_params)
+        if ret:
+            raise TinfoilUIException(ret)
+
+    def parseRecipes(self):
+        """
+        Force a parse of all recipes. Normally you should specify
+        config_only=False when calling prepare() instead of using this
+        function; this function is designed for situations where you need
+        to initialise Tinfoil and use it with config_only=True first and
+        then conditionally call this function to parse recipes later.
+        """
+        config_params = TinfoilConfigParameters(config_only=False)
+        self.run_actions(config_params)
+
+    def run_command(self, command, *params):
+        """
+        Run a command on the server (as implemented in bb.command).
+        Note that there are two types of command - synchronous and
+        asynchronous; in order to receive the results of asynchronous
+        commands you will need to set an appropriate event mask
+        using set_event_mask() and listen for the result using
+        wait_event() - with the correct event mask you'll at least get
+        bb.command.CommandCompleted and possibly other events before
+        that depending on the command.
+        """
+        if not self.server_connection:
+            raise Exception('Not connected to server (did you call .prepare()?)')
+
+        commandline = [command]
+        if params:
+            commandline.extend(params)
+        result = self.server_connection.connection.runCommand(commandline)
+        if result[1]:
+            raise TinfoilCommandFailed(result[1])
+        return result[0]
+
+    def set_event_mask(self, eventlist):
+        """Set the event mask which will be applied within wait_event()"""
+        if not self.server_connection:
+            raise Exception('Not connected to server (did you call .prepare()?)')
+        llevel, debug_domains = bb.msg.constructLogOptions()
+        ret = self.run_command('setEventMask', self.server_connection.connection.getEventHandle(), llevel, debug_domains, eventlist)
+        if not ret:
+            raise Exception('setEventMask failed')
+
+    def wait_event(self, timeout=0):
+        """
+        Wait for an event from the server for the specified time.
+        A timeout of 0 means don't wait if there are no events in the queue.
+        Returns the next event in the queue or None if the timeout was
+        reached. Note that in order to recieve any events you will
+        first need to set the internal event mask using set_event_mask()
+        (otherwise whatever event mask the UI set up will be in effect).
+        """
+        if not self.server_connection:
+            raise Exception('Not connected to server (did you call .prepare()?)')
+        return self.server_connection.events.waitEvent(timeout)
+
+    def get_overlayed_recipes(self):
+        return defaultdict(list, self.run_command('getOverlayedRecipes'))
+
+    def get_skipped_recipes(self):
+        return OrderedDict(self.run_command('getSkippedRecipes'))
+
+    def get_all_providers(self):
+        return defaultdict(list, self.run_command('allProviders'))
+
+    def find_providers(self):
+        return self.run_command('findProviders')
+
+    def find_best_provider(self, pn):
+        return self.run_command('findBestProvider', pn)
+
+    def get_runtime_providers(self, rdep):
+        return self.run_command('getRuntimeProviders', rdep)
+
+    def get_recipe_file(self, pn):
+        """
+        Get the file name for the specified recipe/target. Raises
+        bb.providers.NoProvider if there is no match or the recipe was
+        skipped.
+        """
+        best = self.find_best_provider(pn)
+        if not best or (len(best) > 3 and not best[3]):
+            skiplist = self.get_skipped_recipes()
+            taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
+            skipreasons = taskdata.get_reasons(pn)
+            if skipreasons:
+                raise bb.providers.NoProvider('%s is unavailable:\n  %s' % (pn, '\n  '.join(skipreasons)))
+            else:
+                raise bb.providers.NoProvider('Unable to find any recipe file matching "%s"' % pn)
+        return best[3]
+
+    def get_file_appends(self, fn):
+        return self.run_command('getFileAppends', fn)
+
+    def parse_recipe(self, pn):
+        """
+        Parse the specified recipe and return a datastore object
+        representing the environment for the recipe.
+        """
+        fn = self.get_recipe_file(pn)
+        return self.parse_recipe_file(fn)
 
     def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
         """
@@ -110,41 +401,82 @@
         """
         if appends and appendlist == []:
             appends = False
-        if appends:
-            if appendlist:
-                appendfiles = appendlist
-            else:
-                if not hasattr(self.cooker, 'collection'):
-                    raise Exception('You must call tinfoil.prepare() with config_only=False in order to get bbappends')
-                appendfiles = self.cooker.collection.get_file_appends(fn)
-        else:
-            appendfiles = None
         if config_data:
-            # We have to use a different function here if we're passing in a datastore
-            localdata = bb.data.createCopy(config_data)
-            envdata = bb.cache.parse_recipe(localdata, fn, appendfiles)['']
+            dctr = bb.remotedata.RemoteDatastores.transmit_datastore(config_data)
+            dscon = self.run_command('parseRecipeFile', fn, appends, appendlist, dctr)
         else:
-            # Use the standard path
-            parser = bb.cache.NoCache(self.cooker.databuilder)
-            envdata = parser.loadDataFull(fn, appendfiles)
-        return envdata
+            dscon = self.run_command('parseRecipeFile', fn, appends, appendlist)
+        if dscon:
+            return self._reconvert_type(dscon, 'DataStoreConnectionHandle')
+        else:
+            return None
+
+    def build_file(self, buildfile, task):
+        """
+        Runs the specified task for just a single recipe (i.e. no dependencies).
+        This is equivalent to bitbake -b, except no warning will be printed.
+        """
+        return self.run_command('buildFile', buildfile, task, True)
 
     def shutdown(self):
-        self.cooker.shutdown(force=True)
-        self.cooker.post_serve()
-        self.cooker.unlockBitbake()
-        self.logger.removeHandler(self._log_hdlr)
+        if self.server_connection:
+            self.run_command('clientComplete')
+            _server_connections.remove(self.server_connection)
+            bb.event.ui_queue = []
+            self.server_connection.terminate()
+            self.server_connection = None
 
-class TinfoilConfigParameters(ConfigParameters):
+    def _reconvert_type(self, obj, origtypename):
+        """
+        Convert an object back to the right type, in the case
+        that marshalling has changed it (especially with xmlrpc)
+        """
+        supported_types = {
+            'set': set,
+            'DataStoreConnectionHandle': bb.command.DataStoreConnectionHandle,
+        }
 
-    def __init__(self, **options):
+        origtype = supported_types.get(origtypename, None)
+        if origtype is None:
+            raise Exception('Unsupported type "%s"' % origtypename)
+        if type(obj) == origtype:
+            newobj = obj
+        elif isinstance(obj, dict):
+            # New style class
+            newobj = origtype()
+            for k,v in obj.items():
+                setattr(newobj, k, v)
+        else:
+            # Assume we can coerce the type
+            newobj = origtype(obj)
+
+        if isinstance(newobj, bb.command.DataStoreConnectionHandle):
+            connector = TinfoilDataStoreConnector(self, newobj.dsindex)
+            newobj = bb.data.init()
+            newobj.setVar('_remote_data', connector)
+
+        return newobj
+
+
+class TinfoilConfigParameters(BitBakeConfigParameters):
+
+    def __init__(self, config_only, **options):
         self.initial_options = options
+        # Apply some sane defaults
+        if 'parse_only' not in options:
+            self.initial_options['parse_only'] = not config_only
+        #if 'status_only' not in options:
+        #    self.initial_options['status_only'] = config_only
+        if 'ui' not in options:
+            self.initial_options['ui'] = 'knotty'
+        if 'argv' not in options:
+            self.initial_options['argv'] = []
+
         super(TinfoilConfigParameters, self).__init__()
 
-    def parseCommandLine(self, argv=sys.argv):
-        class DummyOptions:
-            def __init__(self, initial_options):
-                for key, val in initial_options.items():
-                    setattr(self, key, val)
-
-        return DummyOptions(self.initial_options), None
+    def parseCommandLine(self, argv=None):
+        # We don't want any parameters parsed from the command line
+        opts = super(TinfoilConfigParameters, self).parseCommandLine([])
+        for key, val in self.initial_options.items():
+            setattr(opts[0], key, val)
+        return opts
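
A minimal usage sketch of the reworked client/server Tinfoil API above
(a hypothetical driver script; it assumes bitbake/lib is on PYTHONPATH and
that it runs from an initialized build directory):

    import bb.tinfoil

    tinfoil = bb.tinfoil.Tinfoil()
    try:
        # config_only=False also parses recipe metadata so recipe queries work
        tinfoil.prepare(config_only=False)
        # Variable reads now travel over the command API and come back through
        # the remote datastore connector set up by _reconvert_type()
        print(tinfoil.config_data.getVar('MACHINE'))
        # build_file() wraps the 'buildFile' command, i.e. bitbake -b
        tinfoil.build_file('/path/to/example_1.0.bb', 'fetch')
    finally:
        tinfoil.shutdown()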
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
index 3ddcb2a..e451c63 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -42,10 +42,12 @@
 from orm.models import Package, Package_File, Target_Installed_Package, Target_File
 from orm.models import Task_Dependency, Package_Dependency
 from orm.models import Recipe_Dependency, Provides
-from orm.models import Project, CustomImagePackage, CustomImageRecipe
+from orm.models import Project, CustomImagePackage
 from orm.models import signal_runbuilds
 
 from bldcontrol.models import BuildEnvironment, BuildRequest
+from bldcontrol.models import BRLayer
+from bldcontrol import bbcontroller
 
 from bb.msg import BBLogFormatter as formatter
 from django.db import models
@@ -361,11 +363,6 @@
 
     def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
         if isinstance(layer_obj, Layer_Version):
-            # Special case the toaster-custom-images layer which is created
-            # on the fly so don't update the values which may cause the layer
-            # to be duplicated on a future get_or_create
-            if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME:
-                return layer_obj
             # We already found our layer version for this build so just
             # update it with the new build information
             logger.debug("We found our layer from toaster")
@@ -384,8 +381,8 @@
                 local_path=layer_version_information['local_path'],
             )
 
-            logger.info("created new historical layer version %d",
-                        layer_copy.pk)
+            logger.debug("Created new layer version %s for build history",
+                         layer_copy.layer.name)
 
             self.layer_version_built.append(layer_copy)
 
@@ -441,48 +438,33 @@
         else:
             br_id, be_id = brbe.split(":")
 
-            # find layer by checkout path;
-            from bldcontrol import bbcontroller
-            bc = bbcontroller.getBuildEnvironmentController(pk = be_id)
+            # Find the layer version by matching the layer event information
+            # against the metadata we have in Toaster
 
-            # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
-            # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure
+            try:
+                br_layer = BRLayer.objects.get(req=br_id,
+                                               name=layer_information['name'])
+                return br_layer.layer_version
+            except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist):
+                # There are multiple of the same layer name or the name
+                # hasn't been determined by the toaster.bbclass layer
+                # so let's filter by the local_path
+                bc = bbcontroller.getBuildEnvironmentController(pk=be_id)
+                for br_layer in BRLayer.objects.filter(req=br_id):
+                    if br_layer.giturl and \
+                       layer_information['local_path'].endswith(
+                           bc.getGitCloneDirectory(br_layer.giturl,
+                                                   br_layer.commit)):
+                            return br_layer.layer_version
 
-            # note that this is different
-            buildrequest = BuildRequest.objects.get(pk = br_id)
-            for brl in buildrequest.brlayer_set.all():
-                if brl.local_source_dir:
-                    localdirname = os.path.join(brl.local_source_dir,
-                                                brl.dirpath)
-                else:
-                    localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
-                # we get a relative path, unless running in HEAD mode where the path is absolute
-                if not localdirname.startswith("/"):
-                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
-                #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
-                if localdirname.startswith(layer_information['local_path']):
-                  # If the build request came from toaster this field
-                  # should contain the information from the layer_version
-                  # That created this build request.
-                    if brl.layer_version:
-                        return brl.layer_version
+                    if br_layer.local_source_dir == \
+                            layer_information['local_path']:
+                        return br_layer.layer_version
 
-                # This might be a local layer (i.e. no git info) so try
-                # matching local_source_dir
-                if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]:
-                    return brl.layer_version
-
-                    # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
-                    #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
-
-                    for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
-                        if pl.layercommit.layer.vcs_url == brl.giturl :
-                            layer = pl.layercommit.layer
-                            layer.save()
-                            return layer
-
-            raise NotExisting("Unidentified layer %s" % pformat(layer_information))
-
+        # We've reached the end of our search and couldn't find the layer;
+        # we can continue, but some data may be missing
+        raise NotExisting("Unidentified layer %s" %
+                          pformat(layer_information))
 
     def save_target_file_information(self, build_obj, target_obj, filedata):
         assert isinstance(build_obj, Build)
@@ -876,6 +858,12 @@
         self.pathname = None
         self.lineno = None
 
+    def getMessage(self):
+        """
+        Simulate LogRecord message return
+        """
+        return self.msg
+
 
 class BuildInfoHelper(object):
     """ This class gathers the build information from the server and sends it
@@ -983,9 +971,10 @@
         return task_information
 
     def _get_layer_version_for_dependency(self, pathRE):
-        """ Returns the layer in the toaster db that has a full regex match to the pathRE.
-        pathRE - the layer path passed as a regex in the event. It is created in
-          cooker.py as a collection for the layer priorities.
+        """ Returns the layer in the toaster db that has a full regex
+        match to the pathRE. pathRE - the layer path passed as a regex in the
+        event. It is created in cooker.py as a collection for the layer
+        priorities.
         """
         self._ensure_build()
 
@@ -993,19 +982,31 @@
             assert isinstance(layer_version, Layer_Version)
             return len(layer_version.local_path)
 
-        # we don't care if we match the trailing slashes
-        p = re.compile(re.sub("/[^/]*?$","",pathRE))
-        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
-        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_sort_longest_path):
-            if p.fullmatch(lvo.local_path):
+        # Our paths don't append a trailing slash
+        if pathRE.endswith("/"):
+            pathRE = pathRE[:-1]
+
+        p = re.compile(pathRE)
+        # Strip the regex anchors so the path can also be used below for a
+        # plain prefix match against sub-layer paths
+        path = re.sub(r'[$^]', r'', pathRE)
+        # Heuristics: we always match recipe to the deepest layer path in
+        # the discovered layers
+        for lvo in sorted(self.orm_wrapper.layer_version_objects,
+                          reverse=True, key=_sort_longest_path):
+            if p.fullmatch(os.path.abspath(lvo.local_path)):
                 return lvo
             if lvo.layer.local_source_dir:
-                if p.fullmatch(lvo.layer.local_source_dir):
+                if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)):
                     return lvo
-        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
-        logger.warning("Could not match layer dependency for path %s : %s", path, self.orm_wrapper.layer_version_objects)
+            if path.startswith(lvo.local_path):
+                # sub-layer path inside existing layer
+                return lvo
 
-
+        # if we get here, we didn't read layers correctly;
+        # dump whatever information we have on the error log
+        logger.warning("Could not match layer dependency for path %s : %s",
+                       pathRE,
+                       self.orm_wrapper.layer_version_objects)
+        return None
 
     def _get_layer_version_for_path(self, path):
         self._ensure_build()
@@ -1268,6 +1269,14 @@
                 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                 if len(candidates) == 1:
                     identifier = candidates[0]
+                elif len(candidates) > 1 and hasattr(event, '_package'):
+                    if 'native-' in event._package:
+                        identifier = 'native:' + identifier
+                    if 'nativesdk-' in event._package:
+                        identifier = 'nativesdk:' + identifier
+                    candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
+                    if len(candidates) == 1:
+                        identifier = candidates[0]
 
         assert identifier in self.internal_state['taskdata']
         identifierlist = identifier.split(":")
@@ -1398,9 +1407,9 @@
             for lv in event._depgraph['layer-priorities']:
                 (_, path, _, priority) = lv
                 layer_version_obj = self._get_layer_version_for_dependency(path)
-                assert layer_version_obj is not None
-                layer_version_obj.priority = priority
-                layer_version_obj.save()
+                if layer_version_obj:
+                    layer_version_obj.priority = priority
+                    layer_version_obj.save()
 
         # save recipe information
         self.internal_state['recipes'] = {}
@@ -1665,6 +1674,36 @@
                 break
         return endswith
 
+    def scan_task_artifacts(self, event):
+        """
+        The 'TaskArtifacts' event passes the manifest file content for the
+        tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and
+        'do_populate_sdk_ext'. The first two will be implemented later.
+        """
+        task_vars = BuildInfoHelper._get_data_from_event(event)
+        task_name = task_vars['task'][task_vars['task'].find(':')+1:]
+        task_artifacts = task_vars['artifacts']
+
+        if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']:
+            targets = [target for target in self.internal_state['targets']
+                if target.task == task_name[3:]]
+            if not targets:
+                logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name)
+                return
+            for artifact_path in task_artifacts:
+                if not os.path.isfile(artifact_path):
+                    logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path)
+                    continue
+                for target in targets:
+                    # don't record the file if it's already been added
+                    # to this target
+                    matching_files = TargetSDKFile.objects.filter(
+                        target=target, file_name=artifact_path)
+                    if matching_files.count() == 0:
+                        artifact_size = os.stat(artifact_path).st_size
+                        self.orm_wrapper.save_target_sdk_file(
+                            target, artifact_path, artifact_size)
+
     def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
         """
         Find files in deploy_dir_image whose basename starts with the
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
index 948f527..82aa7c4 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py
@@ -75,10 +75,8 @@
                 extrastr = str(extra)
                 if extrastr[0] != ' ':
                     extrastr = ' ' + extrastr
-                if extrastr[-1] != ' ':
-                    extrastr += ' '
             else:
-                extrastr = ' '
+                extrastr = ''
             self.widgets[self.extrapos] = extrastr
 
     def _need_update(self):
@@ -284,7 +282,7 @@
             content = self.main_progress.update(progress)
             print('')
         lines = 1 + int(len(content) / (self.columns + 1))
-        if not self.quiet:
+        if self.quiet == 0:
             for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
                 if isinstance(task, tuple):
                     pbar, progress, rate, start_time = task
@@ -312,7 +310,7 @@
             fd = sys.stdin.fileno()
             self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
 
-def _log_settings_from_server(server):
+def _log_settings_from_server(server, observe_only):
     # Get values of variables which control our output
     includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
     if error:
@@ -322,7 +320,11 @@
     if error:
         logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
         raise BaseException(error)
-    consolelogfile, error = server.runCommand(["getSetVariable", "BB_CONSOLELOG"])
+    if observe_only:
+        cmd = 'getVariable'
+    else:
+        cmd = 'getSetVariable'
+    consolelogfile, error = server.runCommand([cmd, "BB_CONSOLELOG"])
     if error:
         logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
         raise BaseException(error)
@@ -340,7 +342,7 @@
 
 def main(server, eventHandler, params, tf = TerminalFilter):
 
-    includelogs, loglines, consolelogfile = _log_settings_from_server(server)
+    includelogs, loglines, consolelogfile = _log_settings_from_server(server, params.observe_only)
 
     if sys.stdin.isatty() and sys.stdout.isatty():
         log_exec_tty = True
@@ -353,10 +355,13 @@
     errconsole = logging.StreamHandler(sys.stderr)
     format_str = "%(levelname)s: %(message)s"
     format = bb.msg.BBLogFormatter(format_str)
-    if params.options.quiet:
-        bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, bb.msg.BBLogFormatter.WARNING)
+    if params.options.quiet == 0:
+        forcelevel = None
+    elif params.options.quiet > 2:
+        forcelevel = bb.msg.BBLogFormatter.ERROR
     else:
-        bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
+        forcelevel = bb.msg.BBLogFormatter.WARNING
+    bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, forcelevel)
     bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
     console.setFormatter(format)
     errconsole.setFormatter(format)
@@ -506,35 +511,47 @@
                 logger.info(event._message)
                 continue
             if isinstance(event, bb.event.ParseStarted):
+                if params.options.quiet > 1:
+                    continue
                 if event.total == 0:
                     continue
                 parseprogress = new_progress("Parsing recipes", event.total).start()
                 continue
             if isinstance(event, bb.event.ParseProgress):
+                if params.options.quiet > 1:
+                    continue
                 if parseprogress:
                     parseprogress.update(event.current)
                 else:
                     bb.warn("Got ParseProgress event for parsing that never started?")
                 continue
             if isinstance(event, bb.event.ParseCompleted):
+                if params.options.quiet > 1:
+                    continue
                 if not parseprogress:
                     continue
                 parseprogress.finish()
                 parseprogress = None
-                if not params.options.quiet:
+                if params.options.quiet == 0:
                     print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
                         % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
                 continue
 
             if isinstance(event, bb.event.CacheLoadStarted):
+                if params.options.quiet > 1:
+                    continue
                 cacheprogress = new_progress("Loading cache", event.total).start()
                 continue
             if isinstance(event, bb.event.CacheLoadProgress):
+                if params.options.quiet > 1:
+                    continue
                 cacheprogress.update(event.current)
                 continue
             if isinstance(event, bb.event.CacheLoadCompleted):
+                if params.options.quiet > 1:
+                    continue
                 cacheprogress.finish()
-                if not params.options.quiet:
+                if params.options.quiet == 0:
                     print("Loaded %d entries from dependency cache." % event.num_entries)
                 continue
 
@@ -620,16 +637,22 @@
                 continue
 
             if isinstance(event, bb.event.ProcessStarted):
+                if params.options.quiet > 1:
+                    continue
                 parseprogress = new_progress(event.processname, event.total)
                 parseprogress.start(False)
                 continue
             if isinstance(event, bb.event.ProcessProgress):
+                if params.options.quiet > 1:
+                    continue
                 if parseprogress:
                     parseprogress.update(event.progress)
                 else:
                     bb.warn("Got ProcessProgress event for someting that never started?")
                 continue
             if isinstance(event, bb.event.ProcessFinished):
+                if params.options.quiet > 1:
+                    continue
                 if parseprogress:
                     parseprogress.finish()
                 parseprogress = None
@@ -647,6 +670,7 @@
                                   bb.event.OperationCompleted,
                                   bb.event.OperationProgress,
                                   bb.event.DiskFull,
+                                  bb.event.HeartbeatEvent,
                                   bb.build.TaskProgress)):
                 continue
 
@@ -700,7 +724,7 @@
         if return_value and errors:
             summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
                                  "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
-        if summary and not params.options.quiet:
+        if summary and params.options.quiet == 0:
             print(summary)
 
         if interrupted:
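
The new -q handling maps a counted command-line flag onto a minimum console
log level; a standalone sketch of the same mapping (using the bb.msg level
constants referenced above):

    import bb.msg

    def console_force_level(quiet):
        # 'quiet' is the number of -q flags passed to bitbake
        if quiet == 0:
            return None                           # normal output
        elif quiet > 2:
            return bb.msg.BBLogFormatter.ERROR    # -qqq: errors only
        else:
            return bb.msg.BBLogFormatter.WARNING  # -q/-qq: warnings and errors

At quiet levels above 1 the UI additionally drops the parse, cache and
process progress events, as the params.options.quiet > 1 guards show.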
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
index d81e413..ca845a3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py
@@ -297,7 +297,7 @@
 #                            bb.error("log data follows (%s)" % logfile)
 #                            number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
 #                            if number_of_lines:
-#                                subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
+#                                subprocess.check_call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
 #                            else:
 #                                f = open(logfile, "r")
 #                                while True:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py
similarity index 90%
rename from import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py
rename to import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py
index d879e04..9d14ece 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py
@@ -78,7 +78,7 @@
 class DepExplorer(Gtk.Window):
     def __init__(self):
         Gtk.Window.__init__(self)
-        self.set_title("Dependency Explorer")
+        self.set_title("Task Dependency Explorer")
         self.set_default_size(500, 500)
         self.connect("delete-event", Gtk.main_quit)
 
@@ -106,30 +106,21 @@
 
         box = Gtk.VBox(homogeneous=True, spacing=4)
 
-        # Runtime Depends
+        # Task Depends
         scrolled = Gtk.ScrolledWindow()
         scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
         scrolled.set_shadow_type(Gtk.ShadowType.IN)
-        self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
-        self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
-        scrolled.add(self.rdep_treeview)
-        box.add(scrolled)
-
-        # Build Depends
-        scrolled = Gtk.ScrolledWindow()
-        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
-        scrolled.set_shadow_type(Gtk.ShadowType.IN)
-        self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
+        self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Dependencies")
         self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
         scrolled.add(self.dep_treeview)
         box.add(scrolled)
         pane.add2(box)
 
-        # Reverse Depends
+        # Reverse Task Depends
         scrolled = Gtk.ScrolledWindow()
         scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
         scrolled.set_shadow_type(Gtk.ShadowType.IN)
-        self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
+        self.revdep_treeview = PackageReverseDepView(self.depends_model, "Dependent Tasks")
         self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
         scrolled.add(self.revdep_treeview)
         box.add(scrolled)
@@ -160,22 +151,15 @@
             current_package = None
         else:
             current_package = model.get_value(it, COL_PKG_NAME)
-        self.rdep_treeview.set_current_package(current_package)
         self.dep_treeview.set_current_package(current_package)
         self.revdep_treeview.set_current_package(current_package)
 
 
     def parse(self, depgraph):
-        for package in depgraph["pn"]:
-            self.pkg_model.insert(0, (package,))
-
-        for package in depgraph["depends"]:
-            for depend in depgraph["depends"][package]:
-                self.depends_model.insert (0, (TYPE_DEP, package, depend))
-
-        for package in depgraph["rdepends-pn"]:
-            for rdepend in depgraph["rdepends-pn"][package]:
-                self.depends_model.insert (0, (TYPE_RDEP, package, rdepend))
+        for task in depgraph["tdepends"]:
+            self.pkg_model.insert(0, (task,))
+            for depend in depgraph["tdepends"][task]:
+                self.depends_model.insert(0, (TYPE_DEP, task, depend))
 
 
 class gtkthread(threading.Thread):
@@ -313,7 +297,7 @@
                         extra = ". Close matches:\n  %s" % '\n  '.join(event._close_matches)
 
                 if event._dependees:
-                    print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % r, event._item, ", ".join(event._dependees), r, extra)
+                    print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % (r, event._item, ", ".join(event._dependees), r, extra))
                 else:
                     print("Nothing %sPROVIDES '%s'%s" % (r, event._item, extra))
                 if event._reasons:
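
The rewritten parse() consumes only the "tdepends" section of the depgraph,
which maps each task to the tasks it depends on, roughly like this
(illustrative data only, not taken from a real build):

    depgraph = {
        "tdepends": {
            "zlib.do_compile":   ["zlib.do_configure"],
            "zlib.do_configure": ["zlib.do_patch",
                                  "quilt-native.do_populate_sysroot"],
        },
    }

    # parse() adds one package-model row per task and one depends-model row
    # per (task, dependency) edge:
    for task, deps in depgraph["tdepends"].items():
        print(task, "->", ", ".join(deps))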
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
index 9808f6b..71f04fa 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py
@@ -168,6 +168,9 @@
         logger.warning("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
         build_history_enabled = False
 
+    if not "buildstats" in inheritlist.split(" "):
+        logger.warning("buildstats is not enabled. Please enable INHERIT += \"buildstats\" to generate build statistics.")
+
     if not params.observe_only:
         params.updateFromServer(server)
         params.updateToServer(server, os.environ.copy())
@@ -233,6 +236,9 @@
             # pylint: disable=protected-access
             # the code will look into the protected variables of the event; no easy way around this
 
+            if isinstance(event, bb.event.HeartbeatEvent):
+                continue
+
             if isinstance(event, bb.event.ParseStarted):
                 if not (build_log and build_log_file_path):
                     build_log, build_log_file_path = _open_build_log(log_dir)
@@ -432,9 +438,7 @@
                 elif event.type == "SetBRBE":
                     buildinfohelper.brbe = buildinfohelper._get_data_from_event(event)
                 elif event.type == "TaskArtifacts":
-                    # not implemented yet
-                    # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=10283 for details
-                    pass
+                    buildinfohelper.scan_task_artifacts(event)
                 elif event.type == "OSErrorException":
                     logger.error(event)
                 else:
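
For reference, the TaskArtifacts metadata event now routed to
scan_task_artifacts() carries a payload shaped like this (shape inferred
from the handler above; the values are illustrative):

    {
        'task': '<prefix>:do_populate_sdk',  # the handler keeps the part after ':'
        'artifacts': ['/build/tmp/deploy/sdk/poky-glibc-x86_64-toolchain.sh'],
    }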
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/utils.py b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
index 16fc9db..6a44db5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/utils.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
@@ -523,12 +523,8 @@
     """
     Return the hex string representation of the MD5 checksum of filename.
     """
-    try:
-        import hashlib
-        m = hashlib.md5()
-    except ImportError:
-        import md5
-        m = md5.new()
+    import hashlib
+    m = hashlib.md5()
 
     with open(filename, "rb") as f:
         for line in f:
@@ -538,14 +534,9 @@
 def sha256_file(filename):
     """
     Return the hex string representation of the 256-bit SHA checksum of
-    filename.  On Python 2.4 this will return None, so callers will need to
-    handle that by either skipping SHA checks, or running a standalone sha256sum
-    binary.
+    filename.
     """
-    try:
-        import hashlib
-    except ImportError:
-        return None
+    import hashlib
 
     s = hashlib.sha256()
     with open(filename, "rb") as f:
@@ -557,10 +548,7 @@
     """
     Return the hex string representation of the SHA1 checksum of the filename
     """
-    try:
-        import hashlib
-    except ImportError:
-        return None
+    import hashlib
 
     s = hashlib.sha1()
     with open(filename, "rb") as f:
@@ -665,7 +653,7 @@
     for var in bb.data.keys(d):
         export = d.getVarFlag(var, "export", False)
         if export:
-            os.environ[var] = d.getVar(var, True) or ""
+            os.environ[var] = d.getVar(var) or ""
 
 def _check_unsafe_delete_path(path):
     """
@@ -692,7 +680,7 @@
             if _check_unsafe_delete_path(path):
                 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
         # shutil.rmtree(name) would be ideal but its too slow
-        subprocess.call(['rm', '-rf'] + glob.glob(path))
+        subprocess.check_call(['rm', '-rf'] + glob.glob(path))
         return
     for name in glob.glob(path):
         try:
@@ -911,11 +899,20 @@
         newmtime = sstat[stat.ST_MTIME]
     return newmtime
 
-def which(path, item, direction = 0, history = False):
+def which(path, item, direction = 0, history = False, executable=False):
     """
-    Locate a file in a PATH
+    Locate `item` in the list of paths `path` (colon-separated string like $PATH).
+    If `direction` is non-zero then the list is reversed.
+    If `history` is True then the list of candidate paths is also returned as (result, history).
+    If `executable` is True then the candidate has to be an executable file,
+    otherwise the candidate simply has to exist.
     """
 
+    if executable:
+        is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
+    else:
+        is_candidate = lambda p: os.path.exists(p)
+
     hist = []
     paths = (path or "").split(':')
     if direction != 0:
@@ -924,7 +921,7 @@
     for p in paths:
         next = os.path.join(p, item)
         hist.append(next)
-        if os.path.exists(next):
+        if is_candidate(next):
             if not os.path.isabs(next):
                 next = os.path.abspath(next)
             if history:
@@ -953,7 +950,7 @@
     Arguments:
 
     variable -- the variable name. This will be fetched and expanded (using
-    d.getVar(variable, True)) and then split into a set().
+    d.getVar(variable)) and then split into a set().
 
     checkvalues -- if this is a string it is split on whitespace into a set(),
     otherwise coerced directly into a set().
@@ -966,7 +963,7 @@
     d -- the data store.
     """
 
-    val = d.getVar(variable, True)
+    val = d.getVar(variable)
     if not val:
         return falsevalue
     val = set(val.split())
@@ -979,7 +976,7 @@
     return falsevalue
 
 def contains_any(variable, checkvalues, truevalue, falsevalue, d):
-    val = d.getVar(variable, True)
+    val = d.getVar(variable)
     if not val:
         return falsevalue
     val = set(val.split())
@@ -991,6 +988,30 @@
         return truevalue
     return falsevalue
 
+def filter(variable, checkvalues, d):
+    """Return all words in the variable that are present in the checkvalues.
+
+    Arguments:
+
+    variable -- the variable name. This will be fetched and expanded (using
+    d.getVar(variable)) and then split into a set().
+
+    checkvalues -- if this is a string it is split on whitespace into a set(),
+    otherwise coerced directly into a set().
+
+    d -- the data store.
+    """
+
+    val = d.getVar(variable)
+    if not val:
+        return ''
+    val = set(val.split())
+    if isinstance(checkvalues, str):
+        checkvalues = set(checkvalues.split())
+    else:
+        checkvalues = set(checkvalues)
+    return ' '.join(sorted(checkvalues & val))
+
 def cpu_count():
     return multiprocessing.cpu_count()
 
@@ -1378,10 +1399,10 @@
 
 def get_file_layer(filename, d):
     """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
-    collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
+    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
     collection_res = {}
     for collection in collections:
-        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''
+        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''
 
     def path_to_layer(path):
         # Use longest path so we handle nested layers
@@ -1394,7 +1415,7 @@
         return match
 
     result = None
-    bbfiles = (d.getVar('BBFILES', True) or '').split()
+    bbfiles = (d.getVar('BBFILES') or '').split()
     bbfilesmatch = False
     for bbfilesentry in bbfiles:
         if fnmatch.fnmatch(filename, bbfilesentry):
@@ -1471,7 +1492,7 @@
         if v in os.environ.keys():
             exported = True
         else:
-            v_proxy = d.getVar(v, True)
+            v_proxy = d.getVar(v)
             if v_proxy is not None:
                 os.environ[v] = v_proxy
                 exported = True
@@ -1503,3 +1524,14 @@
                 plugins.append(obj or plugin)
             else:
                 plugins.append(plugin)
+
+
+class LogCatcher(logging.Handler):
+    """Logging handler for collecting logged messages so you can check them later"""
+    def __init__(self):
+        self.messages = []
+        logging.Handler.__init__(self, logging.WARNING)
+    def emit(self, record):
+        self.messages.append(bb.build.logformatter.format(record))
+    def contains(self, message):
+        return (message in self.messages)
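
Usage sketches for the two bb.utils additions above (the datastore contents
in the filter() example are hypothetical):

    import os
    import bb.utils

    # which() with executable=True skips matches that exist but are not
    # executable, e.g. a stray data file earlier in PATH
    gcc = bb.utils.which(os.environ.get('PATH', ''), 'gcc', executable=True)

    # filter() returns the intersection as a sorted, space-joined string;
    # with DISTRO_FEATURES = "alsa ipv6 x11" in the datastore d:
    #     bb.utils.filter('DISTRO_FEATURES', 'x11 wayland alsa', d) == 'alsa x11'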