Yocto 2.5

Move OpenBMC to Yocto 2.5 (sumo)

Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
Change-Id: I5c5ad6904a16e14c1c397f0baf10c9d465594a78
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
index 5268831..d24adb8 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.36.0"
+__version__ = "1.38.0"
 
 import sys
 if sys.version_info < (3, 4, 0):
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/build.py b/import-layers/yocto-poky/bitbake/lib/bb/build.py
index 0d0100a..4631abd 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/build.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/build.py
@@ -872,6 +872,12 @@
     that this may lead to the task itself being listed.
     """
     preceed = set()
+
+    # Ignore tasks which don't exist
+    tasks = d.getVar('__BBTASKS', False)
+    if task not in tasks:
+        return preceed
+
     preceed.update(d.getVarFlag(task, 'deps') or [])
     if with_recrdeptasks:
         recrdeptask = d.getVarFlag(task, 'recrdeptask')
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cache.py b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
index 86ce0e7..168a77a 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cache.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cache.py
@@ -395,7 +395,7 @@
         self.has_cache = True
         self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
 
-        logger.debug(1, "Using cache in '%s'", self.cachedir)
+        logger.debug(1, "Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
 
         cache_ok = True
@@ -408,6 +408,8 @@
             self.load_cachefile()
         elif os.path.isfile(self.cachefile):
             logger.info("Out of date cache found, rebuilding...")
+        else:
+            logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
 
     def load_cachefile(self):
         cachesize = 0
@@ -424,6 +426,7 @@
 
         for cache_class in self.caches_array:
             cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            logger.debug(1, 'Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
                 # Check cache version information
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
index c7fdd72..1fda40d 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py
@@ -516,6 +516,8 @@
             fn = runlist[0][3]
         else:
             envdata = self.data
+            data.expandKeys(envdata)
+            parse.ast.runAnonFuncs(envdata)
 
         if fn:
             try:
@@ -536,7 +538,6 @@
             logger.plain(env.getvalue())
 
         # emit the metadata which isnt valid shell
-        data.expandKeys(envdata)
         for e in sorted(envdata.keys()):
             if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -856,12 +857,12 @@
 
         with open('task-depends.dot', 'w') as f:
             f.write("digraph depends {\n")
-            for task in depgraph["tdepends"]:
+            for task in sorted(depgraph["tdepends"]):
                 (pn, taskname) = task.rsplit(".", 1)
                 fn = depgraph["pn"][pn]["filename"]
                 version = depgraph["pn"][pn]["version"]
                 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
-                for dep in depgraph["tdepends"][task]:
+                for dep in sorted(depgraph["tdepends"][task]):
                     f.write('"%s" -> "%s"\n' % (task, dep))
             f.write("}\n")
         logger.info("Task dependencies saved to 'task-depends.dot'")
@@ -869,23 +870,23 @@
         with open('recipe-depends.dot', 'w') as f:
             f.write("digraph depends {\n")
             pndeps = {}
-            for task in depgraph["tdepends"]:
+            for task in sorted(depgraph["tdepends"]):
                 (pn, taskname) = task.rsplit(".", 1)
                 if pn not in pndeps:
                     pndeps[pn] = set()
-                for dep in depgraph["tdepends"][task]:
+                for dep in sorted(depgraph["tdepends"][task]):
                     (deppn, deptaskname) = dep.rsplit(".", 1)
                     pndeps[pn].add(deppn)
-            for pn in pndeps:
+            for pn in sorted(pndeps):
                 fn = depgraph["pn"][pn]["filename"]
                 version = depgraph["pn"][pn]["version"]
                 f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
-                for dep in pndeps[pn]:
+                for dep in sorted(pndeps[pn]):
                     if dep == pn:
                         continue
                     f.write('"%s" -> "%s"\n' % (pn, dep))
             f.write("}\n")
-        logger.info("Flatened recipe dependencies saved to 'recipe-depends.dot'")
+        logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")
 
     def show_appends_with_no_recipes(self):
         # Determine which bbappends haven't been applied
@@ -1170,6 +1171,7 @@
                 elif regex == "":
                     parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                     errors = False
+                    continue
                 else:
                     try:
                         cre = re.compile(regex)
@@ -1603,8 +1605,6 @@
 
         if self.parser:
             self.parser.shutdown(clean=not force, force=force)
-        self.notifier.stop()
-        self.confignotifier.stop()
 
     def finishcommand(self):
         self.state = state.initial
@@ -1807,21 +1807,25 @@
             realfn, cls, mc = bb.cache.virtualfn2realfn(p)
             priorities[p] = self.calc_bbfile_priority(realfn, matched)
 
-        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
         unmatched = set()
         for _, _, regex, pri in self.bbfile_config_priorities:
             if not regex in matched:
                 unmatched.add(regex)
 
-        def findmatch(regex):
+        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
+        def find_bbappend_match(regex):
             for b in self.bbappends:
                 (bbfile, append) = b
                 if regex.match(append):
+                    # If the bbappend is already matched by the "matched" set, return False
+                    for matched_regex in matched:
+                        if matched_regex.match(append):
+                            return False
                     return True
             return False
 
         for unmatch in unmatched.copy():
-            if findmatch(unmatch):
+            if find_bbappend_match(unmatch):
                 unmatched.remove(unmatch)
 
         for collection, pattern, regex, _ in self.bbfile_config_priorities:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
index fab47c7..5df66e6 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py
@@ -143,7 +143,8 @@
         self.writeeventlog = False
         self.server_only = False
         self.limited_deps = False
-        self.runall = None
+        self.runall = []
+        self.runonly = []
 
         self.env = {}
 
@@ -395,6 +396,8 @@
                 if compat and not (compat & layerseries):
                     bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
                               % (c, " ".join(layerseries), " ".join(compat)))
+                elif not compat and not data.getVar("BB_WORKERCONTEXT"):
+                    bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
 
         if not data.getVar("BBPATH"):
             msg = "The BBPATH variable is not set"
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/event.py b/import-layers/yocto-poky/bitbake/lib/bb/event.py
index 52072b5..5d00496 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/event.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/event.py
@@ -449,12 +449,6 @@
     def setName(self, name):
         self._name = name
 
-    def getCfg(self):
-        return self.data
-
-    def setCfg(self, cfg):
-        self.data = cfg
-
     def getFailures(self):
         """
         Return the number of failed packages
@@ -463,9 +457,6 @@
 
     pkgs = property(getPkgs, setPkgs, None, "pkgs property")
     name = property(getName, setName, None, "name property")
-    cfg = property(getCfg, setCfg, None, "cfg property")
-
-
 
 class BuildInit(BuildBase):
     """buildFile or buildTargets was invoked"""
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index f70f1b5..72d6092 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -643,26 +643,25 @@
     if not ud.needdonestamp or (origud and not origud.needdonestamp):
         return True
 
-    if not os.path.exists(ud.donestamp):
+    if not os.path.exists(ud.localpath):
+        # local path does not exist
+        if os.path.exists(ud.donestamp):
+            # done stamp exists, but the downloaded file does not; the done stamp
+            # must be incorrect, re-trigger the download
+            bb.utils.remove(ud.donestamp)
         return False
 
     if (not ud.method.supports_checksum(ud) or
         (origud and not origud.method.supports_checksum(origud))):
-        # done stamp exists, checksums not supported; assume the local file is
-        # current
-        return True
-
-    if not os.path.exists(ud.localpath):
-        # done stamp exists, but the downloaded file does not; the done stamp
-        # must be incorrect, re-trigger the download
-        bb.utils.remove(ud.donestamp)
-        return False
+        # if the done stamp exists and checksums are not supported, assume the local
+        # file is current
+        return os.path.exists(ud.donestamp)
 
     precomputed_checksums = {}
     # Only re-use the precomputed checksums if the donestamp is newer than the
     # file. Do not rely on the mtime of directories, though. If ud.localpath is
     # a directory, there will probably not be any checksums anyway.
-    if (os.path.isdir(ud.localpath) or
+    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
             os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
         try:
             with open(ud.donestamp, "rb") as cachefile:
@@ -853,6 +852,9 @@
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
 
+    # Disable pseudo as it may affect ssh, potentially causing it to hang.
+    cmd = 'export PSEUDO_DISABLED=1; ' + cmd
+
     logger.debug(1, "Running %s", cmd)
 
     success = False
@@ -1424,7 +1426,7 @@
                 cmd = 'gzip -dc %s > %s' % (file, efile)
             elif file.endswith('.bz2'):
                 cmd = 'bzip2 -dc %s > %s' % (file, efile)
-            elif file.endswith('.tar.xz'):
+            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
             elif file.endswith('.xz'):
                 cmd = 'xz -dc %s > %s' % (file, efile)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
index 5ef8cd6..3de83be 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py
@@ -125,6 +125,9 @@
 
 
 class Git(FetchMethod):
+    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.join(os.path.abspath(__file__))), '..', '..', '..'))
+    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')
+
     """Class to fetch a module or modules from git repositories"""
     def init(self, d):
         pass
@@ -363,6 +366,7 @@
             progresshandler = GitProgressHandler(d)
             runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
             runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
+            runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
             runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
             try:
                 os.unlink(ud.fullmirror)
@@ -445,7 +449,7 @@
                 shallow_branches.append(r)
 
         # Make the repository shallow
-        shallow_cmd = ['git', 'make-shallow', '-s']
+        shallow_cmd = [self.make_shallow_path, '-s']
         for b in shallow_branches:
             shallow_cmd.append('-r')
             shallow_cmd.append(b)
@@ -591,7 +595,8 @@
         tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
         try:
             output = self._lsremote(ud, d, "refs/tags/*")
-        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
+        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
+            bb.note("Could not list remote: %s" % str(e))
             return pupver
 
         verstring = ""
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
index b5f148c..730c346 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py
@@ -195,9 +195,11 @@
         outputurl = pdata['dist']['tarball']
         data[pkg] = {}
         data[pkg]['tgz'] = os.path.basename(outputurl)
-        if not outputurl in fetchedlist:
-            self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
-            fetchedlist.append(outputurl)
+        if outputurl in fetchedlist:
+            return
+
+        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+        fetchedlist.append(outputurl)
 
         dependencies = pdata.get('dependencies', {})
         optionalDependencies = pdata.get('optionalDependencies', {})
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
index 7c49c2b..8f505b6 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py
@@ -250,6 +250,7 @@
                         return ""
                     def close(self):
                         pass
+                    closed = False
 
                 resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
                 resp.code = r.status
@@ -332,7 +333,8 @@
             except (TypeError, ImportError, IOError, netrc.NetrcParseError):
                  pass
 
-            opener.open(r)
+            with opener.open(r) as response:
+                pass
         except urllib.error.URLError as e:
             if try_again:
                 logger.debug(2, "checkstatus: trying again")
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/main.py b/import-layers/yocto-poky/bitbake/lib/bb/main.py
index 7711b29..f4474e4 100755
--- a/import-layers/yocto-poky/bitbake/lib/bb/main.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/main.py
@@ -292,8 +292,12 @@
                           help="Writes the event log of the build to a bitbake event json file. "
                                "Use '' (empty string) to assign the name automatically.")
 
-        parser.add_option("", "--runall", action="store", dest="runall",
-                          help="Run the specified task for all build targets and their dependencies.")
+        parser.add_option("", "--runall", action="append", dest="runall",
+                          help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).")
+
+        parser.add_option("", "--runonly", action="append", dest="runonly",
+                          help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).")
+
 
         options, targets = parser.parse_args(argv)
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
index 2fc4002..5397d57 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py
@@ -134,8 +134,9 @@
         if not newfn:
             raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
         fn = newfn
+    else:
+        mark_dependency(d, fn)
 
-    mark_dependency(d, fn)
     if not os.path.isfile(fn):
         raise IOError(errno.ENOENT, "file %s not found" % fn)
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
index dba4540..6690dc5 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py
@@ -335,6 +335,12 @@
     classes = m.group(1)
     statements.append(InheritNode(filename, lineno, classes))
 
+def runAnonFuncs(d):
+    code = []
+    for funcname in d.getVar("__BBANONFUNCS", False) or []:
+        code.append("%s(d)" % funcname)
+    bb.utils.better_exec("\n".join(code), {"d": d})
+
 def finalize(fn, d, variant = None):
     saved_handlers = bb.event.get_handlers().copy()
 
@@ -349,10 +355,7 @@
     bb.event.fire(bb.event.RecipePreFinalise(fn), d)
 
     bb.data.expandKeys(d)
-    code = []
-    for funcname in d.getVar("__BBANONFUNCS", False) or []:
-        code.append("%s(d)" % funcname)
-    bb.utils.better_exec("\n".join(code), {"d": d})
+    runAnonFuncs(d)
 
     tasklist = d.getVar('__BBTASKS', False) or []
     bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
index f89ad24..e5039e3 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -131,9 +131,6 @@
 
     abs_fn = resolve_file(fn, d)
 
-    if include:
-        bb.parse.mark_dependency(d, abs_fn)
-
     # actual loading
     statements = get_statements(fn, abs_fn, base_name)
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 97aa130..9d3ebe1 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -134,9 +134,6 @@
     abs_fn = resolve_file(fn, data)
     f = open(abs_fn, 'r')
 
-    if include:
-        bb.parse.mark_dependency(data, abs_fn)
-
     statements = ast.StatementGroup()
     lineno = 0
     while True:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
index ae12c25..f2e52cf 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py
@@ -181,7 +181,7 @@
         if self.rq.stats.active < self.rq.number_tasks:
             return self.next_buildable_task()
 
-    def newbuilable(self, task):
+    def newbuildable(self, task):
         self.buildable.append(task)
 
     def describe_task(self, taskid):
@@ -581,12 +581,6 @@
                     if t in taskData[mc].taskentries:
                         depends.add(t)
 
-        def add_resolved_dependencies(mc, fn, tasknames, depends):
-            for taskname in tasknames:
-                tid = build_tid(mc, fn, taskname)
-                if tid in self.runtaskentries:
-                    depends.add(tid)
-
         for mc in taskData:
             for tid in taskData[mc].taskentries:
 
@@ -673,57 +667,106 @@
                             recursiveitasks[tid].append(newdep)
 
                 self.runtaskentries[tid].depends = depends
+                # Remove all self references
+                self.runtaskentries[tid].depends.discard(tid)
 
         #self.dump_data()
 
+        self.init_progress_reporter.next_stage()
+
         # Resolve recursive 'recrdeptask' dependencies (Part B)
         #
         # e.g. do_sometask[recrdeptask] = "do_someothertask"
         # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
         # We need to do this separately since we need all of runtaskentries[*].depends to be complete before this is processed
-        self.init_progress_reporter.next_stage(len(recursivetasks))
-        extradeps = {}
-        for taskcounter, tid in enumerate(recursivetasks):
-            extradeps[tid] = set(self.runtaskentries[tid].depends)
 
-            tasknames = recursivetasks[tid]
-            seendeps = set()
+        # Generating/iterating recursive lists of dependencies is painful and potentially slow
+        # Precompute recursive task dependencies here by:
+        #     a) create a temp list of reverse dependencies (revdeps)
+        #     b) walk up the ends of the chains (when a given task no longer has dependencies i.e. len(deps) == 0)
+        #     c) combine the total list of dependencies in cumulativedeps
+        #     d) optimise by pre-truncating 'task' off the items in cumulativedeps (keeps items in sets lower)
 
-            def generate_recdeps(t):
-                newdeps = set()
-                (mc, fn, taskname, _) = split_tid_mcfn(t)
-                add_resolved_dependencies(mc, fn, tasknames, newdeps)
-                extradeps[tid].update(newdeps)
-                seendeps.add(t)
-                newdeps.add(t)
-                for i in newdeps:
-                    if i not in self.runtaskentries:
-                        # Not all recipes might have the recrdeptask task as a task
-                        continue
-                    task = self.runtaskentries[i].task
-                    for n in self.runtaskentries[i].depends:
-                        if n not in seendeps:
-                             generate_recdeps(n)
-            generate_recdeps(tid)
 
-            if tid in recursiveitasks:
-                for dep in recursiveitasks[tid]:
-                    generate_recdeps(dep)
-            self.init_progress_reporter.update(taskcounter)
-
-        # Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
-        for tid in recursivetasks:
-            extradeps[tid].difference_update(recursivetasksselfref)
-
+        revdeps = {}
+        deps = {}
+        cumulativedeps = {}
         for tid in self.runtaskentries:
-            task = self.runtaskentries[tid].task
-            # Add in extra dependencies
-            if tid in extradeps:
-                 self.runtaskentries[tid].depends = extradeps[tid]
-            # Remove all self references
-            if tid in self.runtaskentries[tid].depends:
-                logger.debug(2, "Task %s contains self reference!", tid)
-                self.runtaskentries[tid].depends.remove(tid)
+            deps[tid] = set(self.runtaskentries[tid].depends)
+            revdeps[tid] = set()
+            cumulativedeps[tid] = set()
+        # Generate a temp list of reverse dependencies
+        for tid in self.runtaskentries:
+            for dep in self.runtaskentries[tid].depends:
+                revdeps[dep].add(tid)
+        # Find the dependency chain endpoints
+        endpoints = set()
+        for tid in self.runtaskentries:
+            if len(deps[tid]) == 0:
+                endpoints.add(tid)
+        # Iterate the chains collating dependencies
+        while endpoints:
+            next = set()
+            for tid in endpoints:
+                for dep in revdeps[tid]:
+                    cumulativedeps[dep].add(fn_from_tid(tid))
+                    cumulativedeps[dep].update(cumulativedeps[tid])
+                    if tid in deps[dep]:
+                        deps[dep].remove(tid)
+                    if len(deps[dep]) == 0:
+                        next.add(dep)
+            endpoints = next
+        #for tid in deps:
+        #    if len(deps[tid]) != 0:
+        #        bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
+
+        # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
+        # resolve these recursively until we aren't adding any further extra dependencies
+        extradeps = True
+        while extradeps:
+            extradeps = 0
+            for tid in recursivetasks:
+                tasknames = recursivetasks[tid]
+
+                totaldeps = set(self.runtaskentries[tid].depends)
+                if tid in recursiveitasks:
+                    totaldeps.update(recursiveitasks[tid])
+                    for dep in recursiveitasks[tid]:
+                        if dep not in self.runtaskentries:
+                            continue
+                        totaldeps.update(self.runtaskentries[dep].depends)
+
+                deps = set()
+                for dep in totaldeps:
+                    if dep in cumulativedeps:
+                        deps.update(cumulativedeps[dep])
+
+                for t in deps:
+                    for taskname in tasknames:
+                        newtid = t + ":" + taskname
+                        if newtid == tid:
+                            continue
+                        if newtid in self.runtaskentries and newtid not in self.runtaskentries[tid].depends:
+                            extradeps += 1
+                            self.runtaskentries[tid].depends.add(newtid)
+
+                # Handle recursive tasks which depend upon other recursive tasks
+                deps = set()
+                for dep in self.runtaskentries[tid].depends.intersection(recursivetasks):
+                    deps.update(self.runtaskentries[dep].depends.difference(self.runtaskentries[tid].depends))
+                for newtid in deps:
+                    for taskname in tasknames:
+                        if not newtid.endswith(":" + taskname):
+                            continue
+                        if newtid in self.runtaskentries:
+                            extradeps += 1
+                            self.runtaskentries[tid].depends.add(newtid)
+
+            bb.debug(1, "Added %s recursive dependencies in this loop" % extradeps)
+
+        # Remove recrdeptask circular references so that do_a[recrdeptask] = "do_a do_b" can work
+        for tid in recursivetasksselfref:
+            self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
 
         self.init_progress_reporter.next_stage()
 
@@ -798,30 +841,57 @@
         #
         # Once all active tasks are marked, prune the ones we don't need.
 
-        delcount = 0
+        delcount = {}
         for tid in list(self.runtaskentries.keys()):
             if tid not in runq_build:
+                delcount[tid] = self.runtaskentries[tid]
                 del self.runtaskentries[tid]
-                delcount += 1
 
-        self.init_progress_reporter.next_stage()
-
-        if self.cooker.configuration.runall is not None:
-            runall = "do_%s" % self.cooker.configuration.runall
-            runall_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == runall }
-
+        # Handle --runall
+        if self.cooker.configuration.runall:
             # re-run the mark_active and then drop unused tasks from new list
             runq_build = {}
-            for tid in list(runall_tids):
-                mark_active(tid,1)
+
+            for task in self.cooker.configuration.runall:
+                runall_tids = set()
+                for tid in list(self.runtaskentries):
+                    wanttid = fn_from_tid(tid) + ":do_%s" % task
+                    if wanttid in delcount:
+                        self.runtaskentries[wanttid] = delcount[wanttid]
+                    if wanttid in self.runtaskentries:
+                        runall_tids.add(wanttid)
+
+                for tid in list(runall_tids):
+                    mark_active(tid,1)
 
             for tid in list(self.runtaskentries.keys()):
                 if tid not in runq_build:
+                    delcount[tid] = self.runtaskentries[tid]
                     del self.runtaskentries[tid]
-                    delcount += 1
 
             if len(self.runtaskentries) == 0:
-                bb.msg.fatal("RunQueue", "No remaining tasks to run for build target %s with runall %s" % (target, runall))
+                bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
+
+        self.init_progress_reporter.next_stage()
+
+        # Handle runonly
+        if self.cooker.configuration.runonly:
+            # re-run the mark_active and then drop unused tasks from new list
+            runq_build = {}
+
+            for task in self.cooker.configuration.runonly:
+                runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == "do_%s" % task }
+
+                for tid in list(runonly_tids):
+                    mark_active(tid,1)
+
+            for tid in list(self.runtaskentries.keys()):
+                if tid not in runq_build:
+                    delcount[tid] = self.runtaskentries[tid]
+                    del self.runtaskentries[tid]
+
+            if len(self.runtaskentries) == 0:
+                bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
 
         #
         # Step D - Sanity checks and computation
@@ -834,7 +904,7 @@
             else:
                 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
 
-        logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runtaskentries))
+        logger.verbose("Pruned %s inactive tasks, %s left", len(delcount), len(self.runtaskentries))
 
         logger.verbose("Assign Weightings")
 
@@ -1781,7 +1851,7 @@
 
     def setbuildable(self, task):
         self.runq_buildable.add(task)
-        self.sched.newbuilable(task)
+        self.sched.newbuildable(task)
 
     def task_completeoutright(self, task):
         """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
index 3d31355..828159e 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py
@@ -223,6 +223,8 @@
 
         try: 
             self.cooker.shutdown(True)
+            self.cooker.notifier.stop()
+            self.cooker.confignotifier.stop()
         except:
             pass
 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/shell.py b/import-layers/yocto-poky/bitbake/lib/bb/shell.py
deleted file mode 100644
index 1dd8d54..0000000
--- a/import-layers/yocto-poky/bitbake/lib/bb/shell.py
+++ /dev/null
@@ -1,820 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-##########################################################################
-#
-# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
-# Copyright (C) 2005-2006 Vanille Media
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-##########################################################################
-#
-# Thanks to:
-# * Holger Freyther <zecke@handhelds.org>
-# * Justin Patrin <papercrane@reversefold.com>
-#
-##########################################################################
-
-"""
-BitBake Shell
-
-IDEAS:
-    * list defined tasks per package
-    * list classes
-    * toggle force
-    * command to reparse just one (or more) bbfile(s)
-    * automatic check if reparsing is necessary (inotify?)
-    * frontend for bb file manipulation
-    * more shell-like features:
-        - output control, i.e. pipe output into grep, sort, etc.
-        - job control, i.e. bring running commands into background and foreground
-    * start parsing in background right after startup
-    * ncurses interface
-
-PROBLEMS:
-    * force doesn't always work
-    * readline completion for commands with more than one parameters
-
-"""
-
-##########################################################################
-# Import and setup global variables
-##########################################################################
-
-from __future__ import print_function
-from functools import reduce
-try:
-    set
-except NameError:
-    from sets import Set as set
-import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
-from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
-
-__version__ = "0.5.3.1"
-__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
-Type 'help' for more information, press CTRL-D to exit.""" % __version__
-
-cmds = {}
-leave_mainloop = False
-last_exception = None
-cooker = None
-parsed = False
-debug = os.environ.get( "BBSHELL_DEBUG", "" )
-
-##########################################################################
-# Class BitBakeShellCommands
-##########################################################################
-
-class BitBakeShellCommands:
-    """This class contains the valid commands for the shell"""
-
-    def __init__( self, shell ):
-        """Register all the commands"""
-        self._shell = shell
-        for attr in BitBakeShellCommands.__dict__:
-            if not attr.startswith( "_" ):
-                if attr.endswith( "_" ):
-                    command = attr[:-1].lower()
-                else:
-                    command = attr[:].lower()
-                method = getattr( BitBakeShellCommands, attr )
-                debugOut( "registering command '%s'" % command )
-                # scan number of arguments
-                usage = getattr( method, "usage", "" )
-                if usage != "<...>":
-                    numArgs = len( usage.split() )
-                else:
-                    numArgs = -1
-                shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
-
-    def _checkParsed( self ):
-        if not parsed:
-            print("SHELL: This command needs to parse bbfiles...")
-            self.parse( None )
-
-    def _findProvider( self, item ):
-        self._checkParsed()
-        # Need to use taskData for this information
-        preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
-        if not preferred: preferred = item
-        try:
-            lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
-        except KeyError:
-            if item in cooker.status.providers:
-                pf = cooker.status.providers[item][0]
-            else:
-                pf = None
-        return pf
-
-    def alias( self, params ):
-        """Register a new name for a command"""
-        new, old = params
-        if not old in cmds:
-            print("ERROR: Command '%s' not known" % old)
-        else:
-            cmds[new] = cmds[old]
-            print("OK")
-    alias.usage = "<alias> <command>"
-
-    def buffer( self, params ):
-        """Dump specified output buffer"""
-        index = params[0]
-        print(self._shell.myout.buffer( int( index ) ))
-    buffer.usage = "<index>"
-
-    def buffers( self, params ):
-        """Show the available output buffers"""
-        commands = self._shell.myout.bufferedCommands()
-        if not commands:
-            print("SHELL: No buffered commands available yet. Start doing something.")
-        else:
-            print("="*35, "Available Output Buffers", "="*27)
-            for index, cmd in enumerate( commands ):
-                print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
-            print("="*88)
-
-    def build( self, params, cmd = "build" ):
-        """Build a providee"""
-        global last_exception
-        globexpr = params[0]
-        self._checkParsed()
-        names = globfilter( cooker.status.pkg_pn, globexpr )
-        if len( names ) == 0: names = [ globexpr ]
-        print("SHELL: Building %s" % ' '.join( names ))
-
-        td = taskdata.TaskData(cooker.configuration.abort)
-        localdata = data.createCopy(cooker.configuration.data)
-        data.update_data(localdata)
-        data.expandKeys(localdata)
-
-        try:
-            tasks = []
-            for name in names:
-                td.add_provider(localdata, cooker.status, name)
-                providers = td.get_provider(name)
-
-                if len(providers) == 0:
-                    raise Providers.NoProvider
-
-                tasks.append([name, "do_%s" % cmd])
-
-            td.add_unresolved(localdata, cooker.status)
-
-            rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
-            rq.prepare_runqueue()
-            rq.execute_runqueue()
-
-        except Providers.NoProvider:
-            print("ERROR: No Provider")
-            last_exception = Providers.NoProvider
-
-        except runqueue.TaskFailure as fnids:
-            last_exception = runqueue.TaskFailure
-
-        except build.FuncFailed as e:
-            print("ERROR: Couldn't build '%s'" % names)
-            last_exception = e
-
-
-    build.usage = "<providee>"
-
-    def clean( self, params ):
-        """Clean a providee"""
-        self.build( params, "clean" )
-    clean.usage = "<providee>"
-
-    def compile( self, params ):
-        """Execute 'compile' on a providee"""
-        self.build( params, "compile" )
-    compile.usage = "<providee>"
-
-    def configure( self, params ):
-        """Execute 'configure' on a providee"""
-        self.build( params, "configure" )
-    configure.usage = "<providee>"
-
-    def install( self, params ):
-        """Execute 'install' on a providee"""
-        self.build( params, "install" )
-    install.usage = "<providee>"
-
-    def edit( self, params ):
-        """Call $EDITOR on a providee"""
-        name = params[0]
-        bbfile = self._findProvider( name )
-        if bbfile is not None:
-            os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
-        else:
-            print("ERROR: Nothing provides '%s'" % name)
-    edit.usage = "<providee>"
-
-    def environment( self, params ):
-        """Dump out the outer BitBake environment"""
-        cooker.showEnvironment()
-
-    def exit_( self, params ):
-        """Leave the BitBake Shell"""
-        debugOut( "setting leave_mainloop to true" )
-        global leave_mainloop
-        leave_mainloop = True
-
-    def fetch( self, params ):
-        """Fetch a providee"""
-        self.build( params, "fetch" )
-    fetch.usage = "<providee>"
-
-    def fileBuild( self, params, cmd = "build" ):
-        """Parse and build a .bb file"""
-        global last_exception
-        name = params[0]
-        bf = completeFilePath( name )
-        print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
-
-        try:
-            cooker.buildFile(bf, cmd)
-        except parse.ParseError:
-            print("ERROR: Unable to open or parse '%s'" % bf)
-        except build.FuncFailed as e:
-            print("ERROR: Couldn't build '%s'" % name)
-            last_exception = e
-
-    fileBuild.usage = "<bbfile>"
-
-    def fileClean( self, params ):
-        """Clean a .bb file"""
-        self.fileBuild( params, "clean" )
-    fileClean.usage = "<bbfile>"
-
-    def fileEdit( self, params ):
-        """Call $EDITOR on a .bb file"""
-        name = params[0]
-        os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
-    fileEdit.usage = "<bbfile>"
-
-    def fileRebuild( self, params ):
-        """Rebuild (clean & build) a .bb file"""
-        self.fileBuild( params, "rebuild" )
-    fileRebuild.usage = "<bbfile>"
-
-    def fileReparse( self, params ):
-        """(re)Parse a bb file"""
-        bbfile = params[0]
-        print("SHELL: Parsing '%s'" % bbfile)
-        parse.update_mtime( bbfile )
-        cooker.parser.reparse(bbfile)
-        if False: #fromCache:
-            print("SHELL: File has not been updated, not reparsing")
-        else:
-            print("SHELL: Parsed")
-    fileReparse.usage = "<bbfile>"
-
-    def abort( self, params ):
-        """Toggle abort task execution flag (see bitbake -k)"""
-        cooker.configuration.abort = not cooker.configuration.abort
-        print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
-
-    def force( self, params ):
-        """Toggle force task execution flag (see bitbake -f)"""
-        cooker.configuration.force = not cooker.configuration.force
-        print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
-
-    def help( self, params ):
-        """Show a comprehensive list of commands and their purpose"""
-        print("="*30, "Available Commands", "="*30)
-        for cmd in sorted(cmds):
-            function, numparams, usage, helptext = cmds[cmd]
-            print("| %s | %s" % (usage.ljust(30), helptext))
-        print("="*78)
-
-    def lastError( self, params ):
-        """Show the reason or log that was produced by the last BitBake event exception"""
-        if last_exception is None:
-            print("SHELL: No Errors yet (Phew)...")
-        else:
-            reason, event = last_exception.args
-            print("SHELL: Reason for the last error: '%s'" % reason)
-            if ':' in reason:
-                msg, filename = reason.split( ':' )
-                filename = filename.strip()
-                print("SHELL: Dumping log file for last error:")
-                try:
-                    print(open( filename ).read())
-                except IOError:
-                    print("ERROR: Couldn't open '%s'" % filename)
-
-    def match( self, params ):
-        """Dump all files or providers matching a glob expression"""
-        what, globexpr = params
-        if what == "files":
-            self._checkParsed()
-            for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
-        elif what == "providers":
-            self._checkParsed()
-            for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
-        else:
-            print("Usage: match %s" % self.print_.usage)
-    match.usage = "<files|providers> <glob>"
-
-    def new( self, params ):
-        """Create a new .bb file and open the editor"""
-        dirname, filename = params
-        packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
-        fulldirname = "%s/%s" % ( packages, dirname )
-
-        if not os.path.exists( fulldirname ):
-            print("SHELL: Creating '%s'" % fulldirname)
-            os.mkdir( fulldirname )
-        if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
-            if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
-                print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
-                return False
-            print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
-            newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
-            print("""DESCRIPTION = ""
-SECTION = ""
-AUTHOR = ""
-HOMEPAGE = ""
-MAINTAINER = ""
-LICENSE = "GPL"
-PR = "r0"
-
-SRC_URI = ""
-
-#inherit base
-
-#do_configure() {
-#
-#}
-
-#do_compile() {
-#
-#}
-
-#do_stage() {
-#
-#}
-
-#do_install() {
-#
-#}
-""", file=newpackage)
-            newpackage.close()
-            os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
-    new.usage = "<directory> <filename>"
-
-    def package( self, params ):
-        """Execute 'package' on a providee"""
-        self.build( params, "package" )
-    package.usage = "<providee>"
-
-    def pasteBin( self, params ):
-        """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
-        index = params[0]
-        contents = self._shell.myout.buffer( int( index ) )
-        sendToPastebin( "output of " + params[0], contents )
-    pasteBin.usage = "<index>"
-
-    def pasteLog( self, params ):
-        """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
-        if last_exception is None:
-            print("SHELL: No Errors yet (Phew)...")
-        else:
-            reason, event = last_exception.args
-            print("SHELL: Reason for the last error: '%s'" % reason)
-            if ':' in reason:
-                msg, filename = reason.split( ':' )
-                filename = filename.strip()
-                print("SHELL: Pasting log file to pastebin...")
-
-                file = open( filename ).read()
-                sendToPastebin( "contents of " + filename, file )
-
-    def patch( self, params ):
-        """Execute 'patch' command on a providee"""
-        self.build( params, "patch" )
-    patch.usage = "<providee>"
-
-    def parse( self, params ):
-        """(Re-)parse .bb files and calculate the dependency graph"""
-        cooker.status = cache.CacheData(cooker.caches_array)
-        ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
-        cooker.status.ignored_dependencies = set( ignore.split() )
-        cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
-
-        (filelist, masked) = cooker.collect_bbfiles()
-        cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
-        cooker.buildDepgraph()
-        global parsed
-        parsed = True
-        print()
-
-    def reparse( self, params ):
-        """(re)Parse a providee's bb file"""
-        bbfile = self._findProvider( params[0] )
-        if bbfile is not None:
-            print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
-            self.fileReparse( [ bbfile ] )
-        else:
-            print("ERROR: Nothing provides '%s'" % params[0])
-    reparse.usage = "<providee>"
-
-    def getvar( self, params ):
-        """Dump the contents of an outer BitBake environment variable"""
-        var = params[0]
-        value = data.getVar( var, cooker.configuration.data, 1 )
-        print(value)
-    getvar.usage = "<variable>"
-
-    def peek( self, params ):
-        """Dump contents of variable defined in providee's metadata"""
-        name, var = params
-        bbfile = self._findProvider( name )
-        if bbfile is not None:
-            the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
-            value = the_data.getVar( var, 1 )
-            print(value)
-        else:
-            print("ERROR: Nothing provides '%s'" % name)
-    peek.usage = "<providee> <variable>"
-
-    def poke( self, params ):
-        """Set contents of variable defined in providee's metadata"""
-        name, var, value = params
-        bbfile = self._findProvider( name )
-        if bbfile is not None:
-            print("ERROR: Sorry, this functionality is currently broken")
-            #d = cooker.pkgdata[bbfile]
-            #data.setVar( var, value, d )
-
-            # mark the change semi persistant
-            #cooker.pkgdata.setDirty(bbfile, d)
-            #print "OK"
-        else:
-            print("ERROR: Nothing provides '%s'" % name)
-    poke.usage = "<providee> <variable> <value>"
-
-    def print_( self, params ):
-        """Dump all files or providers"""
-        what = params[0]
-        if what == "files":
-            self._checkParsed()
-            for key in cooker.status.pkg_fn: print(key)
-        elif what == "providers":
-            self._checkParsed()
-            for key in cooker.status.providers: print(key)
-        else:
-            print("Usage: print %s" % self.print_.usage)
-    print_.usage = "<files|providers>"
-
-    def python( self, params ):
-        """Enter the expert mode - an interactive BitBake Python Interpreter"""
-        sys.ps1 = "EXPERT BB>>> "
-        sys.ps2 = "EXPERT BB... "
-        import code
-        interpreter = code.InteractiveConsole( dict( globals() ) )
-        interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
-
-    def showdata( self, params ):
-        """Execute 'showdata' on a providee"""
-        cooker.showEnvironment(None, params)
-    showdata.usage = "<providee>"
-
-    def setVar( self, params ):
-        """Set an outer BitBake environment variable"""
-        var, value = params
-        data.setVar( var, value, cooker.configuration.data )
-        print("OK")
-    setVar.usage = "<variable> <value>"
-
-    def rebuild( self, params ):
-        """Clean and rebuild a .bb file or a providee"""
-        self.build( params, "clean" )
-        self.build( params, "build" )
-    rebuild.usage = "<providee>"
-
-    def shell( self, params ):
-        """Execute a shell command and dump the output"""
-        if params != "":
-            print(commands.getoutput( " ".join( params ) ))
-    shell.usage = "<...>"
-
-    def stage( self, params ):
-        """Execute 'stage' on a providee"""
-        self.build( params, "populate_staging" )
-    stage.usage = "<providee>"
-
-    def status( self, params ):
-        """<just for testing>"""
-        print("-" * 78)
-        print("building list = '%s'" % cooker.building_list)
-        print("build path = '%s'" % cooker.build_path)
-        print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
-        print("build stats = '%s'" % cooker.stats)
-        if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
-        print("memory output contents = '%s'" % self._shell.myout._buffer)
-
-    def test( self, params ):
-        """<just for testing>"""
-        print("testCommand called with '%s'" % params)
-
-    def unpack( self, params ):
-        """Execute 'unpack' on a providee"""
-        self.build( params, "unpack" )
-    unpack.usage = "<providee>"
-
-    def which( self, params ):
-        """Computes the providers for a given providee"""
-        # Need to use taskData for this information
-        item = params[0]
-
-        self._checkParsed()
-
-        preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
-        if not preferred: preferred = item
-
-        try:
-            lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
-        except KeyError:
-            lv, lf, pv, pf = (None,)*4
-
-        try:
-            providers = cooker.status.providers[item]
-        except KeyError:
-            print("SHELL: ERROR: Nothing provides", preferred)
-        else:
-            for provider in providers:
-                if provider == pf: provider = " (***) %s" % provider
-                else:              provider = "       %s" % provider
-                print(provider)
-    which.usage = "<providee>"
-
-##########################################################################
-# Common helper functions
-##########################################################################
-
-def completeFilePath( bbfile ):
-    """Get the complete bbfile path"""
-    if not cooker.status: return bbfile
-    if not cooker.status.pkg_fn: return bbfile
-    for key in cooker.status.pkg_fn:
-        if key.endswith( bbfile ):
-            return key
-    return bbfile
-
-def sendToPastebin( desc, content ):
-    """Send content to http://oe.pastebin.com"""
-    mydata = {}
-    mydata["lang"] = "Plain Text"
-    mydata["desc"] = desc
-    mydata["cvt_tabs"] = "No"
-    mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
-    mydata["text"] = content
-    params = urllib.urlencode( mydata )
-    headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
-
-    host = "rafb.net"
-    conn = httplib.HTTPConnection( "%s:80" % host )
-    conn.request("POST", "/paste/paste.php", params, headers )
-
-    response = conn.getresponse()
-    conn.close()
-
-    if response.status == 302:
-        location = response.getheader( "location" ) or "unknown"
-        print("SHELL: Pasted to http://%s%s" % ( host, location ))
-    else:
-        print("ERROR: %s %s" % ( response.status, response.reason ))
-
-def completer( text, state ):
-    """Return a possible readline completion"""
-    debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
-
-    if state == 0:
-        line = readline.get_line_buffer()
-        if " " in line:
-            line = line.split()
-            # we are in second (or more) argument
-            if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
-                u = getattr( cmds[line[0]][0], "usage" ).split()[0]
-                if u == "<variable>":
-                    allmatches = cooker.configuration.data.keys()
-                elif u == "<bbfile>":
-                    if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
-                    else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
-                elif u == "<providee>":
-                    if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
-                    else: allmatches = cooker.status.providers.iterkeys()
-                else: allmatches = [ "(No tab completion available for this command)" ]
-            else: allmatches = [ "(No tab completion available for this command)" ]
-        else:
-            # we are in first argument
-            allmatches = cmds.iterkeys()
-
-        completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
-        #print "completer.matches = '%s'" % completer.matches
-    if len( completer.matches ) > state:
-        return completer.matches[state]
-    else:
-        return None
-
-def debugOut( text ):
-    if debug:
-        sys.stderr.write( "( %s )\n" % text )
-
-def columnize( alist, width = 80 ):
-    """
-    A word-wrap function that preserves existing line breaks
-    and most spaces in the text. Expects that existing line
-    breaks are posix newlines (\n).
-    """
-    return reduce(lambda line, word, width=width: '%s%s%s' %
-                  (line,
-                   ' \n'[(len(line[line.rfind('\n')+1:])
-                         + len(word.split('\n', 1)[0]
-                              ) >= width)],
-                   word),
-                  alist
-                 )
-
-def globfilter( names, pattern ):
-    return fnmatch.filter( names, pattern )
-
-##########################################################################
-# Class MemoryOutput
-##########################################################################
-
-class MemoryOutput:
-    """File-like output class buffering the output of the last 10 commands"""
-    def __init__( self, delegate ):
-        self.delegate = delegate
-        self._buffer = []
-        self.text = []
-        self._command = None
-
-    def startCommand( self, command ):
-        self._command = command
-        self.text = []
-    def endCommand( self ):
-        if self._command is not None:
-            if len( self._buffer ) == 10: del self._buffer[0]
-            self._buffer.append( ( self._command, self.text ) )
-    def removeLast( self ):
-        if self._buffer:
-            del self._buffer[ len( self._buffer ) - 1 ]
-        self.text = []
-        self._command = None
-    def lastBuffer( self ):
-        if self._buffer:
-            return self._buffer[ len( self._buffer ) -1 ][1]
-    def bufferedCommands( self ):
-        return [ cmd for cmd, output in self._buffer ]
-    def buffer( self, i ):
-        if i < len( self._buffer ):
-            return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
-        else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )
-    def write( self, text ):
-        if self._command is not None and text != "BB>> ": self.text.append( text )
-        if self.delegate is not None: self.delegate.write( text )
-    def flush( self ):
-        return self.delegate.flush()
-    def fileno( self ):
-        return self.delegate.fileno()
-    def isatty( self ):
-        return self.delegate.isatty()
-
-##########################################################################
-# Class BitBakeShell
-##########################################################################
-
-class BitBakeShell:
-
-    def __init__( self ):
-        """Register commands and set up readline"""
-        self.commandQ = Queue.Queue()
-        self.commands = BitBakeShellCommands( self )
-        self.myout = MemoryOutput( sys.stdout )
-        self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
-        self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
-
-        readline.set_completer( completer )
-        readline.set_completer_delims( " " )
-        readline.parse_and_bind("tab: complete")
-
-        try:
-            readline.read_history_file( self.historyfilename )
-        except IOError:
-            pass  # It doesn't exist yet.
-
-        print(__credits__)
-
-    def cleanup( self ):
-        """Write readline history and clean up resources"""
-        debugOut( "writing command history" )
-        try:
-            readline.write_history_file( self.historyfilename )
-        except:
-            print("SHELL: Unable to save command history")
-
-    def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
-        """Register a command"""
-        if usage == "": usage = command
-        if helptext == "": helptext = function.__doc__ or "<not yet documented>"
-        cmds[command] = ( function, numparams, usage, helptext )
-
-    def processCommand( self, command, params ):
-        """Process a command. Check number of params and print a usage string, if appropriate"""
-        debugOut( "processing command '%s'..." % command )
-        try:
-            function, numparams, usage, helptext = cmds[command]
-        except KeyError:
-            print("SHELL: ERROR: '%s' command is not a valid command." % command)
-            self.myout.removeLast()
-        else:
-            if (numparams != -1) and (not len( params ) == numparams):
-                print("Usage: '%s'" % usage)
-                return
-
-            result = function( self.commands, params )
-            debugOut( "result was '%s'" % result )
-
-    def processStartupFile( self ):
-        """Read and execute all commands found in $HOME/.bbsh_startup"""
-        if os.path.exists( self.startupfilename ):
-            startupfile = open( self.startupfilename, "r" )
-            for cmdline in startupfile:
-                debugOut( "processing startup line '%s'" % cmdline )
-                if not cmdline:
-                    continue
-                if "|" in cmdline:
-                    print("ERROR: '|' in startup file is not allowed. Ignoring line")
-                    continue
-                self.commandQ.put( cmdline.strip() )
-
-    def main( self ):
-        """The main command loop"""
-        while not leave_mainloop:
-            try:
-                if self.commandQ.empty():
-                    sys.stdout = self.myout.delegate
-                    cmdline = raw_input( "BB>> " )
-                    sys.stdout = self.myout
-                else:
-                    cmdline = self.commandQ.get()
-                if cmdline:
-                    allCommands = cmdline.split( ';' )
-                    for command in allCommands:
-                        pipecmd = None
-                        #
-                        # special case for expert mode
-                        if command == 'python':
-                            sys.stdout = self.myout.delegate
-                            self.processCommand( command, "" )
-                            sys.stdout = self.myout
-                        else:
-                            self.myout.startCommand( command )
-                            if '|' in command: # disable output
-                                command, pipecmd = command.split( '|' )
-                                delegate = self.myout.delegate
-                                self.myout.delegate = None
-                            tokens = shlex.split( command, True )
-                            self.processCommand( tokens[0], tokens[1:] or "" )
-                            self.myout.endCommand()
-                            if pipecmd is not None: # restore output
-                                self.myout.delegate = delegate
-
-                                pipe = popen2.Popen4( pipecmd )
-                                pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
-                                pipe.tochild.close()
-                                sys.stdout.write( pipe.fromchild.read() )
-                        #
-            except EOFError:
-                print()
-                return
-            except KeyboardInterrupt:
-                print()
-
-##########################################################################
-# Start function - called from the BitBake command line utility
-##########################################################################
-
-def start( aCooker ):
-    global cooker
-    cooker = aCooker
-    bbshell = BitBakeShell()
-    bbshell.processStartupFile()
-    bbshell.main()
-    bbshell.cleanup()
-
-if __name__ == "__main__":
-    print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/event.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/event.py
index c7eb1fe..d3a5f62 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/event.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/event.py
@@ -30,28 +30,45 @@
 import pickle
 from unittest.mock import Mock
 from unittest.mock import call
+from bb.msg import BBLogFormatter
 
 
-class EventQueueStub():
+class EventQueueStubBase(object):
+    """ Base class for EventQueueStub classes """
+    def __init__(self):
+        self.event_calls = []
+        return
+
+    def _store_event_data_string(self, event):
+        if isinstance(event, logging.LogRecord):
+            formatter = BBLogFormatter("%(levelname)s: %(message)s")
+            self.event_calls.append(formatter.format(event))
+        else:
+            self.event_calls.append(bb.event.getName(event))
+        return
+
+
+class EventQueueStub(EventQueueStubBase):
     """ Class used as specification for UI event handler queue stub objects """
     def __init__(self):
-        return
+        super(EventQueueStub, self).__init__()
 
     def send(self, event):
-        return
+        super(EventQueueStub, self)._store_event_data_string(event)
 
 
-class PickleEventQueueStub():
+class PickleEventQueueStub(EventQueueStubBase):
     """ Class used as specification for UI event handler queue stub objects
         with sendpickle method """
     def __init__(self):
-        return
+        super(PickleEventQueueStub, self).__init__()
 
     def sendpickle(self, pickled_event):
-        return
+        event = pickle.loads(pickled_event)
+        super(PickleEventQueueStub, self)._store_event_data_string(event)
 
 
-class UIClientStub():
+class UIClientStub(object):
     """ Class used as specification for UI event handler stub objects """
     def __init__(self):
         self.event = None
@@ -59,7 +76,7 @@
 
 class EventHandlingTest(unittest.TestCase):
     """ Event handling test class """
-    _threadlock_test_calls = []
+
 
     def setUp(self):
         self._test_process = Mock()
@@ -179,6 +196,33 @@
         self.assertEqual(self._test_process.event_handler2.call_args_list,
                          expected_event_handler2)
 
+    def test_class_handler_filters(self):
+        """ Test filters for class handlers """
+        mask = ["bb.event.OperationStarted"]
+        result = bb.event.register("event_handler1",
+                                   self._test_process.event_handler1,
+                                   mask)
+        self.assertEqual(result, bb.event.Registered)
+        result = bb.event.register("event_handler2",
+                                   self._test_process.event_handler2,
+                                   "*")
+        self.assertEqual(result, bb.event.Registered)
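+        # Install an event filter so that only event_handler2 receives
+        # OperationStarted events; event_handler1 should see nothing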
+        bb.event.set_eventfilter(
+            lambda name, handler, event, d :
+            name == 'event_handler2' and
+            bb.event.getName(event) == "OperationStarted")
+        event1 = bb.event.OperationStarted()
+        event2 = bb.event.OperationCompleted(total=123)
+        bb.event.fire_class_handlers(event1, None)
+        bb.event.fire_class_handlers(event2, None)
+        bb.event.fire_class_handlers(event2, None)
+        expected_event_handler1 = []
+        expected_event_handler2 = [call(event1)]
+        self.assertEqual(self._test_process.event_handler1.call_args_list,
+                         expected_event_handler1)
+        self.assertEqual(self._test_process.event_handler2.call_args_list,
+                         expected_event_handler2)
+
     def test_change_handler_event_mapping(self):
         """ Test changing the event mapping for class handlers """
         event1 = bb.event.OperationStarted()
@@ -196,8 +240,8 @@
                          expected)
 
         # unregister handler and register it only for OperationStarted
-        result = bb.event.remove("event_handler1",
-                                 self._test_process.event_handler1)
+        bb.event.remove("event_handler1",
+                        self._test_process.event_handler1)
         mask = ["bb.event.OperationStarted"]
         result = bb.event.register("event_handler1",
                                    self._test_process.event_handler1,
@@ -210,8 +254,8 @@
                          expected)
 
         # unregister handler and register it only for OperationCompleted
-        result = bb.event.remove("event_handler1",
-                                 self._test_process.event_handler1)
+        bb.event.remove("event_handler1",
+                        self._test_process.event_handler1)
         mask = ["bb.event.OperationCompleted"]
         result = bb.event.register("event_handler1",
                                    self._test_process.event_handler1,
@@ -259,6 +303,61 @@
         self.assertEqual(self._test_ui2.event.sendpickle.call_args_list,
                          expected)
 
+    def test_ui_handler_mask_filter(self):
+        """ Test filters for UI handlers """
+        mask = ["bb.event.OperationStarted"]
+        debug_domains = {}
+        self._test_ui1.event = Mock(spec_set=EventQueueStub)
+        result = bb.event.register_UIHhandler(self._test_ui1, mainui=True)
+        bb.event.set_UIHmask(result, logging.INFO, debug_domains, mask)
+        self._test_ui2.event = Mock(spec_set=PickleEventQueueStub)
+        result = bb.event.register_UIHhandler(self._test_ui2, mainui=True)
+        bb.event.set_UIHmask(result, logging.INFO, debug_domains, mask)
+
+        event1 = bb.event.OperationStarted()
+        event2 = bb.event.OperationCompleted(total=1)
+
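+        # Only event1 (OperationStarted) matches the mask; event2 should be
+        # filtered out of both UI handler queues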
+        bb.event.fire_ui_handlers(event1, None)
+        bb.event.fire_ui_handlers(event2, None)
+        expected = [call(event1)]
+        self.assertEqual(self._test_ui1.event.send.call_args_list,
+                         expected)
+        expected = [call(pickle.dumps(event1))]
+        self.assertEqual(self._test_ui2.event.sendpickle.call_args_list,
+                         expected)
+
+    def test_ui_handler_log_filter(self):
+        """ Test log filters for UI handlers """
+        mask = ["*"]
+        debug_domains = {'BitBake.Foo': logging.WARNING}
+
+        self._test_ui1.event = EventQueueStub()
+        result = bb.event.register_UIHhandler(self._test_ui1, mainui=True)
+        bb.event.set_UIHmask(result, logging.ERROR, debug_domains, mask)
+        self._test_ui2.event = PickleEventQueueStub()
+        result = bb.event.register_UIHhandler(self._test_ui2, mainui=True)
+        bb.event.set_UIHmask(result, logging.ERROR, debug_domains, mask)
+
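+        # The "*" mask passes all events; log records are filtered at ERROR
+        # level, except BitBake.Foo which is lowered to WARNING via debug_domains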
+        event1 = bb.event.OperationStarted()
+        bb.event.fire_ui_handlers(event1, None)   # All events match
+
+        event_log_handler = bb.event.LogHandler()
+        logger = logging.getLogger("BitBake")
+        logger.addHandler(event_log_handler)
+        logger1 = logging.getLogger("BitBake.Foo")
+        logger1.warning("Test warning LogRecord1") # Matches debug_domains level
+        logger1.info("Test info LogRecord")        # Filtered out
+        logger2 = logging.getLogger("BitBake.Bar")
+        logger2.error("Test error LogRecord")      # Matches filter base level
+        logger2.warning("Test warning LogRecord2") # Filtered out
+        logger.removeHandler(event_log_handler)
+
+        expected = ['OperationStarted',
+                    'WARNING: Test warning LogRecord1',
+                    'ERROR: Test error LogRecord']
+        self.assertEqual(self._test_ui1.event.event_calls, expected)
+        self.assertEqual(self._test_ui2.event.event_calls, expected)
+
     def test_fire(self):
         """ Test fire method used to trigger class and ui event handlers """
         mask = ["bb.event.ConfigParsed"]
@@ -289,18 +388,28 @@
         self.assertEqual(self._test_ui1.event.send.call_args_list,
                          expected)
 
+    def test_worker_fire(self):
+        """ Test the triggering of bb.event.worker_fire callback """
+        bb.event.worker_fire = Mock()
+        event = bb.event.Event()
+        bb.event.fire(event, None)
+        expected = [call(event, None)]
+        self.assertEqual(bb.event.worker_fire.call_args_list, expected)
+
     def test_print_ui_queue(self):
         """ Test print_ui_queue method """
         event1 = bb.event.OperationStarted()
         event2 = bb.event.OperationCompleted(total=123)
         bb.event.fire(event1, None)
         bb.event.fire(event2, None)
+        event_log_handler = bb.event.LogHandler()
         logger = logging.getLogger("BitBake")
-        logger.addHandler(bb.event.LogHandler())
+        logger.addHandler(event_log_handler)
         logger.info("Test info LogRecord")
         logger.warning("Test warning LogRecord")
         with self.assertLogs("BitBake", level="INFO") as cm:
             bb.event.print_ui_queue()
+        logger.removeHandler(event_log_handler)
         self.assertEqual(cm.output,
                          ["INFO:BitBake:Test info LogRecord",
                           "WARNING:BitBake:Test warning LogRecord"])
@@ -364,6 +473,7 @@
         self.assertEqual(self._threadlock_test_calls,
                          ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
 
+
     def test_disable_threadlock(self):
         """ Test disable_threadlock method """
         self._set_threadlock_test_mockups()
@@ -375,3 +485,502 @@
         # processed before finishing handling the first worker event.
         self.assertEqual(self._threadlock_test_calls,
                          ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
+
+
+class EventClassesTest(unittest.TestCase):
+    """ Event classes test class """
+
+    _worker_pid = 54321
+
+    def setUp(self):
+        bb.event.worker_pid = EventClassesTest._worker_pid
+
+    def test_Event(self):
+        """ Test the Event base class """
+        event = bb.event.Event()
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_HeartbeatEvent(self):
+        """ Test the HeartbeatEvent class """
+        time = 10
+        event = bb.event.HeartbeatEvent(time)
+        self.assertEqual(event.time, time)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_OperationStarted(self):
+        """ Test OperationStarted event class """
+        msg = "Foo Bar"
+        event = bb.event.OperationStarted(msg)
+        self.assertEqual(event.msg, msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_OperationCompleted(self):
+        """ Test OperationCompleted event class """
+        msg = "Foo Bar"
+        total = 123
+        event = bb.event.OperationCompleted(total, msg)
+        self.assertEqual(event.msg, msg)
+        self.assertEqual(event.total, total)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_OperationProgress(self):
+        """ Test OperationProgress event class """
+        msg = "Foo Bar"
+        total = 123
+        current = 111
+        event = bb.event.OperationProgress(current, total, msg)
+        self.assertEqual(event.msg, msg + ": %s/%s" % (current, total))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ConfigParsed(self):
+        """ Test the ConfigParsed class """
+        event = bb.event.ConfigParsed()
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_MultiConfigParsed(self):
+        """ Test MultiConfigParsed event class """
+        mcdata = {"foobar": "Foo Bar"}
+        event = bb.event.MultiConfigParsed(mcdata)
+        self.assertEqual(event.mcdata, mcdata)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_RecipeEvent(self):
+        """ Test RecipeEvent event base class """
+        callback = lambda a: 2 * a
+        event = bb.event.RecipeEvent(callback)
+        self.assertEqual(event.fn(1), callback(1))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_RecipePreFinalise(self):
+        """ Test RecipePreFinalise event class """
+        callback = lambda a: 2 * a
+        event = bb.event.RecipePreFinalise(callback)
+        self.assertEqual(event.fn(1), callback(1))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_RecipeTaskPreProcess(self):
+        """ Test RecipeTaskPreProcess event class """
+        callback = lambda a: 2 * a
+        tasklist = [("foobar", callback)]
+        event = bb.event.RecipeTaskPreProcess(callback, tasklist)
+        self.assertEqual(event.fn(1), callback(1))
+        self.assertEqual(event.tasklist, tasklist)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_RecipeParsed(self):
+        """ Test RecipeParsed event base class """
+        callback = lambda a: 2 * a
+        event = bb.event.RecipeParsed(callback)
+        self.assertEqual(event.fn(1), callback(1))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_StampUpdate(self):
+        targets = ["foo", "bar"]
+        stampfns = [lambda:"foobar"]
+        event = bb.event.StampUpdate(targets, stampfns)
+        self.assertEqual(event.targets, targets)
+        self.assertEqual(event.stampPrefix, stampfns)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_BuildBase(self):
+        """ Test base class for bitbake build events """
+        name = "foo"
+        pkgs = ["bar"]
+        failures = 123
+        event = bb.event.BuildBase(name, pkgs, failures)
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), failures)
+        name = event.name = "bar"
+        pkgs = event.pkgs = ["foo"]
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), failures)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_BuildInit(self):
+        """ Test class for bitbake build invocation events """
+        event = bb.event.BuildInit()
+        self.assertEqual(event.name, None)
+        self.assertEqual(event.pkgs, [])
+        self.assertEqual(event.getFailures(), 0)
+        name = event.name = "bar"
+        pkgs = event.pkgs = ["foo"]
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), 0)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_BuildStarted(self):
+        """ Test class for build started events """
+        name = "foo"
+        pkgs = ["bar"]
+        failures = 123
+        event = bb.event.BuildStarted(name, pkgs, failures)
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), failures)
+        self.assertEqual(event.msg, "Building Started")
+        name = event.name = "bar"
+        pkgs = event.pkgs = ["foo"]
+        msg = event.msg = "foobar"
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), failures)
+        self.assertEqual(event.msg, msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_BuildCompleted(self):
+        """ Test class for build completed events """
+        total = 1000
+        name = "foo"
+        pkgs = ["bar"]
+        failures = 123
+        interrupted = 1
+        event = bb.event.BuildCompleted(total, name, pkgs, failures,
+                                        interrupted)
+        self.assertEqual(event.name, name)
+        self.assertEqual(event.pkgs, pkgs)
+        self.assertEqual(event.getFailures(), failures)
+        self.assertEqual(event.msg, "Building Failed")
+        event2 = bb.event.BuildCompleted(total, name, pkgs)
+        self.assertEqual(event2.name, name)
+        self.assertEqual(event2.pkgs, pkgs)
+        self.assertEqual(event2.getFailures(), 0)
+        self.assertEqual(event2.msg, "Building Succeeded")
+        self.assertEqual(event2.pid, EventClassesTest._worker_pid)
+
+    def test_DiskFull(self):
+        """ Test DiskFull event class """
+        dev = "/dev/foo"
+        type = "ext4"
+        freespace = "104M"
+        mountpoint = "/"
+        event = bb.event.DiskFull(dev, type, freespace, mountpoint)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_MonitorDiskEvent(self):
+        """ Test MonitorDiskEvent class """
+        available_bytes = 10000000
+        free_bytes = 90000000
+        total_bytes = 1000000000
+        du = bb.event.DiskUsageSample(available_bytes, free_bytes,
+                                      total_bytes)
+        event = bb.event.MonitorDiskEvent(du)
+        self.assertEqual(event.disk_usage.available_bytes, available_bytes)
+        self.assertEqual(event.disk_usage.free_bytes, free_bytes)
+        self.assertEqual(event.disk_usage.total_bytes, total_bytes)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_NoProvider(self):
+        """ Test NoProvider event class """
+        item = "foobar"
+        event1 = bb.event.NoProvider(item)
+        self.assertEqual(event1.getItem(), item)
+        self.assertEqual(event1.isRuntime(), False)
+        self.assertEqual(str(event1), "Nothing PROVIDES 'foobar'")
+        runtime = True
+        dependees = ["foo", "bar"]
+        reasons = None
+        close_matches = ["foibar", "footbar"]
+        event2 = bb.event.NoProvider(item, runtime, dependees, reasons,
+                                     close_matches)
+        self.assertEqual(event2.isRuntime(), True)
+        expected = ("Nothing RPROVIDES 'foobar' (but foo, bar RDEPENDS"
+                    " on or otherwise requires it). Close matches:\n"
+                    "  foibar\n"
+                    "  footbar")
+        self.assertEqual(str(event2), expected)
+        reasons = ["Item does not exist on database"]
+        close_matches = ["foibar", "footbar"]
+        event3 = bb.event.NoProvider(item, runtime, dependees, reasons,
+                                     close_matches)
+        expected = ("Nothing RPROVIDES 'foobar' (but foo, bar RDEPENDS"
+                    " on or otherwise requires it)\n"
+                    "Item does not exist on database")
+        self.assertEqual(str(event3), expected)
+        self.assertEqual(event3.pid, EventClassesTest._worker_pid)
+
+    def test_MultipleProviders(self):
+        """ Test MultipleProviders event class """
+        item = "foobar"
+        candidates = ["foobarv1", "foobars"]
+        event1 = bb.event.MultipleProviders(item, candidates)
+        self.assertEqual(event1.isRuntime(), False)
+        self.assertEqual(event1.getItem(), item)
+        self.assertEqual(event1.getCandidates(), candidates)
+        expected = ("Multiple providers are available for foobar (foobarv1,"
+                    " foobars)\n"
+                    "Consider defining a PREFERRED_PROVIDER entry to match "
+                    "foobar")
+        self.assertEqual(str(event1), expected)
+        runtime = True
+        event2 = bb.event.MultipleProviders(item, candidates, runtime)
+        self.assertEqual(event2.isRuntime(), runtime)
+        expected = ("Multiple providers are available for runtime foobar "
+                    "(foobarv1, foobars)\n"
+                    "Consider defining a PREFERRED_RPROVIDER entry to match "
+                    "foobar")
+        self.assertEqual(str(event2), expected)
+        self.assertEqual(event2.pid, EventClassesTest._worker_pid)
+
+    def test_ParseStarted(self):
+        """ Test ParseStarted event class """
+        total = 123
+        event = bb.event.ParseStarted(total)
+        self.assertEqual(event.msg, "Recipe parsing Started")
+        self.assertEqual(event.total, total)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ParseCompleted(self):
+        """ Test ParseCompleted event class """
+        cached = 10
+        parsed = 13
+        skipped = 7
+        virtuals = 2
+        masked = 1
+        errors = 0
+        total = 23
+        event = bb.event.ParseCompleted(cached, parsed, skipped, masked,
+                                        virtuals, errors, total)
+        self.assertEqual(event.msg, "Recipe parsing Completed")
+        expected = [cached, parsed, skipped, virtuals, masked, errors,
+                    cached + parsed, total]
+        actual = [event.cached, event.parsed, event.skipped, event.virtuals,
+                  event.masked, event.errors, event.sofar, event.total]
+        self.assertEqual(str(actual), str(expected))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ParseProgress(self):
+        """ Test ParseProgress event class """
+        current = 10
+        total = 100
+        event = bb.event.ParseProgress(current, total)
+        self.assertEqual(event.msg,
+                         "Recipe parsing" + ": %s/%s" % (current, total))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_CacheLoadStarted(self):
+        """ Test CacheLoadStarted event class """
+        total = 123
+        event = bb.event.CacheLoadStarted(total)
+        self.assertEqual(event.msg, "Loading cache Started")
+        self.assertEqual(event.total, total)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_CacheLoadProgress(self):
+        """ Test CacheLoadProgress event class """
+        current = 10
+        total = 100
+        event = bb.event.CacheLoadProgress(current, total)
+        self.assertEqual(event.msg,
+                         "Loading cache" + ": %s/%s" % (current, total))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_CacheLoadCompleted(self):
+        """ Test CacheLoadCompleted event class """
+        total = 23
+        num_entries = 12
+        event = bb.event.CacheLoadCompleted(total, num_entries)
+        self.assertEqual(event.msg, "Loading cache Completed")
+        expected = [total, num_entries]
+        actual = [event.total, event.num_entries]
+        self.assertEqual(str(actual), str(expected))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_TreeDataPreparationStarted(self):
+        """ Test TreeDataPreparationStarted event class """
+        event = bb.event.TreeDataPreparationStarted()
+        self.assertEqual(event.msg, "Preparing tree data Started")
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_TreeDataPreparationProgress(self):
+        """ Test TreeDataPreparationProgress event class """
+        current = 10
+        total = 100
+        event = bb.event.TreeDataPreparationProgress(current, total)
+        self.assertEqual(event.msg,
+                         "Preparing tree data" + ": %s/%s" % (current, total))
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_TreeDataPreparationCompleted(self):
+        """ Test TreeDataPreparationCompleted event class """
+        total = 23
+        event = bb.event.TreeDataPreparationCompleted(total)
+        self.assertEqual(event.msg, "Preparing tree data Completed")
+        self.assertEqual(event.total, total)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_DepTreeGenerated(self):
+        """ Test DepTreeGenerated event class """
+        depgraph = Mock()
+        event = bb.event.DepTreeGenerated(depgraph)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_TargetsTreeGenerated(self):
+        """ Test TargetsTreeGenerated event class """
+        model = Mock()
+        event = bb.event.TargetsTreeGenerated(model)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ReachableStamps(self):
+        """ Test ReachableStamps event class """
+        stamps = [Mock(), Mock()]
+        event = bb.event.ReachableStamps(stamps)
+        self.assertEqual(event.stamps, stamps)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_FilesMatchingFound(self):
+        """ Test FilesMatchingFound event class """
+        pattern = "foo.*bar"
+        matches = ["foobar"]
+        event = bb.event.FilesMatchingFound(pattern, matches)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ConfigFilesFound(self):
+        """ Test ConfigFilesFound event class """
+        variable = "FOO_BAR"
+        values = ["foo", "bar"]
+        event = bb.event.ConfigFilesFound(variable, values)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ConfigFilePathFound(self):
+        """ Test ConfigFilePathFound event class """
+        path = "/foo/bar"
+        event = bb.event.ConfigFilePathFound(path)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_message_classes(self):
+        """ Test message event classes """
+        msg = "foobar foo bar"
+        event = bb.event.MsgBase(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgDebug(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgNote(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgWarn(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgError(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgFatal(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+        event = bb.event.MsgPlain(msg)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_LogExecTTY(self):
+        """ Test LogExecTTY event class """
+        msg = "foo bar"
+        prog = "foo.sh"
+        sleep_delay = 10
+        retries = 3
+        event = bb.event.LogExecTTY(msg, prog, sleep_delay, retries)
+        self.assertEqual(event.msg, msg)
+        self.assertEqual(event.prog, prog)
+        self.assertEqual(event.sleep_delay, sleep_delay)
+        self.assertEqual(event.retries, retries)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def _throw_zero_division_exception(self):
+        a = 1 / 0
+        return
+
+    def _worker_handler(self, event, d):
+        self._returned_event = event
+        return
+
+    def test_LogHandler(self):
+        """ Test LogHandler class """
+        logger = logging.getLogger("TestEventClasses")
+        logger.propagate = False
+        handler = bb.event.LogHandler(logging.INFO)
+        logger.addHandler(handler)
+        bb.event.worker_fire = self._worker_handler
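+        # LogHandler forwards emitted log records through bb.event.worker_fire,
+        # so the stub handler set above captures the resulting event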
+        try:
+            self._throw_zero_division_exception()
+        except ZeroDivisionError as ex:
+            logger.exception(ex)
+        event = self._returned_event
+        try:
+            pe = pickle.dumps(event)
+            newevent = pickle.loads(pe)
+        except:
+            self.fail('Logged event is not serializable')
+        self.assertEqual(event.taskpid, EventClassesTest._worker_pid)
+
+    def test_MetadataEvent(self):
+        """ Test MetadataEvent class """
+        eventtype = "footype"
+        eventdata = {"foo": "bar"}
+        event = bb.event.MetadataEvent(eventtype, eventdata)
+        self.assertEqual(event.type, eventtype)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ProcessStarted(self):
+        """ Test ProcessStarted class """
+        processname = "foo"
+        total = 9783128974
+        event = bb.event.ProcessStarted(processname, total)
+        self.assertEqual(event.processname, processname)
+        self.assertEqual(event.total, total)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ProcessProgress(self):
+        """ Test ProcessProgress class """
+        processname = "foo"
+        progress = 243224
+        event = bb.event.ProcessProgress(processname, progress)
+        self.assertEqual(event.processname, processname)
+        self.assertEqual(event.progress, progress)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_ProcessFinished(self):
+        """ Test ProcessFinished class """
+        processname = "foo"
+        total = 1242342344
+        event = bb.event.ProcessFinished(processname)
+        self.assertEqual(event.processname, processname)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_SanityCheck(self):
+        """ Test SanityCheck class """
+        event1 = bb.event.SanityCheck()
+        self.assertEqual(event1.generateevents, True)
+        self.assertEqual(event1.pid, EventClassesTest._worker_pid)
+        generateevents = False
+        event2 = bb.event.SanityCheck(generateevents)
+        self.assertEqual(event2.generateevents, generateevents)
+        self.assertEqual(event2.pid, EventClassesTest._worker_pid)
+
+    def test_SanityCheckPassed(self):
+        """ Test SanityCheckPassed class """
+        event = bb.event.SanityCheckPassed()
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+    def test_SanityCheckFailed(self):
+        """ Test SanityCheckFailed class """
+        msg = "The sanity test failed."
+        event1 = bb.event.SanityCheckFailed(msg)
+        self.assertEqual(event1.pid, EventClassesTest._worker_pid)
+        network_error = True
+        event2 = bb.event.SanityCheckFailed(msg, network_error)
+        self.assertEqual(event2.pid, EventClassesTest._worker_pid)
+
+    def test_network_event_classes(self):
+        """ Test network event classes """
+        event1 = bb.event.NetworkTest()
+        generateevents = False
+        self.assertEqual(event1.pid, EventClassesTest._worker_pid)
+        event2 = bb.event.NetworkTest(generateevents)
+        self.assertEqual(event2.pid, EventClassesTest._worker_pid)
+        event3 = bb.event.NetworkTestPassed()
+        self.assertEqual(event3.pid, EventClassesTest._worker_pid)
+        event4 = bb.event.NetworkTestFailed()
+        self.assertEqual(event4.pid, EventClassesTest._worker_pid)
+
+    def test_FindSigInfoResult(self):
+        """ Test FindSigInfoResult event class """
+        result = [Mock()]
+        event = bb.event.FindSigInfoResult(result)
+        self.assertEqual(event.result, result)
+        self.assertEqual(event.pid, EventClassesTest._worker_pid)
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
index 7d7c5d7..74859f9 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py
@@ -20,6 +20,7 @@
 #
 
 import unittest
+import hashlib
 import tempfile
 import subprocess
 import collections
@@ -522,6 +523,109 @@
         with self.assertRaises(bb.fetch2.UnpackError):
             self.fetchUnpack(['file://a;subdir=/bin/sh'])
 
+class FetcherNoNetworkTest(FetcherTest):
+    def setUp(self):
+        super().setUp()
+        # all test cases assume no network access is available
+        self.d.setVar("BB_NO_NETWORK", "1")
+
+    def test_missing(self):
+        string = "this is a test file\n".encode("utf-8")
+        self.d.setVarFlag("SRC_URI", "md5sum", hashlib.md5(string).hexdigest())
+        self.d.setVarFlag("SRC_URI", "sha256sum", hashlib.sha256(string).hexdigest())
+
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/test-file.tar.gz"], self.d)
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+    def test_valid_missing_donestamp(self):
+        # create the file in the download directory with correct hash
+        string = "this is a test file\n".encode("utf-8")
+        with open(os.path.join(self.dldir, "test-file.tar.gz"), "wb") as f:
+            f.write(string)
+
+        self.d.setVarFlag("SRC_URI", "md5sum", hashlib.md5(string).hexdigest())
+        self.d.setVarFlag("SRC_URI", "sha256sum", hashlib.sha256(string).hexdigest())
+
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/test-file.tar.gz"], self.d)
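+        # the existing file passes checksum verification, so download() succeeds
+        # without network access and (re)creates the donestamp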
+        fetcher.download()
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+
+    def test_invalid_missing_donestamp(self):
+        # create an invalid file in the download directory with an incorrect hash
+        string = "this is a test file\n".encode("utf-8")
+        with open(os.path.join(self.dldir, "test-file.tar.gz"), "wb"):
+            pass
+
+        self.d.setVarFlag("SRC_URI", "md5sum", hashlib.md5(string).hexdigest())
+        self.d.setVarFlag("SRC_URI", "sha256sum", hashlib.sha256(string).hexdigest())
+
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/test-file.tar.gz"], self.d)
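+        # the empty local file fails checksum verification, so a download is
+        # attempted, which is forbidden while BB_NO_NETWORK is set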
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+        # the existing file should not exist or should have been moved to "bad-checksum"
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+
+    def test_nochecksums_missing(self):
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        # ssh fetch does not support checksums
+        fetcher = bb.fetch.Fetch(["ssh://invalid@invalid.yoctoproject.org/test-file.tar.gz"], self.d)
+        # attempts to download with missing donestamp
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+    def test_nochecksums_missing_donestamp(self):
+        # create a file in the download directory
+        with open(os.path.join(self.dldir, "test-file.tar.gz"), "wb"):
+            pass
+
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        # ssh fetch does not support checksums
+        fetcher = bb.fetch.Fetch(["ssh://invalid@invalid.yoctoproject.org/test-file.tar.gz"], self.d)
+        # attempts to download with missing donestamp
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+    def test_nochecksums_has_donestamp(self):
+        # create a file in the download directory with the donestamp
+        with open(os.path.join(self.dldir, "test-file.tar.gz"), "wb"):
+            pass
+        with open(os.path.join(self.dldir, "test-file.tar.gz.done"), "wb"):
+            pass
+
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        # ssh fetch does not support checksums
+        fetcher = bb.fetch.Fetch(["ssh://invalid@invalid.yoctoproject.org/test-file.tar.gz"], self.d)
+        # should not fetch
+        fetcher.download()
+        # both files should still exist
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+
+    def test_nochecksums_missing_has_donestamp(self):
+        # create a file in the download directory with the donestamp
+        with open(os.path.join(self.dldir, "test-file.tar.gz.done"), "wb"):
+            pass
+
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+        # ssh fetch does not support checksums
+        fetcher = bb.fetch.Fetch(["ssh://invalid@invalid.yoctoproject.org/test-file.tar.gz"], self.d)
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+        # both files should still exist
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz")))
+        self.assertFalse(os.path.exists(os.path.join(self.dldir, "test-file.tar.gz.done")))
+
 class FetcherNetworkTest(FetcherTest):
     @skipIfNoNetwork()
     def test_fetch(self):
@@ -809,7 +913,7 @@
             ud = bb.fetch2.FetchData(k[1], self.d)
             pupver= ud.method.latest_versionstring(ud, self.d)
             verstring = pupver[0]
-            self.assertTrue(verstring, msg="Could not find upstream version")
+            self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
             r = bb.utils.vercmp_string(v, verstring)
             self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
 
@@ -822,7 +926,7 @@
             ud = bb.fetch2.FetchData(k[1], self.d)
             pupver = ud.method.latest_versionstring(ud, self.d)
             verstring = pupver[0]
-            self.assertTrue(verstring, msg="Could not find upstream version")
+            self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
             r = bb.utils.vercmp_string(v, verstring)
             self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
 
@@ -874,9 +978,6 @@
 
 
 class GitMakeShallowTest(FetcherTest):
-    bitbake_dir = os.path.join(os.path.dirname(os.path.join(__file__)), '..', '..', '..')
-    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')
-
     def setUp(self):
         FetcherTest.setUp(self)
         self.gitdir = os.path.join(self.tempdir, 'gitshallow')
@@ -905,7 +1006,7 @@
     def make_shallow(self, args=None):
         if args is None:
             args = ['HEAD']
-        return bb.process.run([self.make_shallow_path] + args, cwd=self.gitdir)
+        return bb.process.run([bb.fetch2.git.Git.make_shallow_path] + args, cwd=self.gitdir)
 
     def add_empty_file(self, path, msg=None):
         if msg is None:
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
index fa95f63..368264f 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py
@@ -604,13 +604,16 @@
         recipecache = self.cooker.recipecaches[mc]
         prov = self.find_best_provider(pn)
         fn = prov[3]
-        actual_pn = recipecache.pkg_fn[fn]
-        recipe = TinfoilRecipeInfo(recipecache,
-                                    self.config_data,
-                                    pn=actual_pn,
-                                    fn=fn,
-                                    fns=recipecache.pkg_pn[actual_pn])
-        return recipe
+        if fn:
+            actual_pn = recipecache.pkg_fn[fn]
+            recipe = TinfoilRecipeInfo(recipecache,
+                                        self.config_data,
+                                        pn=actual_pn,
+                                        fn=fn,
+                                        fns=recipecache.pkg_pn[actual_pn])
+            return recipe
+        else:
+            return None
 
     def parse_recipe(self, pn):
         """
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/utils.py b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
index c540b49..378e699 100644
--- a/import-layers/yocto-poky/bitbake/lib/bb/utils.py
+++ b/import-layers/yocto-poky/bitbake/lib/bb/utils.py
@@ -187,7 +187,7 @@
             #r[-1] += ' ' + ' '.join(j)
     return r
 
-def explode_dep_versions2(s):
+def explode_dep_versions2(s, *, sort=True):
     """
     Take an RDEPENDS style string of format:
     "DEPEND1 (optional version) DEPEND2 (optional version) ..."
@@ -250,7 +250,8 @@
         if not (i in r and r[i]):
             r[lastdep] = []
 
-    r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
+    if sort:
+        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
     return r
 
 def explode_dep_versions(s):
@@ -806,8 +807,8 @@
                 return None # failure
         try:
             if didcopy:
-                os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
-                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
+                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
+                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                 os.unlink(src)
         except Exception as e:
             print("movefile: Failed to chown/chmod/unlink", dest, e)