reset upstream subtrees to HEAD

Reset the following subtrees on HEAD:
  poky: 8217b477a1(master)
  meta-xilinx: 64aa3d35ae(master)
  meta-openembedded: 0435c9e193(master)
  meta-raspberrypi: 490a4441ac(master)
  meta-security: cb6d1c85ee(master)

Squashed patches:
  meta-phosphor: drop systemd 239 patches
  meta-phosphor: mrw-api: use correct install path

Change-Id: I268e2646d9174ad305630c6bbd3fbc1a6105f43d
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/meta/classes/archiver.bbclass b/poky/meta/classes/archiver.bbclass
index e321a0e..af9f010 100644
--- a/poky/meta/classes/archiver.bbclass
+++ b/poky/meta/classes/archiver.bbclass
@@ -37,8 +37,11 @@
 DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
 ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
 ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
+ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
+ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
 ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
 
+
 do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
 do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
@@ -117,6 +120,9 @@
     if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
         if "package_rpm" in d.getVar('PACKAGE_CLASSES'):
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
+            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
+            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
+            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
             if ar_recipe == "1":
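
Note: the three appendVarFlag calls above route do_package_write_rpm output through the archiver's shared-state directories. A toy sketch of the flag-append semantics they rely on, using a plain dictionary as a stand-in for bitbake's real datastore (d.appendVarFlag lives in bb.data_smart):

    # Stand-in datastore: appendVarFlag concatenates onto any existing
    # flag value, which is why the values above lead with a space.
    class FlagStore:
        def __init__(self):
            self.flags = {}

        def appendVarFlag(self, var, flag, value):
            self.flags[(var, flag)] = self.flags.get((var, flag), '') + value

        def getVarFlag(self, var, flag):
            return self.flags.get((var, flag), '')

    d = FlagStore()
    d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
    d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
    print(d.getVarFlag('do_package_write_rpm', 'sstate-inputdirs'))
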
diff --git a/poky/meta/classes/base.bbclass b/poky/meta/classes/base.bbclass
index bc9b236..1636c6e 100644
--- a/poky/meta/classes/base.bbclass
+++ b/poky/meta/classes/base.bbclass
@@ -122,6 +122,10 @@
         desttool = os.path.join(dest, tool)
         if not os.path.exists(desttool):
             srctool = bb.utils.which(path, tool, executable=True)
+            # gcc/g++ may link to ccache on some hosts, e.g.,
+            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
+            # would return /usr/local/bin/ccache/gcc, but what we need is
+            # /usr/bin/gcc; detect that case and fix it here.
             if "ccache" in srctool:
                 srctool = bb.utils.which(path, tool, executable=True, direction=1)
             if srctool:
@@ -216,7 +220,7 @@
         bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
 
 addhandler base_eventhandler
-base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.runqueue.sceneQueueComplete bb.event.RecipeParsed"
+base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
 python base_eventhandler() {
     import bb.runqueue
 
@@ -224,6 +228,12 @@
         if not d.getVar("NATIVELSBSTRING", False):
             d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
         d.setVar('BB_VERSION', bb.__version__)
+
+    # There might be no bb.event.ConfigParsed event if the bitbake server is
+    # already running, so also check bb.event.BuildStarted to make sure
+    # ${HOSTTOOLS_DIR} exists.
+    if isinstance(e, bb.event.ConfigParsed) or \
+            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
         # Works with the line in layer.conf which changes PATH to point here
         setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
         setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)
@@ -260,23 +270,10 @@
     if isinstance(e, bb.event.RecipePreFinalise):
         if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
             d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
-            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
             d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
             d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")
 
-    if isinstance(e, bb.runqueue.sceneQueueComplete):
-        completions = d.expand("${STAGING_DIR}/sstatecompletions")
-        if os.path.exists(completions):
-            cmds = set()
-            with open(completions, "r") as f:
-                cmds = set(f)
-            d.setVar("completion_function", "\n".join(cmds))
-            d.setVarFlag("completion_function", "func", "1")
-            bb.debug(1, "Executing SceneQueue Completion commands: %s" % "\n".join(cmds))
-            bb.build.exec_func("completion_function", d)
-            os.remove(completions)
-
     if isinstance(e, bb.event.RecipeParsed):
         #
         # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
@@ -301,7 +298,6 @@
 
 addtask configure after do_patch
 do_configure[dirs] = "${B}"
-do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
 base_do_configure() {
 	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
 		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
@@ -467,12 +463,15 @@
 
     if bb.data.inherits_class('license', d):
         check_license_format(d)
-        unmatched_license_flag = check_license_flags(d)
-        if unmatched_license_flag:
-            bb.debug(1, "Skipping %s because it has a restricted license not"
-                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
-            raise bb.parse.SkipRecipe("because it has a restricted license not"
-                 " whitelisted in LICENSE_FLAGS_WHITELIST")
+        unmatched_license_flags = check_license_flags(d)
+        if unmatched_license_flags:
+            if len(unmatched_license_flags) == 1:
+                message = "because it has a restricted license '{0}'. Which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
+            else:
+                message = "because it has restricted licenses {0}. Which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
+                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
+            bb.debug(1, "Skipping %s %s" % (pn, message))
+            raise bb.parse.SkipRecipe(message)
 
     # If we're building a target package we need to use fakeroot (pseudo)
     # in order to capture permissions, owners, groups and special files
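
Note: the reworked skip message distinguishes one unmatched flag from several. A standalone copy that can be run outside bitbake ('commercial' and 'license_x' are made-up flags for illustration):

    def skip_message(unmatched_license_flags):
        # Mirrors the message construction in base.bbclass above.
        if len(unmatched_license_flags) == 1:
            return "because it has a restricted license '{0}', which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
        return "because it has restricted licenses {0}, which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
            ", ".join("'{0}'".format(f) for f in unmatched_license_flags))

    print(skip_message(['commercial']))
    print(skip_message(['commercial', 'license_x']))
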
diff --git a/poky/meta/classes/buildhistory.bbclass b/poky/meta/classes/buildhistory.bbclass
index 40b292b..796f68c 100644
--- a/poky/meta/classes/buildhistory.bbclass
+++ b/poky/meta/classes/buildhistory.bbclass
@@ -519,12 +519,14 @@
 
 buildhistory_list_files() {
 	# List the files in the specified directory, but exclude date/time etc.
-	# This awk script is somewhat messy, but handles where the size is not printed for device files under pseudo
+	# This is somewhat messy, but handles the case where the size is not printed for device files under pseudo
+	( cd $1
+	find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
 	if [ "$3" = "fakeroot" ] ; then
-		( cd $1 && ${FAKEROOTENV} ${FAKEROOTCMD} find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
+		eval ${FAKEROOTENV} ${FAKEROOTCMD} $find_cmd
 	else
-		( cd $1 && find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
-	fi
+		eval $find_cmd
+	fi | sort -k5 | sed 's/ * -> $//' > $2 )
 }
 
 buildhistory_list_pkg_files() {
@@ -670,12 +672,29 @@
     statusheader = d.getVar('BUILDCFG_HEADER')
     return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
 
+def buildhistory_get_modified(path):
+    # copied from get_layer_git_status() in image-buildinfo.bbclass
+    import subprocess
+    try:
+        subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+                                git diff --quiet --no-ext-diff
+                                git diff --quiet --no-ext-diff --cached""" % path,
+                                shell=True,
+                                stderr=subprocess.STDOUT)
+        return ""
+    except subprocess.CalledProcessError as ex:
+        # Silently treat errors as "modified", without checking for the
+        # (expected) return code 1 in a modified git repo. For example, we get
+        # output and a 129 return code when a layer isn't a git repo at all.
+        return " -- modified"
+
 def buildhistory_get_metadata_revs(d):
     # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
     layers = (d.getVar("BBLAYERS") or "").split()
-    medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
+    medadata_revs = ["%-17s = %s:%s%s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
-        base_get_metadata_git_revision(i, None)) \
+        base_get_metadata_git_revision(i, None), \
+        buildhistory_get_modified(i)) \
             for i in layers]
     return '\n'.join(medadata_revs)
 
@@ -896,7 +915,7 @@
             if orig_srcrev != 'INVALID':
                 f.write('# SRCREV = "%s"\n' % orig_srcrev)
             if len(srcrevs) > 1:
-                for name, srcrev in srcrevs.items():
+                for name, srcrev in sorted(srcrevs.items()):
                     orig_srcrev = d.getVar('SRCREV_%s' % name, False)
                     if orig_srcrev:
                         f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev))
@@ -904,7 +923,7 @@
             else:
                 f.write('SRCREV = "%s"\n' % next(iter(srcrevs.values())))
             if len(tag_srcrevs) > 0:
-                for name, srcrev in tag_srcrevs.items():
+                for name, srcrev in sorted(tag_srcrevs.items()):
                     f.write('# tag_%s = "%s"\n' % (name, srcrev))
                     if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
                         pkg = d.getVar('PN')
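
Note: buildhistory_get_modified() flags a layer as modified whenever the pair of git diff checks exits non-zero. The same logic, runnable outside bitbake (PSEUDO_UNLOAD only matters under bitbake's pseudo environment and is harmless elsewhere):

    import subprocess

    def get_modified(path):
        # set -e makes the first failing 'git diff --quiet' abort the shell;
        # exit 1 means a dirty tree, 129 means not a git repo at all.
        try:
            subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
                                    git diff --quiet --no-ext-diff
                                    git diff --quiet --no-ext-diff --cached""" % path,
                                    shell=True, stderr=subprocess.STDOUT)
            return ""
        except subprocess.CalledProcessError:
            return " -- modified"

    print("layer status:%s" % (get_modified(".") or " clean"))
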
diff --git a/poky/meta/classes/ccache.bbclass b/poky/meta/classes/ccache.bbclass
index 9609020..b545735 100644
--- a/poky/meta/classes/ccache.bbclass
+++ b/poky/meta/classes/ccache.bbclass
@@ -1,5 +1,37 @@
-CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
-export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_TARGET_SYS}/${PN}"
+#
+# Usage:
+# - Enable ccache
+#   Add the following line to a conffile such as conf/local.conf:
+#   INHERIT += "ccache"
+#
+# - Disable ccache for a recipe
+#   Add the following line to the recipe if it can't be built with ccache:
+#   CCACHE_DISABLE = '1'
+#
+# - Share ccache files between different builds
+#   Set CCACHE_TOP_DIR to a shared directory:
+#   CCACHE_TOP_DIR = "/path/to/shared_ccache/"
+#
+# - To debug ccache
+#   export CCACHE_DEBUG = "1"
+#   export CCACHE_LOGFILE = "${CCACHE_DIR}/logfile.log"
+#   Also set PARALLEL_MAKE = "-j 1" to keep the log in order
+#
+
+# Set this to a shared location so that cache files can be shared between
+# different builds.
+CCACHE_TOP_DIR ?= "${TMPDIR}/ccache"
+
+# ccache removes CCACHE_BASEDIR from file paths, so that hashes will be the same
+# in different builds.
+export CCACHE_BASEDIR ?= "${TMPDIR}"
+
+# Used for sharing cache files after the compiler is rebuilt
+export CCACHE_COMPILERCHECK ?= "%compiler% -dumpspecs"
+
+export CCACHE_CONFIGPATH ?= "${COREBASE}/meta/conf/ccache.conf"
+
+export CCACHE_DIR ?= "${CCACHE_TOP_DIR}/${MULTIMACH_TARGET_SYS}/${PN}"
 
 # We need to stop ccache considering the current directory or the
 # debug-prefix-map target directory to be significant when calculating
@@ -7,5 +39,28 @@
 # ${PV} or ${PR} change.
 export CCACHE_NOHASHDIR ?= "1"
 
-DEPENDS_append_class-target = " ccache-native"
-DEPENDS[vardepvalueexclude] = " ccache-native"
+python() {
+    """
+    Enable ccache for the recipe
+    """
+    pn = d.getVar('PN')
+    # quilt-native doesn't need ccache since it has no C files
+    if not (pn in ('ccache-native', 'quilt-native') or
+            bb.utils.to_boolean(d.getVar('CCACHE_DISABLE'))):
+        d.appendVar('DEPENDS', ' ccache-native')
+        d.setVar('CCACHE', 'ccache ')
+}
+
+addtask cleanccache after do_clean
+python do_cleanccache() {
+    import shutil
+
+    ccache_dir = d.getVar('CCACHE_DIR')
+    if os.path.exists(ccache_dir):
+        bb.note("Removing %s" % ccache_dir)
+        shutil.rmtree(ccache_dir)
+    else:
+        bb.note("%s doesn't exist" % ccache_dir)
+}
+addtask cleanall after do_cleanccache
+do_cleanccache[nostamp] = "1"
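
Note: the anonymous python() above turns ccache on per recipe unless the recipe opts out. A rough standalone sketch of that decision; to_boolean() below is a local stand-in for bb.utils.to_boolean() with assumed semantics:

    def to_boolean(string, default=False):
        # Assumed semantics of bb.utils.to_boolean().
        if not string:
            return default
        if string.lower() in ('y', 'yes', 't', 'true', '1'):
            return True
        if string.lower() in ('n', 'no', 'f', 'false', '0'):
            return False
        raise ValueError("Invalid boolean value '%s'" % string)

    def ccache_enabled(pn, ccache_disable):
        # ccache-native can't depend on itself; quilt-native has no C files.
        if pn in ('ccache-native', 'quilt-native'):
            return False
        return not to_boolean(ccache_disable)

    print(ccache_enabled('busybox', None))       # True
    print(ccache_enabled('busybox', '1'))        # False
    print(ccache_enabled('quilt-native', None))  # False
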
diff --git a/poky/meta/classes/clutter.bbclass b/poky/meta/classes/clutter.bbclass
index 5edab0e..24b53a1 100644
--- a/poky/meta/classes/clutter.bbclass
+++ b/poky/meta/classes/clutter.bbclass
@@ -14,4 +14,5 @@
 SRC_URI = "${GNOME_MIRROR}/${REALNAME}/${VERMINOR}/${REALNAME}-${PV}.tar.xz;name=archive"
 S = "${WORKDIR}/${REALNAME}-${PV}"
 
-inherit autotools pkgconfig gtk-doc gettext
+CLUTTERBASEBUILDCLASS ??= "autotools"
+inherit ${CLUTTERBASEBUILDCLASS} pkgconfig gtk-doc gettext
diff --git a/poky/meta/classes/cmake.bbclass b/poky/meta/classes/cmake.bbclass
index b364d2b..e166304 100644
--- a/poky/meta/classes/cmake.bbclass
+++ b/poky/meta/classes/cmake.bbclass
@@ -4,9 +4,6 @@
 DEPENDS_prepend = "cmake-native "
 B = "${WORKDIR}/build"
 
-# We need to unset CCACHE otherwise cmake gets too confused
-CCACHE = ""
-
 # What CMake generator to use.
 # The supported options are "Unix Makefiles" or "Ninja".
 OECMAKE_GENERATOR ?= "Ninja"
@@ -23,10 +20,22 @@
         d.setVarFlag("do_compile", "progress", r"outof:^\[(\d+)/(\d+)\]\s+")
     else:
         bb.fatal("Unknown CMake Generator %s" % generator)
+
+    # C/C++ Compiler (without cpu arch/tune arguments)
+    if not d.getVar('OECMAKE_C_COMPILER'):
+        cc_list = d.getVar('CC').split()
+        if cc_list[0] == 'ccache':
+            d.setVar('OECMAKE_C_COMPILER', '%s %s' % (cc_list[0], cc_list[1]))
+        else:
+            d.setVar('OECMAKE_C_COMPILER', cc_list[0])
+
+    if not d.getVar('OECMAKE_CXX_COMPILER'):
+        cxx_list = d.getVar('CXX').split()
+        if cxx_list[0] == 'ccache':
+            d.setVar('OECMAKE_CXX_COMPILER', '%s %s' % (cxx_list[0], cxx_list[1]))
+        else:
+            d.setVar('OECMAKE_CXX_COMPILER', cxx_list[0])
 }
-# C/C++ Compiler (without cpu arch/tune arguments)
-OECMAKE_C_COMPILER ?= "`echo ${CC} | sed 's/^\([^ ]*\).*/\1/'`"
-OECMAKE_CXX_COMPILER ?= "`echo ${CXX} | sed 's/^\([^ ]*\).*/\1/'`"
 OECMAKE_AR ?= "${AR}"
 
 # Compiler flags
@@ -108,6 +117,10 @@
 # add for non /usr/lib libdir, e.g. /usr/lib64
 set( CMAKE_LIBRARY_PATH ${libdir} ${base_libdir})
 
+# add include dir to implicit includes in case it differs from /usr/include
+list(APPEND CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
+list(APPEND CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES ${includedir})
+
 EOF
 }
 
@@ -151,7 +164,6 @@
 	  -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \
 	  -DCMAKE_INSTALL_SO_NO_EXE=0 \
 	  -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
-	  -DCMAKE_VERBOSE_MAKEFILE=1 \
 	  -DCMAKE_NO_SYSTEM_FROM_IMPORTED=1 \
 	  ${EXTRA_OECMAKE} \
 	  -Wno-dev
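
Note: replacing the sed one-liners, the new python keeps a leading ccache launcher together with the compiler while still dropping the cpu arch/tune arguments. A standalone sketch (the toolchain names are examples):

    def cmake_compiler(cmd):
        # First word only, unless that word is the ccache launcher, in
        # which case keep the compiler that follows it too.
        words = cmd.split()
        if words[0] == 'ccache':
            return '%s %s' % (words[0], words[1])
        return words[0]

    assert cmake_compiler('arm-poky-linux-gnueabi-gcc -march=armv7-a') == 'arm-poky-linux-gnueabi-gcc'
    assert cmake_compiler('ccache arm-poky-linux-gnueabi-gcc -march=armv7-a') == 'ccache arm-poky-linux-gnueabi-gcc'
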
diff --git a/poky/meta/classes/cml1.bbclass b/poky/meta/classes/cml1.bbclass
index 926747f..98d24ce 100644
--- a/poky/meta/classes/cml1.bbclass
+++ b/poky/meta/classes/cml1.bbclass
@@ -26,7 +26,7 @@
     except OSError:
         mtime = 0
 
-    oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
+    oe_terminal("sh -c \"make %s; if [ \\$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
                 d.getVar('PN') + ' Configuration', d)
 
     # FIXME this check can be removed when the minimum bitbake version has been bumped
diff --git a/poky/meta/classes/compress_doc.bbclass b/poky/meta/classes/compress_doc.bbclass
index 45bb8ff..d6d11fa 100644
--- a/poky/meta/classes/compress_doc.bbclass
+++ b/poky/meta/classes/compress_doc.bbclass
@@ -160,6 +160,7 @@
     return False, ''
 
 def compress_doc(topdir, compress_mode, compress_cmds):
+    import subprocess
     hardlink_dict = {}
     for root, dirs, files in os.walk(topdir):
         for f in files:
@@ -187,6 +188,7 @@
 
 # Decompress doc files which format is not compress_mode
 def decompress_doc(topdir, compress_mode, decompress_cmds):
+    import subprocess
     hardlink_dict = {}
     decompress = True
     for root, dirs, files in os.walk(topdir):
diff --git a/poky/meta/classes/cpan-base.bbclass b/poky/meta/classes/cpan-base.bbclass
index 577fcd6..867edf8 100644
--- a/poky/meta/classes/cpan-base.bbclass
+++ b/poky/meta/classes/cpan-base.bbclass
@@ -2,7 +2,7 @@
 # cpan-base providers various perl related information needed for building
 # cpan modules
 #
-FILES_${PN} += "${libdir}/perl ${datadir}/perl"
+FILES_${PN} += "${libdir}/perl5 ${datadir}/perl5"
 
 DEPENDS  += "${@["perl", "perl-native"][(bb.data.inherits_class('native', d))]}"
 RDEPENDS_${PN} += "${@["perl", ""][(bb.data.inherits_class('native', d))]}"
@@ -14,5 +14,5 @@
         return "yes"
     return "no"
 
-PERLLIBDIRS = "${libdir}/perl"
-PERLLIBDIRS_class-native = "${libdir}/perl-native"
+PERLLIBDIRS = "${libdir}/perl5"
+PERLLIBDIRS_class-native = "${libdir}/perl5"
diff --git a/poky/meta/classes/cpan.bbclass b/poky/meta/classes/cpan.bbclass
index a5bc301..e9908ae 100644
--- a/poky/meta/classes/cpan.bbclass
+++ b/poky/meta/classes/cpan.bbclass
@@ -10,13 +10,14 @@
 export PERLCONFIGTARGET = "${@is_target(d)}"
 
 # Env var which tells perl where the perl include files are
-export PERL_INC = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}/CORE"
-export PERL_LIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
-export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
-export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${@get_perl_version(d)}/"
+export PERL_INC = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}/${@get_perl_arch(d)}/CORE"
+export PERL_LIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}"
+export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}/${@get_perl_arch(d)}"
+export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl5/${@get_perl_version(d)}/"
+export PERLHOSTARCHLIB = "${STAGING_LIBDIR_NATIVE}/perl5/${@get_perl_version(d)}/${@get_perl_hostarch(d)}/"
 
 cpan_do_configure () {
-	yes '' | perl ${EXTRA_PERLFLAGS} Makefile.PL INSTALLDIRS=vendor NO_PERLLOCAL=1 NO_PACKLIST=1 ${EXTRA_CPANFLAGS}
+	yes '' | perl ${EXTRA_PERLFLAGS} Makefile.PL INSTALLDIRS=vendor NO_PERLLOCAL=1 NO_PACKLIST=1 PERL=$(which perl) ${EXTRA_CPANFLAGS}
 
 	# Makefile.PLs can exit with success without generating a
 	# Makefile, e.g. in cases of missing configure time
@@ -27,7 +28,7 @@
 	[ -e Makefile ] || bbfatal "No Makefile was generated by Makefile.PL"
 
 	if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
-		. ${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh
+		. ${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/config.sh
 		# Use find since there can be a Makefile generated for each Makefile.PL
 		for f in `find -name Makefile.PL`; do
 			f2=`echo $f | sed -e 's/.PL//'`
diff --git a/poky/meta/classes/cpan_build.bbclass b/poky/meta/classes/cpan_build.bbclass
index 9a2ad89..f3fb466 100644
--- a/poky/meta/classes/cpan_build.bbclass
+++ b/poky/meta/classes/cpan_build.bbclass
@@ -7,14 +7,15 @@
 
 # Env var which tells perl if it should use host (no) or target (yes) settings
 export PERLCONFIGTARGET = "${@is_target(d)}"
-export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
-export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${@get_perl_version(d)}/"
+export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/${@get_perl_version(d)}/${@get_perl_arch(d)}"
+export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl5/${@get_perl_version(d)}/"
+export PERLHOSTARCHLIB = "${STAGING_LIBDIR_NATIVE}/perl5/${@get_perl_version(d)}/${@get_perl_hostarch(d)}/"
 export LD = "${CCLD}"
 
 cpan_build_do_configure () {
 	if [ "${@is_target(d)}" = "yes" ]; then
 		# build for target
-		. ${STAGING_LIBDIR}/perl/config.sh
+		. ${STAGING_LIBDIR}/perl5/config.sh
 	fi
 
 	perl Build.PL --installdirs vendor --destdir ${D} \
diff --git a/poky/meta/classes/cross-canadian.bbclass b/poky/meta/classes/cross-canadian.bbclass
index acde331..f5c9f61 100644
--- a/poky/meta/classes/cross-canadian.bbclass
+++ b/poky/meta/classes/cross-canadian.bbclass
@@ -8,6 +8,8 @@
 # SDK packages are built either explicitly by the user,
 # or indirectly via dependency.  No need to be in 'world'.
 EXCLUDE_FROM_WORLD = "1"
+NATIVESDKLIBC ?= "libc-glibc"
+LIBCOVERRIDE = ":${NATIVESDKLIBC}"
 CLASSOVERRIDE = "class-cross-canadian"
 STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${SDK_VENDOR}-${SDK_OS}:${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}"
 
diff --git a/poky/meta/classes/cross.bbclass b/poky/meta/classes/cross.bbclass
index 34d7951..f832561 100644
--- a/poky/meta/classes/cross.bbclass
+++ b/poky/meta/classes/cross.bbclass
@@ -17,6 +17,9 @@
 HOST_LD_ARCH = "${BUILD_LD_ARCH}"
 HOST_AS_ARCH = "${BUILD_AS_ARCH}"
 
+# No strip sysroot when DEBUG_BUILD is enabled
+INHIBIT_SYSROOT_STRIP ?= "${@oe.utils.vartrue('DEBUG_BUILD', '1', '', d)}"
+
 export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir} /lib /lib64 /usr/lib /usr/lib64"
 
 STAGING_DIR_HOST = "${RECIPE_SYSROOT_NATIVE}"
diff --git a/poky/meta/classes/crosssdk.bbclass b/poky/meta/classes/crosssdk.bbclass
index fdaaac8..c0c0bfe 100644
--- a/poky/meta/classes/crosssdk.bbclass
+++ b/poky/meta/classes/crosssdk.bbclass
@@ -1,6 +1,8 @@
 inherit cross
 
 CLASSOVERRIDE = "class-crosssdk"
+NATIVESDKLIBC ?= "libc-glibc"
+LIBCOVERRIDE = ":${NATIVESDKLIBC}"
 MACHINEOVERRIDES = ""
 PACKAGE_ARCH = "${SDK_ARCH}"
 python () {
diff --git a/poky/meta/classes/debian.bbclass b/poky/meta/classes/debian.bbclass
index 989ea8f..6f8a599 100644
--- a/poky/meta/classes/debian.bbclass
+++ b/poky/meta/classes/debian.bbclass
@@ -29,11 +29,11 @@
 
     pkgdest = d.getVar("PKGDEST")
     packages = d.getVar('PACKAGES')
-    so_re = re.compile("lib.*\.so")
+    so_re = re.compile(r"lib.*\.so")
 
     def socrunch(s):
         s = s.lower().replace('_', '-')
-        m = re.match("^(.*)(.)\.so\.(.*)$", s)
+        m = re.match(r"^(.*)(.)\.so\.(.*)$", s)
         if m is None:
             return None
         if m.group(2) in '0123456789':
@@ -79,7 +79,7 @@
                     try:
                         cmd = [d.expand("${TARGET_PREFIX}objdump"), "-p", f]
                         output = subprocess.check_output(cmd).decode("utf-8")
-                        for m in re.finditer("\s+SONAME\s+([^\s]+)", output):
+                        for m in re.finditer(r"\s+SONAME\s+([^\s]+)", output):
                             if m.group(1) not in sonames:
                                 sonames.append(m.group(1))
                     except subprocess.CalledProcessError:
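
Note: the r-prefixes added above matter because "\s" and "\." are not valid Python string escapes; without the prefix they raise DeprecationWarning on Python 3.6+ and are slated to become errors. A quick standalone check:

    import re

    so_re = re.compile(r"lib.*\.so")   # raw string: \. stays a literal-dot regex
    print(bool(so_re.match("libfoo.so")))                               # True
    print(re.findall(r"\s+SONAME\s+([^\s]+)", "  SONAME libfoo.so.1"))  # ['libfoo.so.1']
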
diff --git a/poky/meta/classes/devicetree.bbclass b/poky/meta/classes/devicetree.bbclass
index 8fe5a5e..5c03e4b 100644
--- a/poky/meta/classes/devicetree.bbclass
+++ b/poky/meta/classes/devicetree.bbclass
@@ -27,6 +27,8 @@
 
 COMPATIBLE_MACHINE ?= "^$"
 
+PROVIDES = "virtual/dtb"
+
 PACKAGE_ARCH = "${MACHINE_ARCH}"
 
 SYSROOT_DIRS += "/boot/devicetree"
@@ -120,9 +122,12 @@
     includes = expand_includes("DT_INCLUDE", d)
     listpath = d.getVar("DT_FILES_PATH")
     for dts in os.listdir(listpath):
-        if not dts.endswith(".dts"):
-            continue # skip non-.dts files
         dtspath = os.path.join(listpath, dts)
+        try:
+            if not(os.path.isfile(dtspath)) or not(dts.endswith(".dts") or devicetree_source_is_overlay(dtspath)):
+                continue # skip non-.dts files and non-overlay files
+        except:
+            continue # skip if can't determine if overlay
         devicetree_compile(dtspath, includes, d)
 }
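
Note: the reworked loop admits overlay sources that lack a .dts suffix. A standalone sketch; devicetree_source_is_overlay() is defined elsewhere in the class, and the '/plugin/' heuristic below is only an assumed stand-in:

    import os

    def source_is_overlay(path):
        # Assumed heuristic: overlay sources contain the /plugin/ directive.
        with open(path) as f:
            return '/plugin/' in f.read()

    def select_sources(listpath):
        for dts in os.listdir(listpath):
            dtspath = os.path.join(listpath, dts)
            try:
                if not os.path.isfile(dtspath) or not (dts.endswith(".dts") or source_is_overlay(dtspath)):
                    continue  # skip non-.dts, non-overlay files
            except (OSError, UnicodeDecodeError):
                continue  # unreadable, so can't tell if it's an overlay
            yield dtspath
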
 
diff --git a/poky/meta/classes/devtool-source.bbclass b/poky/meta/classes/devtool-source.bbclass
index 1372e32..a811000 100644
--- a/poky/meta/classes/devtool-source.bbclass
+++ b/poky/meta/classes/devtool-source.bbclass
@@ -103,8 +103,10 @@
                 for l in sccfile:
                     line = l.split()
                     if line and line[0] in ('kconf', 'patch'):
-                        local_files[line[-1]] = os.path.join(os.path.dirname(local_files[key]), line[-1])
-                        shutil.copy2(os.path.join(os.path.dirname(local_files[key]), line[-1]), workdir)
+                        cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
+                        if not cfg in local_files.values():
+                            local_files[line[-1]] = cfg
+                            shutil.copy2(cfg, workdir)
                 sccfile.close()
 
     # Ignore local files with subdir={BP}
diff --git a/poky/meta/classes/distro_features_check.bbclass b/poky/meta/classes/distro_features_check.bbclass
index 9b78b03..eeaa3b4 100644
--- a/poky/meta/classes/distro_features_check.bbclass
+++ b/poky/meta/classes/distro_features_check.bbclass
@@ -11,27 +11,22 @@
 
 python () {
     # Assume at least one var is set.
-    distro_features = (d.getVar('DISTRO_FEATURES') or "").split()
+    distro_features = set((d.getVar('DISTRO_FEATURES') or '').split())
 
-    any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES')
+    any_of_distro_features = set((d.getVar('ANY_OF_DISTRO_FEATURES') or '').split())
     if any_of_distro_features:
-        any_of_distro_features = any_of_distro_features.split()
-        if set.isdisjoint(set(any_of_distro_features),set(distro_features)):
-            raise bb.parse.SkipRecipe("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features)
+        if set.isdisjoint(any_of_distro_features, distro_features):
+            raise bb.parse.SkipRecipe("one of '%s' needs to be in DISTRO_FEATURES" % ' '.join(any_of_distro_features))
 
-    required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES')
+    required_distro_features = set((d.getVar('REQUIRED_DISTRO_FEATURES') or '').split())
     if required_distro_features:
-        required_distro_features = required_distro_features.split()
-        for f in required_distro_features:
-            if f in distro_features:
-                continue
-            else:
-                raise bb.parse.SkipRecipe("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f)
+        missing = set.difference(required_distro_features, distro_features)
+        if missing:
+            raise bb.parse.SkipRecipe("missing required distro feature%s '%s' (not in DISTRO_FEATURES)" % ('s' if len(missing) > 1 else '', ' '.join(missing)))
 
-    conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES')
+    conflict_distro_features = set((d.getVar('CONFLICT_DISTRO_FEATURES') or '').split())
     if conflict_distro_features:
-        conflict_distro_features = conflict_distro_features.split()
-        for f in conflict_distro_features:
-            if f in distro_features:
-                raise bb.parse.SkipRecipe("conflicting distro feature '%s' (in DISTRO_FEATURES)" % f)
+        conflicts = set.intersection(conflict_distro_features, distro_features)
+        if conflicts:
+            raise bb.parse.SkipRecipe("conflicting distro feature%s '%s' (in DISTRO_FEATURES)" % ('s' if len(conflicts) > 1 else '', ' '.join(conflicts)))
 }
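
Note: the loop-based checks collapse into three set operations. A standalone sketch with made-up feature names:

    distro_features = set("systemd wayland opengl".split())

    required = set("opengl x11".split())
    missing = required - distro_features
    if missing:
        print("missing required distro feature%s '%s'"
              % ('s' if len(missing) > 1 else '', ' '.join(missing)))

    conflicts = set("wayland directfb".split()) & distro_features
    if conflicts:
        print("conflicting distro feature%s '%s'"
              % ('s' if len(conflicts) > 1 else '', ' '.join(conflicts)))
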
diff --git a/poky/meta/classes/distrodata.bbclass b/poky/meta/classes/distrodata.bbclass
deleted file mode 100644
index 59ee8ce..0000000
--- a/poky/meta/classes/distrodata.bbclass
+++ /dev/null
@@ -1,427 +0,0 @@
-include conf/distro/include/upstream_tracking.inc
-include conf/distro/include/distro_alias.inc
-include conf/distro/include/maintainers.inc
-
-addhandler distro_eventhandler
-distro_eventhandler[eventmask] = "bb.event.BuildStarted"
-python distro_eventhandler() {
-    import oe.distro_check as dc
-    import csv
-    logfile = dc.create_log_file(e.data, "distrodata.csv")
-
-    lf = bb.utils.lockfile("%s.lock" % logfile)
-    with open(logfile, "a") as f:
-        writer = csv.writer(f)
-        writer.writerow(['Package', 'Description', 'Owner', 'License', 
-            'VerMatch', 'Version', 'Upstream', 'Reason', 'Recipe Status',
-            'Distro 1', 'Distro 2', 'Distro 3'])
-        f.close()
-    bb.utils.unlockfile(lf)
-
-    return
-}
-
-addtask distrodata_np
-do_distrodata_np[nostamp] = "1"
-python do_distrodata_np() {
-        localdata = bb.data.createCopy(d)
-        pn = d.getVar("PN")
-        bb.note("Package Name: %s" % pn)
-
-        import oe.distro_check as dist_check
-        tmpdir = d.getVar('TMPDIR')
-        distro_check_dir = os.path.join(tmpdir, "distro_check")
-        datetime = localdata.getVar('DATETIME')
-        dist_check.update_distro_data(distro_check_dir, datetime, localdata)
-
-        if pn.find("-native") != -1:
-            pnstripped = pn.split("-native")
-            bb.note("Native Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.find("-cross") != -1:
-            pnstripped = pn.split("-cross")
-            bb.note("cross Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.find("-crosssdk") != -1:
-            pnstripped = pn.split("-crosssdk")
-            bb.note("cross Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.startswith("nativesdk-"):
-            pnstripped = pn.replace("nativesdk-", "")
-            bb.note("NativeSDK Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
-
-
-        if pn.find("-initial") != -1:
-            pnstripped = pn.split("-initial")
-            bb.note("initial Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        """generate package information from .bb file"""
-        pname = localdata.getVar('PN')
-        pcurver = localdata.getVar('PV')
-        pdesc = localdata.getVar('DESCRIPTION')
-        if pdesc is not None:
-                pdesc = pdesc.replace(',','')
-                pdesc = pdesc.replace('\n','')
-
-        pgrp = localdata.getVar('SECTION')
-        plicense = localdata.getVar('LICENSE').replace(',','_')
-
-        rstatus = localdata.getVar('RECIPE_COLOR')
-        if rstatus is not None:
-                rstatus = rstatus.replace(',','')
-
-        pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
-        if pcurver == pupver:
-                vermatch="1"
-        else:
-                vermatch="0"
-        noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
-        if noupdate_reason is None:
-                noupdate="0"
-        else:
-                noupdate="1"
-                noupdate_reason = noupdate_reason.replace(',','')
-
-        maintainer = localdata.getVar('RECIPE_MAINTAINER')
-        rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
-        result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
-
-        bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
-                  (pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus))
-        line = pn
-        for i in result:
-            line = line + "," + i
-        bb.note("%s\n" % line)
-}
-do_distrodata_np[vardepsexclude] = "DATETIME"
-
-addtask distrodata
-do_distrodata[nostamp] = "1"
-python do_distrodata() {
-        import csv
-        logpath = d.getVar('LOG_DIR')
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "distrodata.csv")
-
-        import oe.distro_check as dist_check
-        localdata = bb.data.createCopy(d)
-        tmpdir = d.getVar('TMPDIR')
-        distro_check_dir = os.path.join(tmpdir, "distro_check")
-        datetime = localdata.getVar('DATETIME')
-        dist_check.update_distro_data(distro_check_dir, datetime, localdata)
-
-        pn = d.getVar("PN")
-        bb.note("Package Name: %s" % pn)
-
-        if pn.find("-native") != -1:
-            pnstripped = pn.split("-native")
-            bb.note("Native Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.startswith("nativesdk-"):
-            pnstripped = pn.replace("nativesdk-", "")
-            bb.note("NativeSDK Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
-
-        if pn.find("-cross") != -1:
-            pnstripped = pn.split("-cross")
-            bb.note("cross Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.find("-crosssdk") != -1:
-            pnstripped = pn.split("-crosssdk")
-            bb.note("cross Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pn.find("-initial") != -1:
-            pnstripped = pn.split("-initial")
-            bb.note("initial Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        """generate package information from .bb file"""
-        pname = localdata.getVar('PN')
-        pcurver = localdata.getVar('PV')
-        pdesc = localdata.getVar('DESCRIPTION')
-        if pdesc is not None:
-                pdesc = pdesc.replace(',','')
-                pdesc = pdesc.replace('\n','')
-
-        pgrp = localdata.getVar('SECTION')
-        plicense = localdata.getVar('LICENSE').replace(',','_')
-
-        rstatus = localdata.getVar('RECIPE_COLOR')
-        if rstatus is not None:
-                rstatus = rstatus.replace(',','')
-
-        pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
-        if pcurver == pupver:
-                vermatch="1"
-        else:
-                vermatch="0"
-
-        noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
-        if noupdate_reason is None:
-                noupdate="0"
-        else:
-                noupdate="1"
-                noupdate_reason = noupdate_reason.replace(',','')
-
-        maintainer = localdata.getVar('RECIPE_MAINTAINER')
-        rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
-        # do the comparison
-        result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
-
-        lf = bb.utils.lockfile("%s.lock" % logfile)
-        with open(logfile, "a") as f:
-            row = [pname, pdesc, maintainer, plicense, vermatch, pcurver, pupver, noupdate_reason, rstatus]
-            row.extend(result)
-
-            writer = csv.writer(f)
-            writer.writerow(row)
-            f.close()
-        bb.utils.unlockfile(lf)
-}
-do_distrodata[vardepsexclude] = "DATETIME"
-
-addhandler checkpkg_eventhandler
-checkpkg_eventhandler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted"
-python checkpkg_eventhandler() {
-    import csv
-
-    def parse_csv_file(filename):
-        package_dict = {}
-
-        with open(filename, "r") as f:
-            reader = csv.reader(f, delimiter='\t')
-            for row in reader:
-                pn = row[0]
-
-                if reader.line_num == 1:
-                    header = row
-                    continue
-
-                if not pn in package_dict.keys():
-                    package_dict[pn] = row
-            f.close()
-
-        with open(filename, "w") as f:
-            writer = csv.writer(f, delimiter='\t')
-            writer.writerow(header)
-            for pn in package_dict.keys():
-                writer.writerow(package_dict[pn])
-            f.close()
-
-        del package_dict
-
-    if bb.event.getName(e) == "BuildStarted":
-        import oe.distro_check as dc
-        logfile = dc.create_log_file(e.data, "checkpkg.csv")
-
-        lf = bb.utils.lockfile("%s.lock" % logfile)
-        with open(logfile, "a") as f:
-            writer = csv.writer(f, delimiter='\t')
-            headers = ['Package', 'Version', 'Upver', 'License', 'Section',
-                'Home', 'Release', 'Depends', 'BugTracker', 'PE', 'Description',
-                'Status', 'Tracking', 'URI', 'MAINTAINER', 'NoUpReason']
-            writer.writerow(headers)
-            f.close()
-        bb.utils.unlockfile(lf)
-    elif bb.event.getName(e) == "BuildCompleted":
-        import os
-        filename = "tmp/log/checkpkg.csv"
-        if os.path.isfile(filename):
-            lf = bb.utils.lockfile("%s.lock"%filename)
-            parse_csv_file(filename)
-            bb.utils.unlockfile(lf)
-    return
-}
-
-addtask checkpkg
-do_checkpkg[nostamp] = "1"
-python do_checkpkg() {
-        localdata = bb.data.createCopy(d)
-        import csv
-        import re
-        import tempfile
-        import subprocess
-        import oe.recipeutils
-        from bb.utils import vercmp_string
-        from bb.fetch2 import FetchError, NoMethodError, decodeurl
-
-        def get_upstream_version_and_status():
-
-            # set if the upstream check fails reliably, e.g. absent git tags, or weird version format used on our or on upstream side.
-            upstream_version_unknown = localdata.getVar('UPSTREAM_VERSION_UNKNOWN')
-            # set if the upstream check cannot be reliably performed due to transient network failures, or server behaving weirdly. 
-            # This one should be used sparingly, as it completely excludes a recipe from upstream checking.
-            upstream_check_unreliable = localdata.getVar('UPSTREAM_CHECK_UNRELIABLE')
-
-            if upstream_check_unreliable == "1":
-                return "N/A", "CHECK_IS_UNRELIABLE"
-
-            uv = oe.recipeutils.get_recipe_upstream_version(localdata)
-            pupver = uv['version'] if uv['version'] else "N/A"
-            pversion = uv['current_version']
-            revision = uv['revision'] if uv['revision'] else "N/A"
-
-            if pupver == "N/A":
-                pstatus = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
-            else:
-                cmp = vercmp_string(pversion, pupver)
-                if cmp == -1:
-                    pstatus = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
-                elif cmp == 0:
-                    pstatus = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
-                else:
-                    pstatus = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
-
-            return pversion, pupver, pstatus, revision
-
-
-        """initialize log files."""
-        logpath = d.getVar('LOG_DIR')
-        bb.utils.mkdirhier(logpath)
-        logfile = os.path.join(logpath, "checkpkg.csv")
-
-        """generate package information from .bb file"""
-        pname = d.getVar('PN')
-
-        if pname.find("-native") != -1:
-            if d.getVar('BBCLASSEXTEND'):
-                    return
-            pnstripped = pname.split("-native")
-            bb.note("Native Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pname.startswith("nativesdk-"):
-            if d.getVar('BBCLASSEXTEND'):
-                    return
-            pnstripped = pname.replace("nativesdk-", "")
-            bb.note("NativeSDK Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
-
-        if pname.find("-cross") != -1:
-            pnstripped = pname.split("-cross")
-            bb.note("cross Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        if pname.find("-initial") != -1:
-            pnstripped = pname.split("-initial")
-            bb.note("initial Split: %s" % pnstripped)
-            localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
-
-        pdesc = localdata.getVar('DESCRIPTION')
-        pgrp = localdata.getVar('SECTION')
-        plicense = localdata.getVar('LICENSE')
-        psection = localdata.getVar('SECTION')
-        phome = localdata.getVar('HOMEPAGE')
-        prelease = localdata.getVar('PR')
-        pdepends = localdata.getVar('DEPENDS')
-        pbugtracker = localdata.getVar('BUGTRACKER')
-        ppe = localdata.getVar('PE')
-        psrcuri = localdata.getVar('SRC_URI')
-        maintainer = localdata.getVar('RECIPE_MAINTAINER')
-
-        pversion, pupver, pstatus, prevision = get_upstream_version_and_status()
-
-        if psrcuri:
-            psrcuri = psrcuri.split()[0]
-        else:
-            psrcuri = "none"
-        pdepends = "".join(pdepends.split("\t"))
-        pdesc = "".join(pdesc.split("\t"))
-        no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON')
-        lf = bb.utils.lockfile("%s.lock" % logfile)
-        with open(logfile, "a") as f:
-            writer = csv.writer(f, delimiter='\t')
-            writer.writerow([pname, pversion, pupver, plicense, psection, phome, 
-                prelease, pdepends, pbugtracker, ppe, pdesc, pstatus, prevision,
-                psrcuri, maintainer, no_upgr_reason])
-            f.close()
-        bb.utils.unlockfile(lf)
-}
-
-addhandler distro_check_eventhandler
-distro_check_eventhandler[eventmask] = "bb.event.BuildStarted"
-python distro_check_eventhandler() {
-    """initialize log files."""
-    import oe.distro_check as dc
-    result_file = dc.create_log_file(e.data, "distrocheck.csv")
-    return
-}
-
-addtask distro_check
-do_distro_check[nostamp] = "1"
-do_distro_check[vardepsexclude] += "DATETIME"
-python do_distro_check() {
-    """checks if the package is present in other public Linux distros"""
-    import oe.distro_check as dc
-    import shutil
-    if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk',d):
-        return
-
-    localdata = bb.data.createCopy(d)
-    tmpdir = d.getVar('TMPDIR')
-    distro_check_dir = os.path.join(tmpdir, "distro_check")
-    logpath = d.getVar('LOG_DIR')
-    bb.utils.mkdirhier(logpath)
-    result_file = os.path.join(logpath, "distrocheck.csv")
-    datetime = localdata.getVar('DATETIME')
-    dc.update_distro_data(distro_check_dir, datetime, localdata)
-
-    # do the comparison
-    result = dc.compare_in_distro_packages_list(distro_check_dir, d)
-
-    # save the results
-    dc.save_distro_check_result(result, datetime, result_file, d)
-}
-
-#
-#Check Missing License Text.
-#Use this task to generate the missing license text data for pkg-report system,
-#then we can search those recipes which license text isn't exsit in common-licenses directory
-#
-addhandler checklicense_eventhandler
-checklicense_eventhandler[eventmask] = "bb.event.BuildStarted"
-python checklicense_eventhandler() {
-    """initialize log files."""
-    import csv
-    import oe.distro_check as dc
-    logfile = dc.create_log_file(e.data, "missinglicense.csv")
-    lf = bb.utils.lockfile("%s.lock" % logfile)
-    with open(logfile, "a") as f:
-        writer = csv.writer(f, delimiter='\t')
-        writer.writerow(['Package', 'License', 'MissingLicense'])
-        f.close()
-    bb.utils.unlockfile(lf)
-    return
-}
-
-addtask checklicense
-do_checklicense[nostamp] = "1"
-python do_checklicense() {
-    import csv
-    import shutil
-    logpath = d.getVar('LOG_DIR')
-    bb.utils.mkdirhier(logpath)
-    pn = d.getVar('PN')
-    logfile = os.path.join(logpath, "missinglicense.csv")
-    generic_directory = d.getVar('COMMON_LICENSE_DIR')
-    license_types = d.getVar('LICENSE')
-    for license_type in ((license_types.replace('+', '').replace('|', '&')
-                          .replace('(', '').replace(')', '').replace(';', '')
-                          .replace(',', '').replace(" ", "").split("&"))):
-        if not os.path.isfile(os.path.join(generic_directory, license_type)):
-            lf = bb.utils.lockfile("%s.lock" % logfile)
-            with open(logfile, "a") as f:
-                writer = csv.writer(f, delimiter='\t')
-                writer.writerow([pn, license_types, license_type])
-                f.close()
-            bb.utils.unlockfile(lf)
-    return
-}
diff --git a/poky/meta/classes/distutils-tools.bbclass b/poky/meta/classes/distutils-tools.bbclass
deleted file mode 100644
index 6f2880e..0000000
--- a/poky/meta/classes/distutils-tools.bbclass
+++ /dev/null
@@ -1,73 +0,0 @@
-DISTUTILS_BUILD_ARGS ?= ""
-DISTUTILS_STAGE_HEADERS_ARGS ?= "--install-dir=${STAGING_INCDIR}/${PYTHON_DIR}"
-DISTUTILS_STAGE_ALL_ARGS ?= "--prefix=${STAGING_DIR_HOST}${prefix} \
-    --install-data=${STAGING_DATADIR}"
-DISTUTILS_INSTALL_ARGS ?= "--prefix=${D}/${prefix} \
-    --install-data=${D}/${datadir}"
-
-distutils_do_compile() {
-         STAGING_INCDIR=${STAGING_INCDIR} \
-         STAGING_LIBDIR=${STAGING_LIBDIR} \
-         ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py build ${DISTUTILS_BUILD_ARGS} || \
-         bbfatal_log "${PYTHON_PN} setup.py build_ext execution failed."
-}
-
-distutils_stage_headers() {
-        install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
-        ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS} || \
-        bbfatal_log "${PYTHON_PN} setup.py install_headers execution failed."
-}
-
-distutils_stage_all() {
-        STAGING_INCDIR=${STAGING_INCDIR} \
-        STAGING_LIBDIR=${STAGING_LIBDIR} \
-        install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
-        PYTHONPATH=${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR} \
-        ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS} || \
-        bbfatal_log "${PYTHON_PN} setup.py install (stage) execution failed."
-}
-
-distutils_do_install() {
-        echo "Beginning ${PN} Install ..."
-        install -d ${D}${PYTHON_SITEPACKAGES_DIR}
-        echo "Step 2 of ${PN} Install ..."
-        STAGING_INCDIR=${STAGING_INCDIR} \
-        STAGING_LIBDIR=${STAGING_LIBDIR} \
-        PYTHONPATH=${D}/${PYTHON_SITEPACKAGES_DIR} \
-        ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install --install-lib=${D}/${PYTHON_SITEPACKAGES_DIR} ${DISTUTILS_INSTALL_ARGS} || \
-        bbfatal_log "${PYTHON_PN} setup.py install execution failed."
-
-        echo "Step 3 of ${PN} Install ..."
-        # support filenames with *spaces*
-        find ${D} -name "*.py" -print0 | while read -d $'\0' i ; do \
-            sed -i -e s:${D}::g $i
-        done
-
-        echo "Step 4 of ${PN} Install ..."
-        if test -e ${D}${bindir} ; then	
-            for i in ${D}${bindir}/* ; do \
-                sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
-            done
-        fi
-
-        echo "Step 4 of ${PN} Install ..."
-        if test -e ${D}${sbindir}; then
-            for i in ${D}${sbindir}/* ; do \
-                sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
-            done
-        fi
-
-        echo "Step 5 of ${PN} Install ..."
-        rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/easy-install.pth
-        
-        #
-        # FIXME: Bandaid against wrong datadir computation
-        #
-        if [ -e ${D}${datadir}/share ]; then
-            mv -f ${D}${datadir}/share/* ${D}${datadir}/
-        fi
-}
-
-#EXPORT_FUNCTIONS do_compile do_install
-
-export LDSHARED="${CCLD} -shared"
diff --git a/poky/meta/classes/extrausers.bbclass b/poky/meta/classes/extrausers.bbclass
index 7709407..32569e9 100644
--- a/poky/meta/classes/extrausers.bbclass
+++ b/poky/meta/classes/extrausers.bbclass
@@ -1,18 +1,17 @@
-# This bbclass is mainly used for image level user/group configuration.
+# This bbclass is used for image level user/group configuration.
 # Inherit this class if you want to make EXTRA_USERS_PARAMS effective.
 
 # Below is an example showing how to use this functionality.
-# INHERIT += "extrausers"
+# IMAGE_CLASSES += "extrausers"
 # EXTRA_USERS_PARAMS = "\
-# useradd -p '' tester; \
-# groupadd developers; \
-# userdel nobody; \
-# groupdel -g video; \
-# groupmod -g 1020 developers; \
-# usermod -s /bin/sh tester; \
+#     useradd -p '' tester; \
+#     groupadd developers; \
+#     userdel nobody; \
+#     groupdel -g video; \
+#     groupmod -g 1020 developers; \
+#     usermod -s /bin/sh tester; \
 # "
 
-
 inherit useradd_base
 
 PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"
diff --git a/poky/meta/classes/fontcache.bbclass b/poky/meta/classes/fontcache.bbclass
index f71a754..13f9df1 100644
--- a/poky/meta/classes/fontcache.bbclass
+++ b/poky/meta/classes/fontcache.bbclass
@@ -20,6 +20,7 @@
 	$INTERCEPT_DIR/postinst_intercept update_font_cache ${PKG} mlprefix=${MLPREFIX} binprefix=${MLPREFIX} \
 		'bindir="${bindir}"' \
 		'libdir="${libdir}"' \
+		'libexecdir="${libexecdir}"' \
 		'base_libdir="${base_libdir}"' \
 		'fontconfigcachedir="${FONTCONFIG_CACHE_DIR}"' \
 		'fontconfigcacheparams="${FONTCONFIG_CACHE_PARAMS}"' \
diff --git a/poky/meta/classes/gconf.bbclass b/poky/meta/classes/gconf.bbclass
index 4e0ee2e..3e3c509 100644
--- a/poky/meta/classes/gconf.bbclass
+++ b/poky/meta/classes/gconf.bbclass
@@ -49,7 +49,7 @@
     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
         schemas = []
-        schema_re = re.compile(".*\.schemas$")
+        schema_re = re.compile(r".*\.schemas$")
         if os.path.exists(schema_dir):
             for f in os.listdir(schema_dir):
                 if schema_re.match(f):
diff --git a/poky/meta/classes/go.bbclass b/poky/meta/classes/go.bbclass
index af331f8..7069c5f 100644
--- a/poky/meta/classes/go.bbclass
+++ b/poky/meta/classes/go.bbclass
@@ -45,7 +45,6 @@
 
 B = "${WORKDIR}/build"
 export GOPATH = "${B}"
-export GOCACHE = "off"
 export GOTMPDIR ?= "${WORKDIR}/go-tmp"
 GOTMPDIR[vardepvalue] = ""
 
diff --git a/poky/meta/classes/goarch.bbclass b/poky/meta/classes/goarch.bbclass
index b2c94fa..7aaf26a 100644
--- a/poky/meta/classes/goarch.bbclass
+++ b/poky/meta/classes/goarch.bbclass
@@ -42,6 +42,10 @@
 SECURITY_CFLAGS_mipsarch = "${SECURITY_NOPIE_CFLAGS}"
 SECURITY_NOPIE_CFLAGS ??= ""
 
+# go can't be built with ccache:
+# gcc: fatal error: no input files
+CCACHE_DISABLE ?= "1"
+
 def go_map_arch(a, d):
     import re
     if re.match('i.86', a):
@@ -64,6 +68,8 @@
         return 'ppc64'
     elif re.match('p(pc|owerpc)(64el)', a):
         return 'ppc64le'
+    elif a == 'riscv64':
+        return 'riscv64'
     else:
         raise bb.parse.SkipRecipe("Unsupported CPU architecture: %s" % a)
 
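
Note: go_map_arch() now passes riscv64 straight through. A standalone sketch, with the mapping table abbreviated to the new case plus the common x86 ones:

    import re

    def go_map_arch(a):
        # Table abbreviated; see goarch.bbclass for the full mapping.
        if re.match('i.86', a):
            return '386'
        elif a == 'x86_64':
            return 'amd64'
        elif a == 'riscv64':
            return 'riscv64'
        raise ValueError("Unsupported CPU architecture: %s" % a)

    print(go_map_arch('riscv64'))
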
diff --git a/poky/meta/classes/gobject-introspection.bbclass b/poky/meta/classes/gobject-introspection.bbclass
index a323c1f..4ceb0c6 100644
--- a/poky/meta/classes/gobject-introspection.bbclass
+++ b/poky/meta/classes/gobject-introspection.bbclass
@@ -25,7 +25,7 @@
 DEPENDS_append_class-nativesdk = " gobject-introspection-native"
 
 # This is used by introspection tools to find .gir includes
-export XDG_DATA_DIRS = "${STAGING_DATADIR}"
+export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}"
 
 do_configure_prepend_class-target () {
     # introspection.m4 pre-packaged with upstream tarballs does not yet
diff --git a/poky/meta/classes/gsettings.bbclass b/poky/meta/classes/gsettings.bbclass
index eae3dc7..33afc96 100644
--- a/poky/meta/classes/gsettings.bbclass
+++ b/poky/meta/classes/gsettings.bbclass
@@ -7,32 +7,36 @@
 
 # TODO use a trigger so that this runs once per package operation run
 
+GSETTINGS_PACKAGE ?= "${PN}"
 
-RDEPENDS_${PN} += "glib-2.0-utils"
-
-FILES_${PN} += "${datadir}/glib-2.0/schemas"
-
-PACKAGE_WRITE_DEPS += "glib-2.0-native"
+python __anonymous() {
+    pkg = d.getVar("GSETTINGS_PACKAGE")
+    if pkg:
+        d.appendVar("PACKAGE_WRITE_DEPS", " glib-2.0-native")
+        d.appendVar("RDEPENDS_" + pkg, " ${MLPREFIX}glib-2.0-utils")
+        d.appendVar("FILES_" + pkg, " ${datadir}/glib-2.0/schemas")
+}
 
 gsettings_postinstrm () {
 	glib-compile-schemas $D${datadir}/glib-2.0/schemas
 }
 
 python populate_packages_append () {
-    pkg = d.getVar('PN')
-    bb.note("adding gsettings postinst scripts to %s" % pkg)
+    pkg = d.getVar('GSETTINGS_PACKAGE')
+    if pkg:
+        bb.note("adding gsettings postinst scripts to %s" % pkg)
 
-    postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
-    if not postinst:
-        postinst = '#!/bin/sh\n'
-    postinst += d.getVar('gsettings_postinstrm')
-    d.setVar('pkg_postinst_%s' % pkg, postinst)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
+        if not postinst:
+            postinst = '#!/bin/sh\n'
+        postinst += d.getVar('gsettings_postinstrm')
+        d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-    bb.note("adding gsettings postrm scripts to %s" % pkg)
+        bb.note("adding gsettings postrm scripts to %s" % pkg)
 
-    postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
-    if not postrm:
-        postrm = '#!/bin/sh\n'
-    postrm += d.getVar('gsettings_postinstrm')
-    d.setVar('pkg_postrm_%s' % pkg, postrm)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
+        if not postrm:
+            postrm = '#!/bin/sh\n'
+        postrm += d.getVar('gsettings_postinstrm')
+        d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
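
Note: populate_packages_append() now composes the postinst/postrm only when GSETTINGS_PACKAGE is set. A sketch of the composition step, with a plain dict standing in for the datastore:

    def add_script(d, key_tmpl, pkg, snippet):
        # Append the snippet to an existing script, or start a new one.
        key = key_tmpl % pkg
        script = d.get(key) or d.get(key_tmpl.rsplit('_', 1)[0])
        if not script:
            script = '#!/bin/sh\n'
        d[key] = script + snippet

    d = {}
    add_script(d, 'pkg_postinst_%s', 'mypkg',
               'glib-compile-schemas $D${datadir}/glib-2.0/schemas\n')
    print(d['pkg_postinst_mypkg'])
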
diff --git a/poky/meta/classes/gtk-doc.bbclass b/poky/meta/classes/gtk-doc.bbclass
index b4f6754..707d74d 100644
--- a/poky/meta/classes/gtk-doc.bbclass
+++ b/poky/meta/classes/gtk-doc.bbclass
@@ -41,7 +41,7 @@
     if [ ${GTKDOC_ENABLED} = True ]; then
         # Write out a qemu wrapper that will be given to gtkdoc-scangobj so that it
         # can run target helper binaries through that.
-        qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\$GIR_EXTRA_LIBS_PATH','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
+        qemu_binary="${@qemu_wrapper_cmdline(d, '$STAGING_DIR_HOST', ['\\$GIR_EXTRA_LIBS_PATH','$STAGING_DIR_HOST/${libdir}','$STAGING_DIR_HOST/${base_libdir}'])}"
         cat > ${B}/gtkdoc-qemuwrapper << EOF
 #!/bin/sh
 # Use a modules directory which doesn't exist so we don't load random things
@@ -51,6 +51,9 @@
 GIR_EXTRA_LIBS_PATH=\`find ${B} -name *.so -printf "%h\n"|sort|uniq| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
 GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH
 
+# meson sets this wrongly (only to libs in the build dir); qemu_wrapper_cmdline() and GIR_EXTRA_LIBS_PATH take care of it properly
+unset LD_LIBRARY_PATH
+
 if [ -d ".libs" ]; then
     $qemu_binary ".libs/\$@"
 else
diff --git a/poky/meta/classes/icecc.bbclass b/poky/meta/classes/icecc.bbclass
index 7d94525..6d003dc 100644
--- a/poky/meta/classes/icecc.bbclass
+++ b/poky/meta/classes/icecc.bbclass
@@ -34,6 +34,7 @@
     ICECC_DEBUG ICECC_LOGFILE ICECC_REPEAT_RATE ICECC_PREFERRED_HOST \
     ICECC_CLANG_REMOTE_CPP ICECC_IGNORE_UNVERIFIED ICECC_TEST_SOCKET \
     ICECC_ENV_DEBUG ICECC_SYSTEM_PACKAGE_BL ICECC_SYSTEM_CLASS_BL \
+    ICECC_REMOTE_CPP \
     "
 
 ICECC_ENV_EXEC ?= "${STAGING_BINDIR_NATIVE}/icecc-create-env"
@@ -56,6 +57,8 @@
 # See: https://github.com/icecc/icecream/issues/190
 export ICECC_CARET_WORKAROUND ??= "0"
 
+export ICECC_REMOTE_CPP ??= "1"
+
 ICECC_CFLAGS = ""
 CFLAGS += "${ICECC_CFLAGS}"
 CXXFLAGS += "${ICECC_CFLAGS}"
@@ -130,6 +133,13 @@
         return "no"
 
     pn = d.getVar('PN')
+    bpn = d.getVar('BPN')
+
+    # Blacklist/whitelist checks are made against BPN, because there is a good
+    # chance that if icecc should be skipped for a recipe, it should be skipped
+    # for all the variants of that recipe. PN is still checked in case a user
+    # specified a more specific recipe.
+    check_pn = set([pn, bpn])
 
     system_class_blacklist = (d.getVar('ICECC_SYSTEM_CLASS_BL') or "").split()
     user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL') or "none").split()
@@ -145,11 +155,11 @@
     user_package_whitelist = (d.getVar('ICECC_USER_PACKAGE_WL') or "").split()
     package_blacklist = system_package_blacklist + user_package_blacklist
 
-    if pn in package_blacklist:
+    if check_pn & set(package_blacklist):
         bb.debug(1, "%s: found in blacklist, disable icecc" % pn)
         return "no"
 
-    if pn in user_package_whitelist:
+    if check_pn & set(user_package_whitelist):
         bb.debug(1, "%s: found in whitelist, enable icecc" % pn)
         return "yes"
 
@@ -379,7 +389,7 @@
             ${ICECC_ENV_EXEC} ${ICECC_ENV_DEBUG} "${ICECC_CC}" "${ICECC_CXX}" "${ICECC_AS}" "${ICECC_VERSION}"
         then
             touch "${ICECC_VERSION}.done"
-        elif ! wait_for_file "${ICECC_VERSION}.done" 30 
+        elif ! wait_for_file "${ICECC_VERSION}.done" 30
         then
             # locking failed so wait for ${ICECC_VERSION}.done to appear
             bbwarn "Timeout waiting for ${ICECC_VERSION}.done"
@@ -395,7 +405,8 @@
     export ICECC_VERSION ICECC_CC ICECC_CXX
     export PATH="$ICE_PATH:$PATH"
 
-    bbnote "Using icecc"
+    bbnote "Using icecc path: $ICE_PATH"
+    bbnote "Using icecc tarball: $ICECC_VERSION"
 }
 
 do_configure_prepend() {
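
A standalone sketch of the BPN-aware check introduced above: testing the set {PN, BPN} against the lists means a single blacklist entry covers every variant of a recipe (native, nativesdk, multilib), while an exact PN entry still works. Recipe names here are hypothetical:

    def icecc_allowed(pn, bpn, blacklist, whitelist):
        check_pn = {pn, bpn}
        if check_pn & set(blacklist):
            return False  # found in blacklist, disable icecc
        if check_pn & set(whitelist):
            return True   # found in whitelist, enable icecc
        return True       # fall through to the class's remaining checks

    print(icecc_allowed("nativesdk-glibc", "glibc", ["glibc"], []))  # False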
diff --git a/poky/meta/classes/image-buildinfo.bbclass b/poky/meta/classes/image-buildinfo.bbclass
index 87a6a1a..94c585d 100644
--- a/poky/meta/classes/image-buildinfo.bbclass
+++ b/poky/meta/classes/image-buildinfo.bbclass
@@ -16,9 +16,8 @@
 IMAGE_BUILDINFO_FILE ??= "${sysconfdir}/build"
 
 # From buildhistory.bbclass
-def image_buildinfo_outputvars(vars, listvars, d): 
+def image_buildinfo_outputvars(vars, d):
     vars = vars.split()
-    listvars = listvars.split()
     ret = ""
     for var in vars:
         value = d.getVar(var) or ""
@@ -59,8 +58,7 @@
                 return ""
         # Single and list variables to be read
         vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "")
-        listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "")
-        return image_buildinfo_outputvars(vars, listvars, d)
+        return image_buildinfo_outputvars(vars, d)
 
 # Write build information to target filesystem
 python buildinfo () {
diff --git a/poky/meta/classes/image.bbclass b/poky/meta/classes/image.bbclass
index 2ff574b..276d0d3 100644
--- a/poky/meta/classes/image.bbclass
+++ b/poky/meta/classes/image.bbclass
@@ -24,7 +24,7 @@
 LICENSE ?= "MIT"
 PACKAGES = ""
 DEPENDS += "${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross depmodwrapper-cross cross-localedef-native"
-RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL}"
+RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL} ${IMAGE_INSTALL_DEBUGFS}"
 RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}"
 PATH_prepend = "${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:"
 
@@ -500,7 +500,7 @@
         d.prependVarFlag(task, 'postfuncs', 'create_symlinks ')
         d.appendVarFlag(task, 'subimages', ' ' + ' '.join(subimages))
         d.appendVarFlag(task, 'vardeps', ' ' + ' '.join(vardeps))
-        d.appendVarFlag(task, 'vardepsexclude', 'DATETIME DATE ' + ' '.join(vardepsexclude))
+        d.appendVarFlag(task, 'vardepsexclude', ' DATETIME DATE ' + ' '.join(vardepsexclude))
 
         bb.debug(2, "Adding task %s before %s, after %s" % (task, 'do_image_complete', after))
         bb.build.addtask(task, 'do_image_complete', after, d)
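
The added leading space is not cosmetic: appendVarFlag does plain string concatenation, so without it the first excluded name fuses with the tail of any existing value. A toy illustration (the existing value is hypothetical):

    existing = "BUILDHISTORY_TIMESTAMP"
    broken = existing + "DATETIME DATE"   # old behaviour
    fixed  = existing + " DATETIME DATE"  # with the leading space
    print(broken.split())  # ['BUILDHISTORY_TIMESTAMPDATETIME', 'DATE']
    print(fixed.split())   # ['BUILDHISTORY_TIMESTAMP', 'DATETIME', 'DATE']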
diff --git a/poky/meta/classes/image_types.bbclass b/poky/meta/classes/image_types.bbclass
index 0a69542..f363c42 100644
--- a/poky/meta/classes/image_types.bbclass
+++ b/poky/meta/classes/image_types.bbclass
@@ -56,7 +56,6 @@
 
 XZ_COMPRESSION_LEVEL ?= "-3"
 XZ_INTEGRITY_CHECK ?= "crc32"
-XZ_THREADS ?= "-T 0"
 
 ZIP_COMPRESSION_LEVEL ?= "-9"
 
@@ -280,11 +279,11 @@
 # CONVERSION_CMD/DEPENDS.
 COMPRESSIONTYPES ?= ""
 
-CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vdi qcow2 ${COMPRESSIONTYPES}"
+CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vdi qcow2 base64 ${COMPRESSIONTYPES}"
 CONVERSION_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
 CONVERSION_CMD_gz = "pigz -f -9 -n -c ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz"
 CONVERSION_CMD_bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_THREADS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz"
+CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz"
 CONVERSION_CMD_lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4"
 CONVERSION_CMD_lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
 CONVERSION_CMD_zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
@@ -300,6 +299,7 @@
 CONVERSION_CMD_vmdk = "qemu-img convert -O vmdk ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vmdk"
 CONVERSION_CMD_vdi = "qemu-img convert -O vdi ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vdi"
 CONVERSION_CMD_qcow2 = "qemu-img convert -O qcow2 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.qcow2"
+CONVERSION_CMD_base64 = "base64 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.base64"
 CONVERSION_DEPENDS_lzma = "xz-native"
 CONVERSION_DEPENDS_gz = "pigz-native"
 CONVERSION_DEPENDS_bz2 = "pbzip2-native"
@@ -309,10 +309,11 @@
 CONVERSION_DEPENDS_zip = "zip-native"
 CONVERSION_DEPENDS_sum = "mtd-utils-native"
 CONVERSION_DEPENDS_bmap = "bmap-tools-native"
-CONVERSION_DEPENDS_u-boot = "u-boot-mkimage-native"
-CONVERSION_DEPENDS_vmdk = "qemu-native"
-CONVERSION_DEPENDS_vdi = "qemu-native"
-CONVERSION_DEPENDS_qcow2 = "qemu-native"
+CONVERSION_DEPENDS_u-boot = "u-boot-tools-native"
+CONVERSION_DEPENDS_vmdk = "qemu-system-native"
+CONVERSION_DEPENDS_vdi = "qemu-system-native"
+CONVERSION_DEPENDS_qcow2 = "qemu-system-native"
+CONVERSION_DEPENDS_base64 = "coreutils-native"
 
 RUNNABLE_IMAGE_TYPES ?= "ext2 ext3 ext4"
 RUNNABLE_MACHINE_PATTERNS ?= "qemu"
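
The new base64 conversion chains like any other conversion type (e.g. adding ext4.base64 to IMAGE_FSTYPES) and simply wraps coreutils' base64. A rough Python equivalent of what CONVERSION_CMD_base64 produces, with a hypothetical image name:

    import base64
    with open("image.ext4", "rb") as src, open("image.ext4.base64", "wb") as dst:
        dst.write(base64.encodebytes(src.read()))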
diff --git a/poky/meta/classes/insane.bbclass b/poky/meta/classes/insane.bbclass
index 295feb8..37b8bb0 100644
--- a/poky/meta/classes/insane.bbclass
+++ b/poky/meta/classes/insane.bbclass
@@ -33,7 +33,8 @@
             perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
             split-strip packages-list pkgv-undefined var-undefined \
             version-going-backwards expanded-d invalid-chars \
-            license-checksum dev-elf file-rdeps \
+            license-checksum dev-elf file-rdeps configure-unsafe \
+            configure-gettext \
             "
 # Add usrmerge QA check based on distro feature
 ERROR_QA_append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
@@ -307,10 +308,10 @@
     if not ((machine == elf.machine()) or is_32 or is_bpf):
         package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \
                  (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d)))
-    elif not ((bits == elf.abiSize()) or is_32):
+    elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
         package_qa_add_message(messages, "arch", "Bit size did not match (%d to %d) %s on %s" % \
                  (bits, elf.abiSize(), bpn, package_qa_clean_path(path,d)))
-    elif not littleendian == elf.isLittleEndian():
+    elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
         package_qa_add_message(messages, "arch", "Endianness did not match (%d to %d) on %s" % \
                  (littleendian, elf.isLittleEndian(), package_qa_clean_path(path,d)))
 
@@ -457,7 +458,6 @@
     """
     Check for changes in the license files.
     """
-    import tempfile
     sane = True
 
     lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
@@ -495,61 +495,45 @@
 
         if (not beginline) and (not endline):
             md5chksum = bb.utils.md5_file(srclicfile)
-            with open(srclicfile, 'rb') as f:
-                license = f.read()
+            with open(srclicfile, 'r', errors='replace') as f:
+                license = f.read().splitlines()
         else:
-            fi = open(srclicfile, 'rb')
-            fo = tempfile.NamedTemporaryFile(mode='wb', prefix='poky.', suffix='.tmp', delete=False)
-            tmplicfile = fo.name;
-            lineno = 0
-            linesout = 0
-            license = []
-            for line in fi:
-                lineno += 1
-                if (lineno >= beginline):
-                    if ((lineno <= endline) or not endline):
-                        fo.write(line)
-                        license.append(line)
-                        linesout += 1
-                    else:
-                        break
-            fo.flush()
-            fo.close()
-            fi.close()
-            md5chksum = bb.utils.md5_file(tmplicfile)
-            license = b''.join(license)
-            os.unlink(tmplicfile)
-
+            with open(srclicfile, 'rb') as f:
+                import hashlib
+                lineno = 0
+                license = []
+                m = hashlib.md5()
+                for line in f:
+                    lineno += 1
+                    if (lineno >= beginline):
+                        if ((lineno <= endline) or not endline):
+                            m.update(line)
+                            license.append(line.decode('utf-8', errors='replace').rstrip())
+                        else:
+                            break
+                md5chksum = m.hexdigest()
         if recipemd5 == md5chksum:
             bb.note (pn + ": md5 checksum matched for ", url)
         else:
             if recipemd5:
                 msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
                 msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
-                try:
-                    license_lines = license.decode('utf-8').split('\n')
-                except:
-                    # License text might not be valid UTF-8, in which
-                    # case we don't know how to include it in our output
-                    # and have to skip it.
-                    pass
-                else:
-                    max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
-                    if not license_lines or license_lines[-1] != '':
-                        # Ensure that our license text ends with a line break
-                        # (will be added with join() below).
-                        license_lines.append('')
-                    remove = len(license_lines) - max_lines
-                    if remove > 0:
-                        start = max_lines // 2
-                        end = start + remove - 1
-                        del license_lines[start:end]
-                        license_lines.insert(start, '...')
-                    msg = msg + "\n" + pn + ": Here is the selected license text:" + \
-                          "\n" + \
-                          "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
-                          "\n" + "\n".join(license_lines) + \
-                          "{:^^70}".format(" endline=%d " % endline if endline else "")
+                max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
+                if not license or license[-1] != '':
+                    # Ensure that our license text ends with a line break
+                    # (will be added with join() below).
+                    license.append('')
+                remove = len(license) - max_lines
+                if remove > 0:
+                    start = max_lines // 2
+                    end = start + remove - 1
+                    del license[start:end]
+                    license.insert(start, '...')
+                msg = msg + "\n" + pn + ": Here is the selected license text:" + \
+                        "\n" + \
+                        "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
+                        "\n" + "\n".join(license) + \
+                        "{:^^70}".format(" endline=%d " % endline if endline else "")
                 if beginline:
                     if endline:
                         srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
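
The rewritten checksum path hashes the beginline/endline range incrementally instead of round-tripping through a temporary file. A self-contained sketch of the same technique (the path is hypothetical):

    import hashlib

    def md5_of_line_range(path, beginline, endline=None):
        m = hashlib.md5()
        lines = []
        with open(path, "rb") as f:
            for lineno, line in enumerate(f, start=1):
                if lineno < beginline:
                    continue
                if endline and lineno > endline:
                    break
                m.update(line)
                lines.append(line.decode("utf-8", errors="replace").rstrip())
        return m.hexdigest(), lines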
@@ -570,7 +554,7 @@
         bb.fatal("Fatal QA errors found, failing task.")
 }
 
-def package_qa_check_staged(path,d):
+def qa_check_staged(path,d):
     """
     Check staged la and pc files for common problems like references to the work
     directory.
@@ -589,20 +573,31 @@
     else:
         pkgconfigcheck = tmpdir
 
+    skip = (d.getVar('INSANE_SKIP') or "").split()
+    skip_la = False
+    if 'la' in skip:
+        bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
+        skip_la = True
+
+    skip_pkgconfig = False
+    if 'pkgconfig' in skip:
+        bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
+        skip_pkgconfig = True
+
     # find all .la and .pc files
     # read the content
     # and check for stuff that looks wrong
     for root, dirs, files in os.walk(path):
         for file in files:
             path = os.path.join(root,file)
-            if file.endswith(".la"):
+            if file.endswith(".la") and not skip_la:
                 with open(path) as f:
                     file_content = f.read()
                     file_content = file_content.replace(recipesysroot, "")
                     if workdir in file_content:
                         error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
                         sane &= package_qa_handle_error("la", error_msg, d)
-            elif file.endswith(".pc"):
+            elif file.endswith(".pc") and not skip_pkgconfig:
                 with open(path) as f:
                     file_content = f.read()
                     file_content = file_content.replace(recipesysroot, "")
@@ -1017,6 +1012,13 @@
 do_package_qa[rdeptask] = "do_packagedata"
 addtask do_package_qa after do_packagedata do_package before do_build
 
+# Add the package-specific INSANE_SKIPs to the sstate dependencies
+python() {
+    pkgs = (d.getVar('PACKAGES') or '').split()
+    for pkg in pkgs:
+        d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP_{}".format(pkg))
+}
+
 SSTATETASKS += "do_package_qa"
 do_package_qa[sstate-inputdirs] = ""
 do_package_qa[sstate-outputdirs] = ""
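
The anonymous function above folds one INSANE_SKIP_<pkg> name per package into the task signature, so editing a per-package skip list reruns do_package_qa. What it contributes, with hypothetical package names:

    packages = "foo foo-dev foo-dbg foo-doc".split()
    vardeps = " ".join("INSANE_SKIP_{}".format(pkg) for pkg in packages)
    print(vardeps)
    # INSANE_SKIP_foo INSANE_SKIP_foo-dev INSANE_SKIP_foo-dbg INSANE_SKIP_foo-doc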
@@ -1027,8 +1029,7 @@
 
 python do_qa_staging() {
     bb.note("QA checking staging")
-
-    if not package_qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
+    if not qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
         bb.fatal("QA staging was broken by the package built above")
 }
 
@@ -1042,15 +1043,22 @@
     configs = []
     workdir = d.getVar('WORKDIR')
 
-    if bb.data.inherits_class('autotools', d):
+    skip = (d.getVar('INSANE_SKIP') or "").split()
+    skip_configure_unsafe = False
+    if 'configure-unsafe' in skip:
+        bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
+        skip_configure_unsafe = True
+
+    if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
         bb.note("Checking autotools environment for common misconfiguration")
         for root, dirs, files in os.walk(workdir):
             statement = "grep -q -F -e 'CROSS COMPILE Badness:' -e 'is unsafe for cross-compilation' %s" % \
                         os.path.join(root,"config.log")
             if "config.log" in files:
                 if subprocess.call(statement, shell=True) == 0:
-                    bb.fatal("""This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
-Rerun configure task after fixing this.""")
+                    error_msg = """This autoconf log indicates errors; it looked at host include and/or library paths while determining system capabilities.
+Rerun configure task after fixing this."""
+                    package_qa_handle_error("configure-unsafe", error_msg, d)
 
             if "configure.ac" in files:
                 configs.append(os.path.join(root,"configure.ac"))
@@ -1061,8 +1069,14 @@
     # Check gettext configuration and dependencies are correct
     ###########################################################################
 
+    skip_configure_gettext = False
+    if 'configure-gettext' in skip:
+        bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
+        skip_configure_gettext = True
+
     cnf = d.getVar('EXTRA_OECONF') or ""
-    if "gettext" not in d.getVar('P') and "gcc-runtime" not in d.getVar('P') and "--disable-nls" not in cnf:
+    if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
+            "--disable-nls" in cnf or skip_configure_gettext):
         ml = d.getVar("MLPREFIX") or ""
         if bb.data.inherits_class('cross-canadian', d):
             gt = "nativesdk-gettext"
@@ -1073,8 +1087,8 @@
             for config in configs:
                 gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
                 if subprocess.call(gnu, shell=True) == 0:
-                    bb.fatal("""%s required but not in DEPENDS for file %s.
-Missing inherit gettext?""" % (gt, config))
+                    error_msg = "%s required but not in DEPENDS for file %s. Missing inherit gettext?" % (gt, config)
+                    package_qa_handle_error("configure-gettext", error_msg, d)
 
     ###########################################################################
     # Check unrecognised configure options (with a white list)
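
With the bb.fatal calls gone, configure-unsafe and configure-gettext route through the same policy dispatch as the other QA classes: ERROR_QA membership fails the task, WARN_QA only warns. A rough, simplified sketch of that dispatch (not the full package_qa_handle_error):

    def handle_error(error_class, msg, error_qa, warn_qa):
        if error_class in error_qa:
            print("ERROR: " + msg)
            return False  # caller fails the task
        if error_class in warn_qa:
            print("WARNING: " + msg)
        return True

    handle_error("configure-gettext", "gettext required but not in DEPENDS",
                 error_qa=set(), warn_qa={"configure-gettext"})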
diff --git a/poky/meta/classes/kernel-fitimage.bbclass b/poky/meta/classes/kernel-fitimage.bbclass
index 4c4fd99..2517d75 100644
--- a/poky/meta/classes/kernel-fitimage.bbclass
+++ b/poky/meta/classes/kernel-fitimage.bbclass
@@ -4,7 +4,7 @@
     kerneltypes = d.getVar('KERNEL_IMAGETYPES') or ""
     if 'fitImage' in kerneltypes.split():
         depends = d.getVar("DEPENDS")
-        depends = "%s u-boot-mkimage-native dtc-native" % depends
+        depends = "%s u-boot-tools-native dtc-native" % depends
         d.setVar("DEPENDS", depends)
 
         uarch = d.getVar("UBOOT_ARCH")
@@ -19,9 +19,9 @@
         else:
             replacementtype = "zImage"
 
-	# Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
-	# to kernel.bbclass . We have to override it, since we pack zImage
-	# (at least for now) into the fitImage .
+        # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
+        # to kernel.bbclass. We have to override it, since we pack zImage
+        # (at least for now) into the fitImage.
         typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
         if 'fitImage' in typeformake.split():
             d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('fitImage', replacementtype))
@@ -30,12 +30,19 @@
         if image:
             d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
 
+        # Check if there are any dtb providers
+        providerdtb = d.getVar("PREFERRED_PROVIDER_virtual/dtb")
+        if providerdtb:
+            d.appendVarFlag('do_assemble_fitimage', 'depends', ' virtual/dtb:do_populate_sysroot')
+            d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' virtual/dtb:do_populate_sysroot')
+            d.setVar('EXTERNAL_KERNEL_DEVICETREE', "${RECIPE_SYSROOT}/boot/devicetree")
+
         # Verified boot will sign the fitImage and append the public key to
         # U-Boot dtb. We ensure the U-Boot dtb is deployed before assembling
         # the fitImage:
         if d.getVar('UBOOT_SIGN_ENABLE') == "1":
             uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
-            d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_deploy' % uboot_pn)
+            d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_populate_sysroot' % uboot_pn)
 }
 
 # Options for the device tree compiler passed to mkimage '-D' feature:
@@ -373,7 +380,8 @@
 	#
 	# Step 2: Prepare a DTB image section
 	#
-	if [ -n "${KERNEL_DEVICETREE}" ]; then
+
+	if [ -z "${EXTERNAL_KERNEL_DEVICETREE}" ] && [ -n "${KERNEL_DEVICETREE}" ]; then
 		dtbcount=1
 		for DTB in ${KERNEL_DEVICETREE}; do
 			if echo ${DTB} | grep -q '/dts/'; then
@@ -391,6 +399,16 @@
 		done
 	fi
 
+	if [ -n "${EXTERNAL_KERNEL_DEVICETREE}" ]; then
+		dtbcount=1
+		for DTBFILE in ${EXTERNAL_KERNEL_DEVICETREE}/*.dtb; do
+			DTB=`basename ${DTBFILE}`
+			DTB=$(echo "${DTB}" | tr '/' '_')
+			DTBS="${DTBS} ${DTB}"
+			fitimage_emit_section_dtb ${1} ${DTB} ${DTBFILE}
+		done
+	fi
+
 	#
 	# Step 3: Prepare a setup section. (For x86)
 	#
@@ -456,10 +474,17 @@
 	# Step 7: Sign the image and add public key to U-Boot dtb
 	#
 	if [ "x${UBOOT_SIGN_ENABLE}" = "x1" ] ; then
+		add_key_to_u_boot=""
+		if [ -n "${UBOOT_DTB_BINARY}" ]; then
+			# The u-boot.dtb is a symlink to UBOOT_DTB_IMAGE, so we need to
+			# copy both of them without dereferencing the symlink.
+			cp -P ${STAGING_DATADIR}/u-boot*.dtb ${B}
+			add_key_to_u_boot="-K ${B}/${UBOOT_DTB_BINARY}"
+		fi
 		uboot-mkimage \
 			${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
 			-F -k "${UBOOT_SIGN_KEYDIR}" \
-			${@'-K "${DEPLOY_DIR_IMAGE}/${UBOOT_DTB_BINARY}"' if len('${UBOOT_DTB_BINARY}') else ''} \
+			$add_key_to_u_boot \
 			-r arch/${ARCH}/boot/${2}
 	fi
 }
@@ -481,7 +506,7 @@
 	fi
 }
 
-addtask assemble_fitimage_initramfs before do_deploy after do_install
+addtask assemble_fitimage_initramfs before do_deploy after do_bundle_initramfs
 
 
 kernel_do_deploy[vardepsexclude] = "DATETIME"
@@ -505,5 +530,11 @@
 			install -m 0644 ${B}/arch/${ARCH}/boot/fitImage-${INITRAMFS_IMAGE} ${DEPLOYDIR}/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.bin
 			ln -snf fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_NAME}.bin ${DEPLOYDIR}/fitImage-${INITRAMFS_IMAGE_NAME}-${KERNEL_FIT_LINK_NAME}
 		fi
+		if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then
+			# UBOOT_DTB_IMAGE is a real file, but we can't use
+			# ${UBOOT_DTB_IMAGE} since it contains ${PV}, which refers to
+			# u-boot's version while we are in the kernel environment here.
+			install -m 0644 ${B}/u-boot-${MACHINE}*.dtb ${DEPLOYDIR}/
+		fi
 	fi
 }
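
A sketch of the new EXTERNAL_KERNEL_DEVICETREE pass in the fitImage assembly: when a virtual/dtb provider has staged device trees into the sysroot, one image section is emitted per .dtb found there. The directory and its contents are hypothetical:

    import glob, os
    external = "recipe-sysroot/boot/devicetree"
    for dtbfile in sorted(glob.glob(os.path.join(external, "*.dtb"))):
        dtb = os.path.basename(dtbfile)
        print("fitimage_emit_section_dtb its-file %s %s" % (dtb, dtbfile))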
diff --git a/poky/meta/classes/kernel-module-split.bbclass b/poky/meta/classes/kernel-module-split.bbclass
index 67ab416..e8d3eb5 100644
--- a/poky/meta/classes/kernel-module-split.bbclass
+++ b/poky/meta/classes/kernel-module-split.bbclass
@@ -48,7 +48,8 @@
         tmpfile = tf[1]
         cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile)
         subprocess.check_call(cmd, shell=True)
-        f = open(tmpfile)
+        # errors='replace': Some old kernel versions contain invalid utf-8 characters in mod descriptions (like 0xf6, 'ö')
+        f = open(tmpfile, errors='replace')
         l = f.read().split("\000")
         f.close()
         os.close(tf[0])
@@ -132,7 +133,7 @@
     kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel"
     kernel_version = d.getVar("KERNEL_VERSION")
 
-    module_regex = '^(.*)\.k?o$'
+    module_regex = r'^(.*)\.k?o$'
 
     module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
     module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX')
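
Why errors='replace' is needed when reading the extracted .modinfo section: some old kernels carry Latin-1 bytes in module descriptions, which strict UTF-8 decoding rejects. A tiny demonstration with made-up bytes:

    data = b"description=K\xf6nig driver\x00license=GPL\x00"
    text = data.decode("utf-8", errors="replace")  # 0xf6 becomes U+FFFD
    print(text.split("\x00"))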
diff --git a/poky/meta/classes/kernel-uimage.bbclass b/poky/meta/classes/kernel-uimage.bbclass
index c2de6bb..cedb4fa 100644
--- a/poky/meta/classes/kernel-uimage.bbclass
+++ b/poky/meta/classes/kernel-uimage.bbclass
@@ -3,7 +3,7 @@
 python __anonymous () {
     if "uImage" in d.getVar('KERNEL_IMAGETYPES'):
         depends = d.getVar("DEPENDS")
-        depends = "%s u-boot-mkimage-native" % depends
+        depends = "%s u-boot-tools-native" % depends
         d.setVar("DEPENDS", depends)
 
         # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
diff --git a/poky/meta/classes/kernel-yocto.bbclass b/poky/meta/classes/kernel-yocto.bbclass
index 496c8a7..ed9bcfa 100644
--- a/poky/meta/classes/kernel-yocto.bbclass
+++ b/poky/meta/classes/kernel-yocto.bbclass
@@ -5,12 +5,21 @@
 
 # returns local (absolute) path names for all valid patches in the
 # src_uri
-def find_patches(d):
+def find_patches(d,subdir):
     patches = src_patches(d)
     patch_list=[]
     for p in patches:
-        _, _, local, _, _, _ = bb.fetch.decodeurl(p)
-        patch_list.append(local)
+        _, _, local, _, _, parm = bb.fetch.decodeurl(p)
+        # if patchdir has been passed, we won't be able to apply the patch
+        # here, so skip it for now; special processing happens later
+        patchdir = ''
+        if "patchdir" in parm:
+            patchdir = parm["patchdir"]
+        if subdir:
+            if subdir == patchdir:
+                patch_list.append(local)
+        else:
+            patch_list.append(local)
 
     return patch_list
 
@@ -119,8 +128,20 @@
 		fi
 	fi
 
+	# Was anyone trying to patch the kernel meta data? We need to do this
+	# here, since the scc commands migrate the .cfg fragments to the
+	# kernel source tree, where they'll be used later.
+	check_git_config
+	patches="${@" ".join(find_patches(d,'kernel-meta'))}"
+	for p in $patches; do
+	    (
+		cd ${WORKDIR}/kernel-meta
+		git am -s $p
+	    )
+	done
+
 	sccs_from_src_uri="${@" ".join(find_sccs(d))}"
-	patches="${@" ".join(find_patches(d))}"
+	patches="${@" ".join(find_patches(d,''))}"
 	feat_dirs="${@" ".join(find_kernel_feature_dirs(d))}"
 
 	# a quick check to make sure we don't have duplicate defconfigs
@@ -138,10 +159,10 @@
 	for f in ${feat_dirs}; do
 		if [ -d "${WORKDIR}/$f/meta" ]; then
 			includes="$includes -I${WORKDIR}/$f/kernel-meta"
-	        elif [ -d "${WORKDIR}/$f" ]; then
-			includes="$includes -I${WORKDIR}/$f"
 		elif [ -d "${WORKDIR}/../oe-local-files/$f" ]; then
 			includes="$includes -I${WORKDIR}/../oe-local-files/$f"
+	        elif [ -d "${WORKDIR}/$f" ]; then
+			includes="$includes -I${WORKDIR}/$f"
 		fi
 	done
 	for s in ${sccs} ${patches}; do
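
A standalone sketch of the new patchdir filtering in find_patches(): with a subdir argument only patches whose patchdir parameter matches are returned, while the no-subdir call keeps the old behaviour of returning everything. Patch names are hypothetical:

    def find_patches(patches, subdir):
        patch_list = []
        for local, parm in patches:
            patchdir = parm.get("patchdir", "")
            if subdir:
                if subdir == patchdir:
                    patch_list.append(local)
            else:
                patch_list.append(local)
        return patch_list

    patches = [("0001-fix.patch", {}),
               ("0002-meta.patch", {"patchdir": "kernel-meta"})]
    print(find_patches(patches, "kernel-meta"))  # ['0002-meta.patch']
    print(find_patches(patches, ""))             # both patches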
diff --git a/poky/meta/classes/kernel.bbclass b/poky/meta/classes/kernel.bbclass
index bd185e2..9da9818 100644
--- a/poky/meta/classes/kernel.bbclass
+++ b/poky/meta/classes/kernel.bbclass
@@ -487,6 +487,15 @@
 		mkdir -p $kerneldir/arch/${ARCH}/include/generated/
 		cp -fR arch/${ARCH}/include/generated/* $kerneldir/arch/${ARCH}/include/generated/
 	fi
+
+	if (grep -q -i -e '^CONFIG_UNWINDER_ORC=y$' $kerneldir/.config); then
+		# With CONFIG_UNWINDER_ORC (the default in 4.14), objtool is required for
+		# out-of-tree modules to be able to generate object files.
+		if [ -x tools/objtool/objtool ]; then
+			mkdir -p ${kerneldir}/tools/objtool
+			cp tools/objtool/objtool ${kerneldir}/tools/objtool/
+		fi
+	fi
 }
 
 # We don't need to stage anything, not the modules/firmware since those would clash with linux-firmware
@@ -580,7 +589,7 @@
 PACKAGESPLITFUNCS_prepend = "split_kernel_packages "
 
 python split_kernel_packages () {
-    do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex='^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='${KERNEL_PACKAGE_NAME}-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
+    do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex=r'^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='${KERNEL_PACKAGE_NAME}-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
 }
 
 # Many scripts want to look in arch/$arch/boot for the bootable
diff --git a/poky/meta/classes/libc-package.bbclass b/poky/meta/classes/libc-package.bbclass
index 345ec29..8859dad 100644
--- a/poky/meta/classes/libc-package.bbclass
+++ b/poky/meta/classes/libc-package.bbclass
@@ -37,16 +37,11 @@
                 d.setVar("DEPENDS", depends)
                 d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
                 break
-
-    # try to fix disable charsets/locales/locale-code compile fail
-    if bb.utils.contains('DISTRO_FEATURES', 'libc-charsets', True, False, d) and \
-            bb.utils.contains('DISTRO_FEATURES', 'libc-locales', True, False, d) and \
-            bb.utils.contains('DISTRO_FEATURES', 'libc-locale-code', True, False, d):
-        d.setVar('PACKAGE_NO_GCONV', '0')
-    else:
-        d.setVar('PACKAGE_NO_GCONV', '1')
 }
 
+# try to fix disable charsets/locales/locale-code compile fail
+PACKAGE_NO_GCONV ?= "0"
+
 OVERRIDES_append = ":${TARGET_ARCH}-${TARGET_OS}"
 
 locale_base_postinst_ontarget() {
@@ -69,9 +64,14 @@
 	for i in $treedir/${datadir}/i18n/charmaps/*gz; do 
 		gunzip $i
 	done
-	tar -cf - -C ${LOCALETREESRC}${base_libdir} -p . | tar -xf - -C $treedir/${base_libdir}
-	if [ -f ${STAGING_DIR_NATIVE}${prefix_native}/lib/libgcc_s.* ]; then
-		tar -cf - -C ${STAGING_DIR_NATIVE}/${prefix_native}/${base_libdir} -p libgcc_s.* | tar -xf - -C $treedir/${base_libdir}
+	# The extract pattern "./l*.so*" is carefully selected so that it will
+	# match ld*.so and lib*.so*, but not any files in the gconv directory
+	# (if it exists). This makes sure we only unpack the files we need.
+	# This is important in case usrmerge is set in DISTRO_FEATURES, which
+	# means ${base_libdir} == ${libdir}.
+	tar -cf - -C ${LOCALETREESRC}${base_libdir} -p . | tar -xf - -C $treedir/${base_libdir} --wildcards './l*.so*'
+	if [ -f ${STAGING_LIBDIR_NATIVE}/libgcc_s.* ]; then
+		tar -cf - -C ${STAGING_LIBDIR_NATIVE} -p libgcc_s.* | tar -xf - -C $treedir/${base_libdir}
 	fi
 	install -m 0755 ${LOCALETREESRC}${bindir}/localedef $treedir/${base_bindir}
 }
@@ -115,8 +115,8 @@
     def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
         deps = []
         f = open(fn, "rb")
-        c_re = re.compile('^copy "(.*)"')
-        i_re = re.compile('^include "(\w+)".*')
+        c_re = re.compile(r'^copy "(.*)"')
+        i_re = re.compile(r'^include "(\w+)".*')
         for l in f.readlines():
             l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
@@ -130,15 +130,15 @@
         if bpn != 'glibc':
             d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
-    do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
+    do_split_packages(d, gconv_libdir, file_regex=r'^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
         description='gconv module for character set %s', hook=calc_gconv_deps, \
         extra_depends=bpn+'-gconv')
 
     def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
         deps = []
         f = open(fn, "rb")
-        c_re = re.compile('^copy "(.*)"')
-        i_re = re.compile('^include "(\w+)".*')
+        c_re = re.compile(r'^copy "(.*)"')
+        i_re = re.compile(r'^include "(\w+)".*')
         for l in f.readlines():
             l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
@@ -152,14 +152,14 @@
         if bpn != 'glibc':
             d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
-    do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
+    do_split_packages(d, charmap_dir, file_regex=r'^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
         description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
 
     def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
         deps = []
         f = open(fn, "rb")
-        c_re = re.compile('^copy "(.*)"')
-        i_re = re.compile('^include "(\w+)".*')
+        c_re = re.compile(r'^copy "(.*)"')
+        i_re = re.compile(r'^include "(\w+)".*')
         for l in f.readlines():
             l = l.decode("latin-1")
             m = c_re.match(l) or i_re.match(l)
@@ -173,13 +173,13 @@
         if bpn != 'glibc':
             d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
 
-    do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
+    do_split_packages(d, locales_dir, file_regex=r'(.*)', output_pattern=bpn+'-localedata-%s', \
         description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
     d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv')
 
     use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE")
 
-    dot_re = re.compile("(.*)\.(.*)")
+    dot_re = re.compile(r"(.*)\.(.*)")
 
     # Read in supported locales and associated encodings
     supported = {}
@@ -236,6 +236,8 @@
         if use_cross_localedef == "1":
             target_arch = d.getVar('TARGET_ARCH')
             locale_arch_options = { \
+                "arc":     " --uint32-align=4 --little-endian ", \
+                "arceb":   " --uint32-align=4 --big-endian ",    \
                 "arm":     " --uint32-align=4 --little-endian ", \
                 "armeb":   " --uint32-align=4 --big-endian ",    \
                 "aarch64": " --uint32-align=4 --little-endian ",    \
@@ -278,7 +280,7 @@
             qemu_options = d.getVar('QEMU_OPTIONS')
 
             cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-                -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
+                -E LD_LIBRARY_PATH=%s %s %s${base_bindir}/localedef %s" % \
                 (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
 
         commands["%s/%s" % (outputpath, name)] = cmd
@@ -290,7 +292,7 @@
         d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
         d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES')))
         rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
-        m = re.match("(.*)_(.*)", name)
+        m = re.match(r"(.*)_(.*)", name)
         if m:
             rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
         d.setVar('RPROVIDES_%s' % pkgname, rprovides)
@@ -343,11 +345,14 @@
         makefile = oe.path.join(d.getVar("WORKDIR"), "locale-tree", "Makefile")
         m = open(makefile, "w")
         m.write("all: %s\n\n" % " ".join(commands.keys()))
-        for cmd in commands:
+        total = len(commands)
+        for i, cmd in enumerate(commands):
             m.write(cmd + ":\n")
+            m.write("\t@echo 'Progress %d/%d'\n" % (i, total))
             m.write("\t" + commands[cmd] + "\n\n")
         m.close()
         d.setVar("EXTRA_OEMAKE", "-C %s ${PARALLEL_MAKE}" % (os.path.dirname(makefile)))
+        d.setVarFlag("oe_runmake", "progress", r"outof:Progress\s(\d+)/(\d+)")
         bb.note("Executing binary locale generation makefile")
         bb.build.exec_func("oe_runmake", d)
         bb.note("collecting binary locales from locale tree")
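
Roughly what the generated locale Makefile now contains: each rule echoes a 'Progress i/total' marker that the outof: progress handler scrapes to drive the build progress display. A sketch with hypothetical locale names and commands:

    commands = {"en_US.UTF-8": "localedef ...", "de_DE.UTF-8": "localedef ..."}
    out = ["all: %s\n" % " ".join(commands)]
    total = len(commands)
    for i, name in enumerate(commands):
        out.append("%s:\n\t@echo 'Progress %d/%d'\n\t%s\n" % (name, i, total, commands[name]))
    print("\n".join(out))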
@@ -356,12 +361,12 @@
     if use_bin in ('compile', 'precompiled'):
         lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
         if lcsplit and int(lcsplit):
-            do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \
+            do_split_packages(d, binary_locales_dir, file_regex=r'^(.*/LC_\w+)', \
                 output_pattern=bpn+'-binary-localedata-%s', \
                 description='binary locale definition for %s', recursive=True,
                 hook=metapkg_hook, extra_depends='', allow_dirs=True, match_path=True)
         else:
-            do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
+            do_split_packages(d, binary_locales_dir, file_regex=r'(.*)', \
                 output_pattern=bpn+'-binary-localedata-%s', \
                 description='binary locale definition for %s', extra_depends='', allow_dirs=True)
     else:
diff --git a/poky/meta/classes/license.bbclass b/poky/meta/classes/license.bbclass
index aec6999..ed91a4b 100644
--- a/poky/meta/classes/license.bbclass
+++ b/poky/meta/classes/license.bbclass
@@ -316,8 +316,8 @@
     This function checks if a recipe has any LICENSE_FLAGS that
     aren't whitelisted.
 
-    If it does, it returns the first LICENSE_FLAGS item missing from the
-    whitelist, or all of the LICENSE_FLAGS if there is no whitelist.
+    If it does, it returns all LICENSE_FLAGS missing from the whitelist, or
+    all of the LICENSE_FLAGS if there is no whitelist.
 
     If everything is properly whitelisted, it returns None.
     """
@@ -354,22 +354,23 @@
         return False
 
     def all_license_flags_match(license_flags, whitelist):
-        """ Return first unmatched flag, None if all flags match """
+        """ Return all unmatched flags, None if all flags match """
         pn = d.getVar('PN')
         split_whitelist = whitelist.split()
+        flags = []
         for flag in license_flags.split():
             if not license_flag_matches(flag, split_whitelist, pn):
-                return flag
-        return None
+                flags.append(flag)
+        return flags if flags else None
 
     license_flags = d.getVar('LICENSE_FLAGS')
     if license_flags:
         whitelist = d.getVar('LICENSE_FLAGS_WHITELIST')
         if not whitelist:
-            return license_flags
-        unmatched_flag = all_license_flags_match(license_flags, whitelist)
-        if unmatched_flag:
-            return unmatched_flag
+            return license_flags.split()
+        unmatched_flags = all_license_flags_match(license_flags, whitelist)
+        if unmatched_flags:
+            return unmatched_flags
     return None
 
 def check_license_format(d):
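
A simplified sketch of the behavioural change: the check now gathers every unmatched LICENSE_FLAGS entry rather than stopping at the first (real matching also accepts flag_<pn> forms, elided here):

    def unmatched_flags(license_flags, whitelist):
        wl = whitelist.split()
        flags = [f for f in license_flags.split() if f not in wl]
        return flags or None

    print(unmatched_flags("commercial_a commercial_b", "commercial_a"))
    # ['commercial_b']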
diff --git a/poky/meta/classes/license_image.bbclass b/poky/meta/classes/license_image.bbclass
index b65ff56..6750038 100644
--- a/poky/meta/classes/license_image.bbclass
+++ b/poky/meta/classes/license_image.bbclass
@@ -32,11 +32,12 @@
 
     rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'),
                         d.getVar('IMAGE_NAME'), 'license.manifest')
-    write_license_files(d, rootfs_license_manifest, pkg_dic)
+    write_license_files(d, rootfs_license_manifest, pkg_dic, rootfs=True)
 }
 
-def write_license_files(d, license_manifest, pkg_dic):
+def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
     import re
+    import stat
 
     bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split()
     bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses)
@@ -94,7 +95,7 @@
     # With both options set we see a .5 M increase in core-image-minimal
     copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST')
     copy_lic_dirs = d.getVar('COPY_LIC_DIRS')
-    if copy_lic_manifest == "1":
+    if rootfs and copy_lic_manifest == "1":
         rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS'), 
                                 'usr', 'share', 'common-licenses')
         bb.utils.mkdirhier(rootfs_license_dir)
@@ -146,6 +147,18 @@
                             continue
 
                         os.link(pkg_license, pkg_rootfs_license)
+            # Fixup file ownership and permissions
+            for walkroot, dirs, files in os.walk(rootfs_license_dir):
+                for f in files:
+                    p = os.path.join(walkroot, f)
+                    os.lchown(p, 0, 0)
+                    if not os.path.islink(p):
+                        os.chmod(p, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+                for dir in dirs:
+                    p = os.path.join(walkroot, dir)
+                    os.lchown(p, 0, 0)
+                    os.chmod(p, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+
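
For reference, the stat flag combinations used by the new ownership/permission fixup decode to plain 0644 files and 0755 directories under root:root:

    import stat
    print(oct(stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH))
    # 0o644
    print(oct(stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH))
    # 0o755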
 
 
 def license_deployed_manifest(d):
@@ -176,7 +189,7 @@
                                     d.getVar('IMAGE_NAME'))
     bb.utils.mkdirhier(lic_manifest_dir)
     image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest')
-    write_license_files(d, image_license_manifest, man_dic)
+    write_license_files(d, image_license_manifest, man_dic, rootfs=False)
 
 def get_deployed_dependencies(d):
     """
diff --git a/poky/meta/classes/mcextend.bbclass b/poky/meta/classes/mcextend.bbclass
new file mode 100644
index 0000000..0f8f962
--- /dev/null
+++ b/poky/meta/classes/mcextend.bbclass
@@ -0,0 +1,16 @@
+python mcextend_virtclass_handler () {
+    cls = e.data.getVar("BBEXTENDCURR")
+    variant = e.data.getVar("BBEXTENDVARIANT")
+    if cls != "mcextend" or not variant:
+        return
+
+    override = ":virtclass-mcextend-" + variant
+
+    e.data.setVar("PN", e.data.getVar("PN", False) + "-" + variant)
+    e.data.setVar("MCNAME", variant)
+    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
+}
+
+addhandler mcextend_virtclass_handler
+mcextend_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
+
diff --git a/poky/meta/classes/meson.bbclass b/poky/meta/classes/meson.bbclass
index 3cbdcf1..115d1ae 100644
--- a/poky/meta/classes/meson.bbclass
+++ b/poky/meta/classes/meson.bbclass
@@ -54,8 +54,12 @@
         return 'ppc'
     elif arch == 'powerpc64':
         return 'ppc64'
+    elif arch == 'armeb':
+        return 'arm'
     elif arch == 'mipsel':
         return 'mips'
+    elif arch == 'mips64el':
+        return 'mips64'
     elif re.match(r"i[3-6]86", arch):
         return "x86"
     else:
@@ -85,6 +89,7 @@
 strip = ${@meson_array('STRIP', d)}
 readelf = ${@meson_array('READELF', d)}
 pkgconfig = 'pkg-config'
+llvm-config = 'llvm-config8.0.0'
 
 [properties]
 needs_exe_wrapper = true
diff --git a/poky/meta/classes/metadata_scm.bbclass b/poky/meta/classes/metadata_scm.bbclass
index fa791f0..58bb4c5 100644
--- a/poky/meta/classes/metadata_scm.bbclass
+++ b/poky/meta/classes/metadata_scm.bbclass
@@ -3,55 +3,15 @@
 
 def base_detect_revision(d):
     path = base_get_scmbasepath(d)
-
-    scms = [base_get_metadata_git_revision]
-
-    for scm in scms:
-        rev = scm(path, d)
-        if rev != "<unknown>":
-            return rev
-
-    return "<unknown>"
+    return base_get_metadata_git_revision(path, d)
 
 def base_detect_branch(d):
     path = base_get_scmbasepath(d)
-
-    scms = [base_get_metadata_git_branch]
-
-    for scm in scms:
-        rev = scm(path, d)
-        if rev != "<unknown>":
-            return rev.strip()
-
-    return "<unknown>"
+    return base_get_metadata_git_branch(path, d)
 
 def base_get_scmbasepath(d):
     return os.path.join(d.getVar('COREBASE'), 'meta')
 
-def base_get_metadata_monotone_branch(path, d):
-    monotone_branch = "<unknown>"
-    try:
-        with open("%s/_MTN/options" % path) as f:
-            monotone_branch = f.read().strip()
-            if monotone_branch.startswith( "database" ):
-                monotone_branch_words = monotone_branch.split()
-                monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
-    except:
-        pass
-    return monotone_branch
-
-def base_get_metadata_monotone_revision(path, d):
-    monotone_revision = "<unknown>"
-    try:
-        with open("%s/_MTN/revision" % path) as f:
-            monotone_revision = f.read().strip()
-            if monotone_revision.startswith( "format_version" ):
-                monotone_revision_words = monotone_revision.split()
-                monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
-    except IOError:
-        pass
-    return monotone_revision
-
 def base_get_metadata_svn_revision(path, d):
     # This only works with older subversion. For newer versions 
     # this function will need to be fixed by someone interested
diff --git a/poky/meta/classes/module.bbclass b/poky/meta/classes/module.bbclass
index e344960..c0dfa35 100644
--- a/poky/meta/classes/module.bbclass
+++ b/poky/meta/classes/module.bbclass
@@ -48,6 +48,7 @@
 module_do_install() {
 	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
 	oe_runmake DEPMOD=echo MODLIB="${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}" \
+	           INSTALL_FW_PATH="${D}${nonarch_base_libdir}/firmware" \
 	           CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
 	           O=${STAGING_KERNEL_BUILDDIR} \
 	           ${MODULES_INSTALL_TARGET}
diff --git a/poky/meta/classes/multilib.bbclass b/poky/meta/classes/multilib.bbclass
index 6c6499a..2b761f3 100644
--- a/poky/meta/classes/multilib.bbclass
+++ b/poky/meta/classes/multilib.bbclass
@@ -88,8 +88,6 @@
 addhandler multilib_virtclass_handler
 multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
 
-STAGINGCC_prepend = "${BBEXTENDVARIANT}-"
-
 python __anonymous () {
     variant = d.getVar("BBEXTENDVARIANT")
 
diff --git a/poky/meta/classes/native.bbclass b/poky/meta/classes/native.bbclass
index ddccfe2..e9f6c74 100644
--- a/poky/meta/classes/native.bbclass
+++ b/poky/meta/classes/native.bbclass
@@ -119,6 +119,9 @@
 # reused if we manipulate the paths.
 SSTATE_SCAN_CMD ?= "${SSTATE_SCAN_CMD_NATIVE}"
 
+# No strip sysroot when DEBUG_BUILD is enabled
+INHIBIT_SYSROOT_STRIP ?= "${@oe.utils.vartrue('DEBUG_BUILD', '1', '', d)}"
+
 python native_virtclass_handler () {
     pn = e.data.getVar("PN")
     if not pn.endswith("-native"):
@@ -189,3 +192,6 @@
 do_populate_sysroot[stamp-extra-info] = ""
 
 USE_NLS = "no"
+
+RECIPERDEPTASK = "do_populate_sysroot"
+do_populate_sysroot[rdeptask] = "${RECIPERDEPTASK}"
diff --git a/poky/meta/classes/package.bbclass b/poky/meta/classes/package.bbclass
index 66e423e..4c0a859 100644
--- a/poky/meta/classes/package.bbclass
+++ b/poky/meta/classes/package.bbclass
@@ -75,7 +75,7 @@
             return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
 
     # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
-    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
+    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
 
     # Remaining package name validity fixes
     return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
@@ -344,7 +344,7 @@
 
     return debugfiles.keys()
 
-def append_source_info(file, sourcefile, d, fatal=True):
+def source_info(file, d, fatal=True):
     import subprocess
 
     cmd = ["dwarfsrcfiles", file]
@@ -363,22 +363,15 @@
         bb.note(msg)
 
     debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
-    # filenames are null-separated - this is an artefact of the previous use
-    # of rpm's debugedit, which was writing them out that way, and the code elsewhere
-    # is still assuming that.
-    debuglistoutput = '\0'.join(debugsources) + '\0'
-    lf = bb.utils.lockfile(sourcefile + ".lock")
-    with open(sourcefile, 'a') as sf:
-        sf.write(debuglistoutput)
-    bb.utils.unlockfile(lf)
 
+    return list(debugsources)
 
-def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, sourcefile, d):
+def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d):
     # Function to split a single file into two components, one is the stripped
     # target system binary, the other contains any debugging information. The
     # two files are linked to reference each other.
     #
-    # sourcefile is also generated containing a list of debugsources
+    # return a tuple of (file, debugsources)
 
     import stat
     import subprocess
@@ -386,6 +379,7 @@
     src = file[len(dvar):]
     dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
     debugfile = dvar + dest
+    sources = []
 
     # Split the file...
     bb.utils.mkdirhier(os.path.dirname(debugfile))
@@ -397,7 +391,7 @@
 
     # We ignore kernel modules, we don't generate debug info files.
     if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
-        return 1
+        return (file, sources)
 
     newmode = None
     if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
@@ -407,7 +401,7 @@
 
     # We need to extract the debug src information here...
     if debugsrcdir:
-        append_source_info(file, sourcefile, d)
+        sources = source_info(file, d)
 
     bb.utils.mkdirhier(os.path.dirname(debugfile))
 
@@ -419,17 +413,26 @@
     if newmode:
         os.chmod(file, origmode)
 
-    return 0
+    return (file, sources)
 
-def copydebugsources(debugsrcdir, d):
+def copydebugsources(debugsrcdir, sources, d):
     # The debug src information written out to sourcefile is further processed
     # and copied to the destination here.
 
     import stat
     import subprocess
 
-    sourcefile = d.expand("${WORKDIR}/debugsources.list")
-    if debugsrcdir and os.path.isfile(sourcefile):
+    if debugsrcdir and sources:
+        sourcefile = d.expand("${WORKDIR}/debugsources.list")
+        bb.utils.remove(sourcefile)
+
+        # filenames are null-separated - this is an artefact of the previous use
+        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
+        # is still assuming that.
+        debuglistoutput = '\0'.join(sources) + '\0'
+        with open(sourcefile, 'a') as sf:
+            sf.write(debuglistoutput)
+
         dvar = d.getVar('PKGD')
         strip = d.getVar("STRIP")
         objcopy = d.getVar("OBJCOPY")
@@ -471,7 +474,8 @@
 
         # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
         # Work around this by manually finding and copying any symbolic links that made it through.
-        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
+        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
+                (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
         subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
 
         # The copy by cpio may have resulted in some empty directories!  Remove these
@@ -932,9 +936,6 @@
         debuglibdir = ""
         debugsrcdir = "/usr/src/debug"
 
-    sourcefile = d.expand("${WORKDIR}/debugsources.list")
-    bb.utils.remove(sourcefile)
-
     #
     # First lets figure out all of the files we may have to process ... do this only once!
     #
@@ -1039,11 +1040,15 @@
     # First lets process debug splitting
     #
     if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
-        oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, sourcefile, d))
+        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))
 
         if debugsrcdir and not targetos.startswith("mingw"):
             for file in staticlibs:
-                append_source_info(file, sourcefile, d, fatal=False)
+                results.append((file, source_info(file, d, fatal=False)))
+
+        sources = set()
+        for r in results:
+            sources.update(r[1])
 
         # Hardlink our debug symbols to the other hardlink copies
         for ref in inodes:
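
A sketch of the new collection scheme: each splitdebuginfo() call returns a (file, debugsources) tuple instead of appending to a shared list file, and the parent merges the per-file lists into one set. Paths here are hypothetical:

    results = [("/pkg/usr/bin/foo", ["src/foo.c", "src/util.c"]),
               ("/pkg/usr/lib/libbar.so.1", ["src/bar.c", "src/util.c"])]
    sources = set()
    for r in results:
        sources.update(r[1])
    print(sorted(sources))  # ['src/bar.c', 'src/foo.c', 'src/util.c']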
@@ -1091,7 +1096,7 @@
 
         # Process the debugsrcdir if requested...
         # This copies and places the referenced sources for later debugging...
-        copydebugsources(debugsrcdir, d)
+        copydebugsources(debugsrcdir, sources, d)
     #
     # End of debug splitting
     #
@@ -1123,7 +1128,7 @@
     workdir = d.getVar('WORKDIR')
     outdir = d.getVar('DEPLOY_DIR')
     dvar = d.getVar('PKGD')
-    packages = d.getVar('PACKAGES')
+    packages = d.getVar('PACKAGES').split()
     pn = d.getVar('PN')
 
     bb.utils.mkdirhier(outdir)
@@ -1133,32 +1138,34 @@
 
     split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
 
-    # If debug-with-srcpkg mode is enabled then the src package is added
-    # into the package list and the source directory as its main content
+    # If debug-with-srcpkg mode is enabled then add the source package if it
+    # doesn't exist and add the source file contents to the source package.
     if split_source_package:
         src_package_name = ('%s-src' % d.getVar('PN'))
-        packages += (' ' + src_package_name)
+        if src_package_name not in packages:
+            packages.append(src_package_name)
         d.setVar('FILES_%s' % src_package_name, '/usr/src/debug')
 
     # Sanity check PACKAGES for duplicates
     # Sanity should be moved to sanity.bbclass once we have the infrastructure
     package_dict = {}
 
-    for i, pkg in enumerate(packages.split()):
+    for i, pkg in enumerate(packages):
         if pkg in package_dict:
             msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
             package_qa_handle_error("packages-list", msg, d)
-        # If debug-with-srcpkg mode is enabled then the src package will have
-        # priority over dbg package when assigning the files.
-        # This allows src package to include source files and remove them from dbg.
-        elif split_source_package and pkg.endswith("-src"):
+        # Ensure the source package gets the chance to pick up the source files
+        # before the debug package by ordering it first in PACKAGES. Whether it
+        # actually picks up any source files is controlled by
+        # PACKAGE_DEBUG_SPLIT_STYLE.
+        elif pkg.endswith("-src"):
             package_dict[pkg] = (10, i)
         elif autodebug and pkg.endswith("-dbg"):
             package_dict[pkg] = (30, i)
         else:
             package_dict[pkg] = (50, i)
-    package_list = sorted(package_dict.keys(), key=package_dict.get)
-    d.setVar('PACKAGES', ' '.join(package_list))
+    packages = sorted(package_dict.keys(), key=package_dict.get)
+    d.setVar('PACKAGES', ' '.join(packages))
     pkgdest = d.getVar('PKGDEST')
 
     seen = []
@@ -1176,7 +1183,7 @@
             if "/.debug/" in path or path.endswith("/.debug"):
                 debug.append(path)
 
-    for pkg in package_list:
+    for pkg in packages:
         root = os.path.join(pkgdest, pkg)
         bb.utils.mkdirhier(root)
 
@@ -1247,7 +1254,7 @@
 
     # Handle LICENSE_EXCLUSION
     package_list = []
-    for pkg in packages.split():
+    for pkg in packages:
         if d.getVar('LICENSE_EXCLUSION-' + pkg):
             msg = "%s has an incompatible license. Excluding from packaging." % pkg
             package_qa_handle_error("incompatible-license", msg, d)
@@ -1438,7 +1445,7 @@
             if fstat.st_ino not in seen:
                 seen.add(fstat.st_ino)
                 total_size += fstat.st_size
-        d.setVar('FILES_INFO', json.dumps(files))
+        d.setVar('FILES_INFO', json.dumps(files, sort_keys=True))
 
         subdata_file = pkgdatadir + "/runtime/%s" % pkg
         sf = open(subdata_file, 'w')
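
Why sort_keys=True matters for reproducibility: dict key order depends on insertion history, so two builds that walked the files in different orders could otherwise emit different FILES_INFO bytes for identical content:

    import json
    a = json.dumps({"/usr/bin/a": 10, "/usr/bin/b": 20}, sort_keys=True)
    b = json.dumps({"/usr/bin/b": 20, "/usr/bin/a": 10}, sort_keys=True)
    print(a == b)  # True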
@@ -1561,12 +1568,12 @@
         if pkg not in requires_files:
             requires_files[pkg] = []
 
-        for file in provides:
+        for file in sorted(provides):
             provides_files[pkg].append(file)
             key = "FILERPROVIDES_" + file + "_" + pkg
             d.appendVar(key, " " + " ".join(provides[file]))
 
-        for file in requires:
+        for file in sorted(requires):
             requires_files[pkg].append(file)
             key = "FILERDEPENDS_" + file + "_" + pkg
             d.appendVar(key, " " + " ".join(requires[file]))
@@ -1589,8 +1596,8 @@
         bb.note("not generating shlibs")
         return
 
-    lib_re = re.compile("^.*\.so")
-    libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))
+    lib_re = re.compile(r"^.*\.so")
+    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
 
     packages = d.getVar('PACKAGES')
 
@@ -1631,17 +1638,17 @@
         fd.close()
         rpath = tuple()
         for l in lines:
-            m = re.match("\s+RPATH\s+([^\s]*)", l)
+            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
             if m:
                 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                 rpath = tuple(map(os.path.normpath, rpaths))
         for l in lines:
-            m = re.match("\s+NEEDED\s+([^\s]*)", l)
+            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
             if m:
                 dep = m.group(1)
                 if dep not in needed:
                     needed.add((dep, file, rpath))
-            m = re.match("\s+SONAME\s+([^\s]*)", l)
+            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
             if m:
                 this_soname = m.group(1)
                 prov = (this_soname, ldir, pkgver)
@@ -1721,7 +1728,7 @@
             out, err = p.communicate()
             # process the output, grabbing all .dll names
             if p.returncode == 0:
-                for m in re.finditer("DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
+                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                     dllname = m.group(1)
                     if dllname:
                         needed[pkg].add((dllname, file, tuple()))
@@ -1867,7 +1874,7 @@
             os.remove(deps_file)
         if len(deps):
             fd = open(deps_file, 'w')
-            for dep in deps:
+            for dep in sorted(deps):
                 fd.write(dep + '\n')
             fd.close()
 }
@@ -1882,9 +1889,9 @@
     shlibs_dirs = d.getVar('SHLIBSDIRS').split()
     shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
-    pc_re = re.compile('(.*)\.pc$')
-    var_re = re.compile('(.*)=(.*)')
-    field_re = re.compile('(.*): (.*)')
+    pc_re = re.compile(r'(.*)\.pc$')
+    var_re = re.compile(r'(.*)=(.*)')
+    field_re = re.compile(r'(.*): (.*)')
 
     pkgconfig_provided = {}
     pkgconfig_needed = {}
@@ -1932,7 +1939,7 @@
         if not os.path.exists(dir):
             continue
         for file in os.listdir(dir):
-            m = re.match('^(.*)\.pclist$', file)
+            m = re.match(r'^(.*)\.pclist$', file)
             if m:
                 pkg = m.group(1)
                 fd = open(os.path.join(dir, file))
@@ -1988,7 +1995,7 @@
     packages = d.getVar('PACKAGES').split()
     for pkg in packages:
         rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
-        for dep in pkglibdeps[pkg]:
+        for dep in sorted(pkglibdeps[pkg]):
             # Add the dep if it's not already there, or if no comparison is set
             if dep not in rdepends:
                 rdepends[dep] = []
@@ -2021,7 +2028,7 @@
         #bb.note('depends for %s is %s' % (base, depends))
         rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
-        for depend in depends:
+        for depend in sorted(depends):
             if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                 #bb.note("Skipping %s" % depend)
                 continue
@@ -2042,7 +2049,7 @@
         #bb.note('rdepends for %s is %s' % (base, rdepends))
         rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
-        for depend in rdepends:
+        for depend in sorted(rdepends):
             if depend.find('virtual-locale-') != -1:
                 #bb.note("Skipping %s" % depend)
                 continue
diff --git a/poky/meta/classes/package_ipk.bbclass b/poky/meta/classes/package_ipk.bbclass
index 5eb910c..d1b317b 100644
--- a/poky/meta/classes/package_ipk.bbclass
+++ b/poky/meta/classes/package_ipk.bbclass
@@ -8,7 +8,7 @@
 PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
 
 # Program to be used to build opkg packages
-OPKGBUILDCMD ??= "opkg-build -Z xz"
+OPKGBUILDCMD ??= 'opkg-build -Z xz -a "${XZ_DEFAULTS}"'
 
 OPKG_ARGS += "--force_postinstall --prefer-arch-to-version"
 OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
diff --git a/poky/meta/classes/package_rpm.bbclass b/poky/meta/classes/package_rpm.bbclass
index 21ada34..1a64cb2 100644
--- a/poky/meta/classes/package_rpm.bbclass
+++ b/poky/meta/classes/package_rpm.bbclass
@@ -113,6 +113,10 @@
             source_list = os.listdir(ar_outdir)
             source_number = 0
             for source in source_list:
+                # do_deploy_archives may have already run (from sstate), meaning
+                # a .src.rpm may already exist in ARCHIVER_OUTDIR, so skip it if present.
+                if source.endswith(".src.rpm"):
+                    continue
                 # The rpmbuild doesn't need the root permission, but it needs
                 # to know the file's user and group name, the only user and
                 # group in fakeroot is "root" when working in fakeroot.
@@ -690,7 +694,7 @@
     cmd = cmd + " --define '_tmppath " + workdir + "'"
     if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
         cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
-        cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
+        cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_RPMOUTDIR') + "'"
         cmdsrpm = cmdsrpm + " -bs " + outspecfile
         # Build the .src.rpm
         d.setVar('SBUILDSPEC', cmdsrpm + "\n")
diff --git a/poky/meta/classes/packagegroup.bbclass b/poky/meta/classes/packagegroup.bbclass
index d540d42..94a59e0 100644
--- a/poky/meta/classes/packagegroup.bbclass
+++ b/poky/meta/classes/packagegroup.bbclass
@@ -48,6 +48,8 @@
 deltask do_install
 deltask do_populate_sysroot
 
+INHIBIT_DEFAULT_DEPS = "1"
+
 python () {
     if bb.data.inherits_class('nativesdk', d):
         return
diff --git a/poky/meta/classes/perl-version.bbclass b/poky/meta/classes/perl-version.bbclass
index bafd965..84b67b8 100644
--- a/poky/meta/classes/perl-version.bbclass
+++ b/poky/meta/classes/perl-version.bbclass
@@ -1,4 +1,4 @@
-PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}"
+PERL_OWN_DIR = ""
 
 # Determine the staged version of perl from the perl configuration file
 # Assign vardepvalue, because otherwise signature is changed before and after
@@ -6,7 +6,7 @@
 get_perl_version[vardepvalue] = "${PERL_OWN_DIR}"
 def get_perl_version(d):
     import re
-    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
+    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/config.sh')
     try:
         f = open(cfg, 'r')
     except IOError:
@@ -22,3 +22,45 @@
 
 PERLVERSION := "${@get_perl_version(d)}"
 PERLVERSION[vardepvalue] = ""
+
+
+# Determine the staged arch of perl from the perl configuration file
+# Assign vardepvalue, because otherwise signature is changed before and after
+# perl is built (from None to real version in config.sh).
+def get_perl_arch(d):
+    import re
+    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl5/config.sh')
+    try:
+        f = open(cfg, 'r')
+    except IOError:
+        return None
+    l = f.readlines()
+    f.close()
+    r = re.compile("^archname='([^']*)'")
+    for s in l:
+        m = r.match(s)
+        if m:
+            return m.group(1)
+    return None
+
+PERLARCH := "${@get_perl_arch(d)}"
+PERLARCH[vardepvalue] = ""
+
+# Determine the staged arch of perl-native from the perl configuration file
+# Assign vardepvalue, because otherwise signature is changed before and after
+# perl is built (from None to real version in config.sh).
+def get_perl_hostarch(d):
+    import re
+    cfg = d.expand('${STAGING_LIBDIR_NATIVE}/perl5/config.sh')
+    try:
+        f = open(cfg, 'r')
+    except IOError:
+        return None
+    l = f.readlines()
+    f.close()
+    r = re.compile("^archname='([^']*)'")
+    for s in l:
+        m = r.match(s)
+        if m:
+            return m.group(1)
+    return None
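Both helpers scan config.sh line by line for its archname assignment with the
same regex; a worked example, assuming a typical config.sh line:

    import re
    r = re.compile(r"^archname='([^']*)'")
    m = r.match("archname='x86_64-linux'\n")  # hypothetical config.sh content
    assert m and m.group(1) == "x86_64-linux"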
diff --git a/poky/meta/classes/pixbufcache.bbclass b/poky/meta/classes/pixbufcache.bbclass
index 3378ff2..b07f51e 100644
--- a/poky/meta/classes/pixbufcache.bbclass
+++ b/poky/meta/classes/pixbufcache.bbclass
@@ -3,7 +3,7 @@
 # packages.
 #
 
-DEPENDS += "qemu-native"
+DEPENDS_append_class-target = " qemu-native"
 inherit qemu
 
 PIXBUF_PACKAGES ??= "${PN}"
@@ -54,7 +54,6 @@
 DEPENDS_append_class-native = " gdk-pixbuf-native"
 SYSROOT_PREPROCESS_FUNCS_append_class-native = " pixbufcache_sstate_postinst"
 
-# See base.bbclass for the other half of this
 pixbufcache_sstate_postinst() {
 	mkdir -p ${SYSROOT_DESTDIR}${bindir}
 	dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}
diff --git a/poky/meta/classes/populate_sdk_base.bbclass b/poky/meta/classes/populate_sdk_base.bbclass
index 677ba3c..ebc30d3 100644
--- a/poky/meta/classes/populate_sdk_base.bbclass
+++ b/poky/meta/classes/populate_sdk_base.bbclass
@@ -6,6 +6,7 @@
 COMPLEMENTARY_GLOB[staticdev-pkgs] = '*-staticdev'
 COMPLEMENTARY_GLOB[doc-pkgs] = '*-doc'
 COMPLEMENTARY_GLOB[dbg-pkgs] = '*-dbg'
+COMPLEMENTARY_GLOB[src-pkgs] = '*-src'
 COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest'
 
 def complementary_globs(featurevar, d):
@@ -17,7 +18,7 @@
             globs.append(glob)
     return ' '.join(globs)
 
-SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'doc-pkgs', '', d)}"
+SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs src-pkgs ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'doc-pkgs', '', d)}"
 SDKIMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("SDKIMAGE_FEATURES", d)}'
 
 PACKAGE_ARCHS_append_task-populate-sdk = " sdk-provides-dummy-target"
@@ -225,7 +226,7 @@
 	# Package it up
 	mkdir -p ${SDKDEPLOYDIR}
 	cd ${SDK_OUTPUT}/${SDKPATH}
-	tar ${SDKTAROPTS} -cf - . | xz -T 0 > ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.tar.xz
+	tar ${SDKTAROPTS} -cf - . | xz ${XZ_DEFAULTS} > ${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.tar.xz
 }
 
 TOOLCHAIN_SHAR_EXT_TMPL ?= "${COREBASE}/meta/files/toolchain-shar-extract.sh"
@@ -257,7 +258,7 @@
 		-e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
 		-e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
 		-e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
-		-e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\&')}#g' \
+		-e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\\&')}#g' \
 		-e 's#@SDK_VERSION@#${SDK_VERSION}#g' \
 		-e '/@SDK_PRE_INSTALL_COMMAND@/d' \
 		-e '/@SDK_POST_INSTALL_COMMAND@/d' \
diff --git a/poky/meta/classes/populate_sdk_ext.bbclass b/poky/meta/classes/populate_sdk_ext.bbclass
index 40b0375..800e117 100644
--- a/poky/meta/classes/populate_sdk_ext.bbclass
+++ b/poky/meta/classes/populate_sdk_ext.bbclass
@@ -589,11 +589,8 @@
 		exit 1
 	fi
 	SDK_EXTENSIBLE="1"
-	if [ "$publish" = "1" ] ; then
-		EXTRA_TAR_OPTIONS="$EXTRA_TAR_OPTIONS --exclude=ext-sdk-prepare.py"
-		if [ "${SDK_EXT_TYPE}" = "minimal" ] ; then
-			EXTRA_TAR_OPTIONS="$EXTRA_TAR_OPTIONS --exclude=sstate-cache"
-		fi
+	if [ "$publish" = "1" ] && [ "${SDK_EXT_TYPE}" = "minimal" ] ; then
+		EXTRA_TAR_OPTIONS="$EXTRA_TAR_OPTIONS --exclude=sstate-cache"
 	fi
 }
 SDK_PRE_INSTALL_COMMAND_task-populate-sdk-ext = "${sdk_ext_preinst}"
@@ -635,6 +632,8 @@
 		# sourcing a script. That is why this has to look so ugly.
 		LOGFILE="$target_sdk_dir/preparing_build_system.log"
 		sh -c ". buildtools/environment-setup* > $LOGFILE && cd $target_sdk_dir/`dirname ${oe_init_build_env_path}` && set $target_sdk_dir && . $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE && python $target_sdk_dir/ext-sdk-prepare.py $LOGFILE '${SDK_INSTALL_TARGETS}'" || { echo "printf 'ERROR: this SDK was not fully installed and needs reinstalling\n'" >> $env_setup_script ; exit 1 ; }
+	fi
+	if [ -e $target_sdk_dir/ext-sdk-prepare.py ]; then
 		rm $target_sdk_dir/ext-sdk-prepare.py
 	fi
 	echo done
diff --git a/poky/meta/classes/ptest.bbclass b/poky/meta/classes/ptest.bbclass
index 9d8a798..97865c9 100644
--- a/poky/meta/classes/ptest.bbclass
+++ b/poky/meta/classes/ptest.bbclass
@@ -3,6 +3,9 @@
 This package contains a test directory ${PTEST_PATH} for package test purposes."
 
 PTEST_PATH ?= "${libdir}/${BPN}/ptest"
+PTEST_BUILD_HOST_FILES ?= "Makefile"
+PTEST_BUILD_HOST_PATTERN ?= ""
+
 FILES_${PN}-ptest = "${PTEST_PATH}"
 SECTION_${PN}-ptest = "devel"
 ALLOW_EMPTY_${PN}-ptest = "1"
@@ -45,6 +48,21 @@
     fi
     do_install_ptest
     chown -R root:root ${D}${PTEST_PATH}
+
+    # Strip build host paths from any installed Makefile
+    for filename in ${PTEST_BUILD_HOST_FILES}; do
+        for installed_ptest_file in $(find ${D}${PTEST_PATH} -type f -name $filename); do
+            bbnote "Stripping host paths from: $installed_ptest_file"
+            sed -e 's#${HOSTTOOLS_DIR}/*##g' \
+                -e 's#${WORKDIR}/*=#.=#g' \
+                -e 's#${WORKDIR}/*##g' \
+                -i $installed_ptest_file
+            if [ -n "${PTEST_BUILD_HOST_PATTERN}" ]; then
+               sed -E '/${PTEST_BUILD_HOST_PATTERN}/d' \
+                   -i $installed_ptest_file
+            fi
+        done
+    done
 }
 
 do_configure_ptest_base[dirs] = "${B}"
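Recipes whose installed test Makefiles embed further host-specific paths can
extend these defaults; a hypothetical recipe snippet:

    # Also scrub CMake cache files, and delete any line matching the
    # (example) /srv/builder prefix outright.
    PTEST_BUILD_HOST_FILES = "Makefile CMakeCache.txt"
    PTEST_BUILD_HOST_PATTERN = "/srv/builder"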
diff --git a/poky/meta/classes/python3-dir.bbclass b/poky/meta/classes/python3-dir.bbclass
index 06bb046..7dd130b 100644
--- a/poky/meta/classes/python3-dir.bbclass
+++ b/poky/meta/classes/python3-dir.bbclass
@@ -1,4 +1,4 @@
-PYTHON_BASEVERSION = "3.5"
+PYTHON_BASEVERSION = "3.7"
 PYTHON_ABI = "m"
 PYTHON_DIR = "python${PYTHON_BASEVERSION}"
 PYTHON_PN = "python3"
diff --git a/poky/meta/classes/python3native.bbclass b/poky/meta/classes/python3native.bbclass
index da12a71..a3acaf6 100644
--- a/poky/meta/classes/python3native.bbclass
+++ b/poky/meta/classes/python3native.bbclass
@@ -9,6 +9,8 @@
 export STAGING_INCDIR
 export STAGING_LIBDIR
 
+export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata"
+
 # suppress host user's site-packages dirs.
 export PYTHONNOUSERSITE = "1"
 
diff --git a/poky/meta/classes/sanity.bbclass b/poky/meta/classes/sanity.bbclass
index 374dacf..cab0921 100644
--- a/poky/meta/classes/sanity.bbclass
+++ b/poky/meta/classes/sanity.bbclass
@@ -338,7 +338,7 @@
 def get_filesystem_id(path):
     import subprocess
     try:
-        return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8')
+        return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8').strip()
     except subprocess.CalledProcessError:
         bb.warn("Can't get filesystem id of: %s" % path)
         return None
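The .strip() matters because stat terminates its output with a newline, which
would otherwise defeat comparisons against bare filesystem ids. A small check:

    import subprocess
    out = subprocess.check_output(["stat", "-f", "-c", "%t", "/"]).decode("utf-8")
    assert out.endswith("\n")            # raw output carries the newline
    fsid = out.strip()                   # e.g. 'ef53' on ext4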
diff --git a/poky/meta/classes/scons.bbclass b/poky/meta/classes/scons.bbclass
index b9ae19d..9ee7d15 100644
--- a/poky/meta/classes/scons.bbclass
+++ b/poky/meta/classes/scons.bbclass
@@ -2,16 +2,25 @@
 
 EXTRA_OESCONS ?= ""
 
-do_configure[noexec] = "1"
+do_configure() {
+	if [ -n "${CONFIGURESTAMPFILE}" ]; then
+		if [ -e "${CONFIGURESTAMPFILE}" -a "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a "${CLEANBROKEN}" != "1" ]; then
+			${STAGING_BINDIR_NATIVE}/scons --clean PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS}
+		fi
+
+		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
+		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
+	fi
+}
 
 scons_do_compile() {
-        ${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} || \
-        die "scons build execution failed."
+	${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} || \
+	die "scons build execution failed."
 }
 
 scons_do_install() {
-        ${STAGING_BINDIR_NATIVE}/scons install_root=${D}${prefix} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} install || \
-        die "scons install execution failed."
+	${STAGING_BINDIR_NATIVE}/scons install_root=${D}${prefix} PREFIX=${prefix} prefix=${prefix} ${EXTRA_OESCONS} install || \
+	die "scons install execution failed."
 }
 
 EXPORT_FUNCTIONS do_compile do_install
diff --git a/poky/meta/classes/sstate.bbclass b/poky/meta/classes/sstate.bbclass
index edbfba5..6f51d9c 100644
--- a/poky/meta/classes/sstate.bbclass
+++ b/poky/meta/classes/sstate.bbclass
@@ -11,7 +11,7 @@
 SSTATE_PKGARCH    = "${PACKAGE_ARCH}"
 SSTATE_PKGSPEC    = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
 SSTATE_SWSPEC     = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:"
-SSTATE_PKGNAME    = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_TASKHASH'), d)}"
+SSTATE_PKGNAME    = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_UNIHASH'), d)}"
 SSTATE_PKG        = "${SSTATE_DIR}/${SSTATE_PKGNAME}"
 SSTATE_EXTRAPATH   = ""
 SSTATE_EXTRAPATHWILDCARD = ""
@@ -23,6 +23,7 @@
 # We don't want the sstate to depend on things like the distro string
 # of the system, we let the sstate paths take care of this.
 SSTATE_EXTRAPATH[vardepvalue] = ""
+SSTATE_EXTRAPATHWILDCARD[vardepvalue] = ""
 
 # For multilib rpm the allarch packagegroup files can overwrite (in theory they're identical)
 SSTATE_DUPWHITELIST = "${DEPLOY_DIR}/licenses/"
@@ -61,6 +62,7 @@
 SSTATE_MANMACH ?= "${SSTATE_PKGARCH}"
 
 SSTATECREATEFUNCS = "sstate_hardcode_path"
+SSTATECREATEFUNCS[vardeps] = "SSTATE_SCAN_FILES"
 SSTATEPOSTCREATEFUNCS = ""
 SSTATEPREINSTFUNCS = ""
 SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack"
@@ -82,6 +84,23 @@
 # Whether to verify the GnUPG signatures when extracting sstate archives
 SSTATE_VERIFY_SIG ?= "0"
 
+SSTATE_HASHEQUIV_METHOD ?= "oe.sstatesig.OEOuthashBasic"
+SSTATE_HASHEQUIV_METHOD[doc] = "The fully-qualified function used to calculate \
+    the output hash for a task, which in turn is used to determine equivalence. \
+    "
+
+SSTATE_HASHEQUIV_SERVER ?= ""
+SSTATE_HASHEQUIV_SERVER[doc] = "The hash equivalence server. For example, \
+    'http://192.168.0.1:5000'. Do not include a trailing slash. \
+    "
+
+SSTATE_HASHEQUIV_REPORT_TASKDATA ?= "0"
+SSTATE_HASHEQUIV_REPORT_TASKDATA[doc] = "Report additional data, such as PN, \
+    PV and taskname, to the hash equivalence server. This information is \
+    useful for developers looking at task data, but may leak sensitive \
+    data if the equivalence server is public. \
+    "
+
 python () {
     if bb.data.inherits_class('native', d):
         d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH', False))
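Together with the SSTATE_PKGNAME switch to BB_UNIHASH above, these variables
enable hash equivalence: the server maps each task's output hash to a
canonical unified hash, so two different taskhashes that produce identical
output resolve to one reusable sstate object. A toy sketch of the mapping,
not the real server protocol:

    # Hypothetical in-memory equivalence map: output hash -> unihash.
    equivalent = {}

    def report(taskhash, outhash):
        # The first reporter of an output hash defines the canonical unihash.
        return equivalent.setdefault(outhash, taskhash)

    assert report("taskhash-A", "out-1") == "taskhash-A"
    assert report("taskhash-B", "out-1") == "taskhash-A"  # equivalent output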
@@ -101,7 +120,7 @@
     if bb.data.inherits_class('native', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross', d):
         d.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
         d.setVar('BB_HASHFILENAME', "True ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}")
-        d.setVar('SSTATE_EXTRAPATHWILDCARD', "*/")
+        d.setVar('SSTATE_EXTRAPATHWILDCARD', "${NATIVELSBSTRING}/")
 
     unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split()))
     d.setVar('SSTATETASKS', " ".join(unique_tasks))
@@ -640,7 +659,7 @@
         return
 
     for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \
-             ['sstate_create_package', 'sstate_sign_package'] + \
+             ['sstate_report_unihash', 'sstate_create_package', 'sstate_sign_package'] + \
              (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split():
         # All hooks should run in SSTATE_BUILDDIR.
         bb.build.exec_func(f, d, (sstatebuild,))
@@ -764,6 +783,14 @@
                            d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
 }
 
+python sstate_report_unihash() {
+    report_unihash = getattr(bb.parse.siggen, 'report_unihash', None)
+
+    if report_unihash:
+        ss = sstate_state_fromvars(d)
+        report_unihash(os.getcwd(), ss['task'], d)
+}
+
 #
 # Shell function to decompress and prepare a package for installation
 # Will be run from within SSTATE_INSTDIR.
@@ -780,7 +807,7 @@
 
 BB_HASHCHECK_FUNCTION = "sstate_checkhashes"
 
-def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
+def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False, *, sq_unihash=None):
 
     ret = []
     missed = []
@@ -788,6 +815,11 @@
     if siginfo:
         extension = extension + ".siginfo"
 
+    def gethash(task):
+        if sq_unihash is not None:
+            return sq_unihash[task]
+        return sq_hash[task]
+
     def getpathcomponents(task, d):
         # Magic data from BB_HASHFILENAME
         splithashfn = sq_hashfn[task].split(" ")
@@ -810,7 +842,7 @@
 
         spec, extrapath, tname = getpathcomponents(task, d)
 
-        sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, sq_hash[task], d) + "_" + tname + extension)
+        sstatefile = d.expand("${SSTATE_DIR}/" + extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + extension)
 
         if os.path.exists(sstatefile):
             bb.debug(2, "SState: Found valid sstate file %s" % sstatefile)
@@ -872,7 +904,7 @@
             if task in ret:
                 continue
             spec, extrapath, tname = getpathcomponents(task, d)
-            sstatefile = d.expand(extrapath + generate_sstatefn(spec, sq_hash[task], d) + "_" + tname + extension)
+            sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + extension)
             tasklist.append((task, sstatefile))
 
         if tasklist:
@@ -898,12 +930,12 @@
         evdata = {'missed': [], 'found': []};
         for task in missed:
             spec, extrapath, tname = getpathcomponents(task, d)
-            sstatefile = d.expand(extrapath + generate_sstatefn(spec, sq_hash[task], d) + "_" + tname + ".tgz")
-            evdata['missed'].append( (sq_fn[task], sq_task[task], sq_hash[task], sstatefile ) )
+            sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + ".tgz")
+            evdata['missed'].append( (sq_fn[task], sq_task[task], gethash(task), sstatefile ) )
         for task in ret:
             spec, extrapath, tname = getpathcomponents(task, d)
-            sstatefile = d.expand(extrapath + generate_sstatefn(spec, sq_hash[task], d) + "_" + tname + ".tgz")
-            evdata['found'].append( (sq_fn[task], sq_task[task], sq_hash[task], sstatefile ) )
+            sstatefile = d.expand(extrapath + generate_sstatefn(spec, gethash(task), d) + "_" + tname + ".tgz")
+            evdata['found'].append( (sq_fn[task], sq_task[task], gethash(task), sstatefile ) )
         bb.event.fire(bb.event.MetadataEvent("MissedSstate", evdata), d)
 
     # Print some summary statistics about the current task completion and how much sstate
@@ -1087,12 +1119,15 @@
         with open(i, "r") as f:
             lines = f.readlines()
             for l in lines:
-                (stamp, manifest, workdir) = l.split()
-                if stamp not in stamps and stamp not in preservestamps and stamp in machineindex:
-                    toremove.append(l)
-                    if stamp not in seen:
-                        bb.debug(2, "Stamp %s is not reachable, removing related manifests" % stamp)
-                        seen.append(stamp)
+                try:
+                    (stamp, manifest, workdir) = l.split()
+                    if stamp not in stamps and stamp not in preservestamps and stamp in machineindex:
+                        toremove.append(l)
+                        if stamp not in seen:
+                            bb.debug(2, "Stamp %s is not reachable, removing related manifests" % stamp)
+                            seen.append(stamp)
+                except ValueError:
+                    bb.fatal("Invalid line '%s' in sstate manifest '%s'" % (l, i))
 
         if toremove:
             msg = "Removing %d recipes from the %s sysroot" % (len(toremove), a)
diff --git a/poky/meta/classes/staging.bbclass b/poky/meta/classes/staging.bbclass
index 84e13ba..062b281 100644
--- a/poky/meta/classes/staging.bbclass
+++ b/poky/meta/classes/staging.bbclass
@@ -198,6 +198,10 @@
             if manifest.endswith("-initial.populate_sysroot"):
                 # skip glibc-initial and libgcc-initial due to file overlap
                 continue
+            if not native and (manifest.endswith("-native.populate_sysroot") or "nativesdk-" in manifest):
+                continue
+            if native and not (manifest.endswith("-native.populate_sysroot") or manifest.endswith("-cross.populate_sysroot") or "-cross-" in manifest):
+                continue
             tmanifest = targetdir + "/" + os.path.basename(manifest)
             if os.path.exists(tmanifest):
                 continue
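The two new guards keep manifests from being copied into the wrong shared
sysroot: a target sysroot skips -native and nativesdk- manifests, while a
native sysroot accepts only -native and -cross ones. A predicate sketch with
a hypothetical manifest name:

    def wanted(manifest, native):
        if not native and (manifest.endswith("-native.populate_sysroot")
                           or "nativesdk-" in manifest):
            return False
        if native and not (manifest.endswith("-native.populate_sysroot")
                           or manifest.endswith("-cross.populate_sysroot")
                           or "-cross-" in manifest):
            return False
        return True

    assert wanted("manifest-x86_64-quilt-native.populate_sysroot", True)
    assert not wanted("manifest-x86_64-quilt-native.populate_sysroot", False)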
@@ -573,6 +577,7 @@
 }
 extend_recipe_sysroot[vardepsexclude] += "MACHINE_ARCH PACKAGE_EXTRA_ARCHS SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA"
 
+do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot"
 python do_prepare_recipe_sysroot () {
     bb.build.exec_func("extend_recipe_sysroot", d)
 }
diff --git a/poky/meta/classes/terminal.bbclass b/poky/meta/classes/terminal.bbclass
index 73e765d..6059ae9 100644
--- a/poky/meta/classes/terminal.bbclass
+++ b/poky/meta/classes/terminal.bbclass
@@ -14,6 +14,7 @@
     return " ".join(o.name for o in oe.terminal.prioritized())
 
 def emit_terminal_func(command, envdata, d):
+    import bb.build
     cmd_func = 'do_terminal'
 
     envdata.setVar(cmd_func, 'exec ' + command)
@@ -25,8 +26,7 @@
     bb.utils.mkdirhier(os.path.dirname(runfile))
 
     with open(runfile, 'w') as script:
-        script.write('#!/usr/bin/env %s\n' % d.getVar('SHELL'))
-        script.write('set -e\n')
+        script.write(bb.build.shell_trap_code())
         bb.data.emit_func(cmd_func, script, envdata)
         script.write(cmd_func)
         script.write("\n")
diff --git a/poky/meta/classes/testimage.bbclass b/poky/meta/classes/testimage.bbclass
index cb8c12a..ff1c53b 100644
--- a/poky/meta/classes/testimage.bbclass
+++ b/poky/meta/classes/testimage.bbclass
@@ -32,6 +32,7 @@
 # Booting is handled by this class, and it's not a test in itself.
 # TEST_QEMUBOOT_TIMEOUT can be used to set the maximum time in seconds the launch code will wait for the login prompt.
 # TEST_QEMUPARAMS can be used to pass extra parameters to qemu, e.g. "-m 1024" for setting the amount of ram to 1 GB.
+# TEST_RUNQEMUPARAMS can be used to pass extra parameters to runqemu, e.g. "gl" to enable OpenGL acceleration.
 
 TEST_LOG_DIR ?= "${WORKDIR}/testimage"
 
@@ -65,6 +66,7 @@
 TEST_QEMUBOOT_TIMEOUT ?= "1000"
 TEST_TARGET ?= "qemu"
 TEST_QEMUPARAMS ?= ""
+TEST_RUNQEMUPARAMS ?= ""
 
 TESTIMAGEDEPENDS = ""
 TESTIMAGEDEPENDS_append_qemuall = " qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot qemu-helper-native:do_addto_recipe_sysroot"
@@ -294,7 +296,7 @@
     try:
         # We need to check if runqemu ends unexpectedly
         # or if the worker send us a SIGTERM
-        tc.target.start(params=d.getVar("TEST_QEMUPARAMS"))
+        tc.target.start(params=d.getVar("TEST_QEMUPARAMS"), runqemuparams=d.getVar("TEST_RUNQEMUPARAMS"))
         results = tc.runTests()
     except (RuntimeError, BlockingIOError) as err:
         if isinstance(err, RuntimeError):
diff --git a/poky/meta/classes/uboot-extlinux-config.bbclass b/poky/meta/classes/uboot-extlinux-config.bbclass
index c65c421..b5b1a81 100644
--- a/poky/meta/classes/uboot-extlinux-config.bbclass
+++ b/poky/meta/classes/uboot-extlinux-config.bbclass
@@ -148,5 +148,7 @@
     except OSError:
         bb.fatal('Unable to open %s' % (cfile))
 }
+UBOOT_EXTLINUX_VARS = "CONSOLE MENU_DESCRIPTION ROOT KERNEL_IMAGE FDTDIR FDT KERNEL_ARGS INITRD"
+do_create_extlinux_config[vardeps] += "${@' '.join(['UBOOT_EXTLINUX_%s_%s' % (v, l) for v in d.getVar('UBOOT_EXTLINUX_VARS').split() for l in d.getVar('UBOOT_EXTLINUX_LABELS').split()])}"
 
 addtask create_extlinux_config before do_install do_deploy after do_compile
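The new vardeps flag expands to the full cross product of UBOOT_EXTLINUX_VARS
and UBOOT_EXTLINUX_LABELS, so a change to any per-label override reruns
do_create_extlinux_config. With hypothetical labels:

    cfg_vars = "CONSOLE MENU_DESCRIPTION ROOT".split()  # trimmed for brevity
    cfg_labels = "default altboot".split()              # hypothetical labels
    deps = ['UBOOT_EXTLINUX_%s_%s' % (v, l) for v in cfg_vars for l in cfg_labels]
    # ['UBOOT_EXTLINUX_CONSOLE_default', 'UBOOT_EXTLINUX_CONSOLE_altboot',
    #  'UBOOT_EXTLINUX_MENU_DESCRIPTION_default', ...]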
diff --git a/poky/meta/classes/uboot-sign.bbclass b/poky/meta/classes/uboot-sign.bbclass
index 8ee904e..9e3d1d6 100644
--- a/poky/meta/classes/uboot-sign.bbclass
+++ b/poky/meta/classes/uboot-sign.bbclass
@@ -19,11 +19,15 @@
 # The tasks sequence is set as below, using DEPLOY_IMAGE_DIR as common place to
 # treat the device tree blob:
 #
-#   u-boot:do_deploy_dtb
-#   u-boot:do_deploy
-#   virtual/kernel:do_assemble_fitimage
-#   u-boot:do_concat_dtb
-#   u-boot:do_install
+# * u-boot:do_install_append
+#   Install UBOOT_DTB_BINARY to datadir, so that the kernel can use it for
+#   signing, and the kernel will deploy UBOOT_DTB_BINARY after signing it.
+#
+# * virtual/kernel:do_assemble_fitimage
+#   Sign the image
+#
+# * u-boot:do_deploy[postfuncs]
+#   Deploy files like UBOOT_DTB_IMAGE, UBOOT_DTB_SYMLINK and others.
 #
 # For more details on signature process, please refer to U-Boot documentation.
 
@@ -38,58 +42,65 @@
 UBOOT_NODTB_BINARY ?= "u-boot-nodtb.${UBOOT_SUFFIX}"
 UBOOT_NODTB_SYMLINK ?= "u-boot-nodtb-${MACHINE}.${UBOOT_SUFFIX}"
 
-#
-# Following is relevant only for u-boot recipes:
-#
+# Functions in this bbclass are for u-boot only
+UBOOT_PN = "${@d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'}"
 
-do_deploy_dtb () {
-	mkdir -p ${DEPLOYDIR}
-	cd ${DEPLOYDIR}
+concat_dtb() {
+	if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${PN}" = "${UBOOT_PN}" ]; then
+		mkdir -p ${DEPLOYDIR}
+		if [ -e ${B}/${UBOOT_DTB_BINARY} ]; then
+			ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_BINARY}
+			ln -sf ${UBOOT_DTB_IMAGE} ${DEPLOYDIR}/${UBOOT_DTB_SYMLINK}
+		fi
 
-	if [ -f ${B}/${UBOOT_DTB_BINARY} ]; then
-		install ${B}/${UBOOT_DTB_BINARY} ${DEPLOYDIR}/${UBOOT_DTB_IMAGE}
-		rm -f ${UBOOT_DTB_BINARY} ${UBOOT_DTB_SYMLINK}
-		ln -sf ${UBOOT_DTB_IMAGE} ${UBOOT_DTB_SYMLINK}
-		ln -sf ${UBOOT_DTB_IMAGE} ${UBOOT_DTB_BINARY}
-	fi
-	if [ -f ${B}/${UBOOT_NODTB_BINARY} ]; then
-		install ${B}/${UBOOT_NODTB_BINARY} ${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}
-		rm -f ${UBOOT_NODTB_BINARY} ${UBOOT_NODTB_SYMLINK}
-		ln -sf ${UBOOT_NODTB_IMAGE} ${UBOOT_NODTB_SYMLINK}
-		ln -sf ${UBOOT_NODTB_IMAGE} ${UBOOT_NODTB_BINARY}
-	fi
-}
+		if [ -f ${B}/${UBOOT_NODTB_BINARY} ]; then
+			install ${B}/${UBOOT_NODTB_BINARY} ${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}
+			ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_SYMLINK}
+			ln -sf ${UBOOT_NODTB_IMAGE} ${DEPLOYDIR}/${UBOOT_NODTB_BINARY}
+		fi
 
-do_concat_dtb () {
-	# Concatenate U-Boot w/o DTB & DTB with public key
-	# (cf. kernel-fitimage.bbclass for more details)
-	if [ "x${UBOOT_SIGN_ENABLE}" = "x1" ]; then
+		# Concatenate U-Boot w/o DTB & DTB with public key
+		# (cf. kernel-fitimage.bbclass for more details)
+		deployed_uboot_dtb_binary='${DEPLOY_DIR_IMAGE}/${UBOOT_DTB_IMAGE}'
 		if [ "x${UBOOT_SUFFIX}" = "ximg" -o "x${UBOOT_SUFFIX}" = "xrom" ] && \
-			[ -e "${DEPLOYDIR}/${UBOOT_DTB_IMAGE}" ]; then
+			[ -e "$deployed_uboot_dtb_binary" ]; then
 			cd ${B}
-			oe_runmake EXT_DTB=${DEPLOYDIR}/${UBOOT_DTB_IMAGE}
+			oe_runmake EXT_DTB=$deployed_uboot_dtb_binary
 			install ${B}/${UBOOT_BINARY} ${DEPLOYDIR}/${UBOOT_IMAGE}
-			install ${B}/${UBOOT_BINARY} ${DEPLOY_DIR_IMAGE}/${UBOOT_IMAGE}
-		elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "${DEPLOYDIR}/${UBOOT_DTB_IMAGE}" ]; then
+		elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then
 			cd ${DEPLOYDIR}
-			cat ${UBOOT_NODTB_IMAGE} ${UBOOT_DTB_IMAGE} | tee ${B}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
-		else
+			cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
+		elif [ -n "${UBOOT_DTB_BINARY}" ]; then
 			bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
 		fi
 	fi
 }
 
+# Install UBOOT_DTB_BINARY to datadir, so that the kernel can use it for
+# signing, and the kernel will deploy UBOOT_DTB_BINARY after signing it.
+do_install_append() {
+	if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${PN}" = "${UBOOT_PN}" ]; then
+		if [ -f ${B}/${UBOOT_DTB_BINARY} ]; then
+			install -d ${D}${datadir}
+			# UBOOT_DTB_BINARY is a symlink to UBOOT_DTB_IMAGE, so we
+			# need both of them.
+			install ${B}/${UBOOT_DTB_BINARY} ${D}${datadir}/${UBOOT_DTB_IMAGE}
+			ln -sf ${UBOOT_DTB_IMAGE} ${D}${datadir}/${UBOOT_DTB_BINARY}
+		elif [ -n "${UBOOT_DTB_BINARY}" ]; then
+			bbwarn "${B}/${UBOOT_DTB_BINARY} not found"
+		fi
+	fi
+}
+
 python () {
-	uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
-	if d.getVar('UBOOT_SIGN_ENABLE') == '1' and d.getVar('PN') == uboot_pn:
-		kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel')
+    if d.getVar('UBOOT_SIGN_ENABLE') == '1' and d.getVar('PN') == d.getVar('UBOOT_PN'):
+        kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel')
 
-		# u-boot.dtb and u-boot-nodtb.bin are deployed _before_ do_deploy
-		# Thus, do_deploy_setscene will also populate them in DEPLOY_IMAGE_DIR
-		bb.build.addtask('do_deploy_dtb', 'do_deploy', 'do_compile', d)
+        # Make "bitbake u-boot -cdeploy" deploy the signed u-boot.dtb
+        d.appendVarFlag('do_deploy', 'depends', ' %s:do_deploy' % kernel_pn)
 
-		# do_concat_dtb is scheduled _before_ do_install as it overwrite the
-		# u-boot.bin in both DEPLOYDIR and DEPLOY_IMAGE_DIR.
-		bb.build.addtask('do_concat_dtb', 'do_install', None, d)
-		d.appendVarFlag('do_concat_dtb', 'depends', ' %s:do_assemble_fitimage' % kernel_pn)
+        # The kernel's do_deploy is a little special, so we can't use
+        # do_deploy_append, otherwise it would override
+        # kernel_do_deploy.
+        d.appendVarFlag('do_deploy', 'prefuncs', ' concat_dtb')
 }
diff --git a/poky/meta/classes/update-alternatives.bbclass b/poky/meta/classes/update-alternatives.bbclass
index a7f1a6f..537e85d 100644
--- a/poky/meta/classes/update-alternatives.bbclass
+++ b/poky/meta/classes/update-alternatives.bbclass
@@ -89,15 +89,21 @@
     if not 'virtual/update-alternatives' in d.getVar('PROVIDES'):
         d.appendVar('DEPENDS', ' virtual/${MLPREFIX}update-alternatives')
 
-python __anonymous() {
+def update_alternatives_enabled(d):
     # Update Alternatives only works on target packages...
     if bb.data.inherits_class('native', d) or \
        bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or \
        bb.data.inherits_class('cross-canadian', d):
-        return
+        return False
 
     # Disable when targeting mingw32 (no target support)
     if d.getVar("TARGET_OS") == "mingw32":
+        return False
+
+    return True
+
+python __anonymous() {
+    if not update_alternatives_enabled(d):
         return
 
     # compute special vardeps
@@ -125,9 +131,21 @@
 populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
 
 # We need to do the rename after the image creation step, but before
-# the split and strip steps..  packagecopy seems to be the earliest reasonable
-# place.
-python perform_packagecopy_append () {
+# the split and strip steps..  PACKAGE_PREPROCESS_FUNCS is the right
+# place for that.
+PACKAGE_PREPROCESS_FUNCS += "apply_update_alternative_renames"
+python apply_update_alternative_renames () {
+    if not update_alternatives_enabled(d):
+        return
+
+    import re
+
+    def update_files(alt_target, alt_target_rename, pkg, d):
+        f = d.getVar('FILES_' + pkg)
+        if f:
+            f = re.sub(r'(^|\s)%s(\s|$)' % re.escape(alt_target), r'\1%s\2' % alt_target_rename, f)
+            d.setVar('FILES_' + pkg, f)
+
     # Check for deprecated usage...
     pn = d.getVar('BPN')
     if d.getVar('ALTERNATIVE_LINKS') != None:
@@ -137,7 +155,7 @@
     pkgdest = d.getVar('PKGD')
     for pkg in (d.getVar('PACKAGES') or "").split():
         # If the src == dest, we know we need to rename the dest by appending ${BPN}
-        link_rename = {}
+        link_rename = []
         for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
             if not alt_link:
@@ -163,10 +181,11 @@
                 elif os.path.lexists(src):
                     if os.path.islink(src):
                         # Delay rename of links
-                        link_rename[alt_target] = alt_target_rename
+                        link_rename.append((alt_target, alt_target_rename))
                     else:
                         bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
                         os.rename(src, dest)
+                        update_files(alt_target, alt_target_rename, pkg, d)
                 else:
                     bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
                     continue
@@ -174,31 +193,35 @@
 
         # Process delayed link names
         # Do these after other renames so we can correct broken links
-        for alt_target in link_rename:
+        for (alt_target, alt_target_rename) in link_rename:
             src = '%s/%s' % (pkgdest, alt_target)
-            dest = '%s/%s' % (pkgdest, link_rename[alt_target])
-            link = os.readlink(src)
+            dest = '%s/%s' % (pkgdest, alt_target_rename)
             link_target = oe.path.realpath(src, pkgdest, True)
 
             if os.path.lexists(link_target):
                 # Ok, the link_target exists, we can rename
-                bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
+                bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, alt_target_rename))
                 os.rename(src, dest)
             else:
                 # Try to resolve the broken link to link.${BPN}
                 link_maybe = '%s.%s' % (os.readlink(src), pn)
                 if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
                     # Ok, the renamed link target exists.. create a new link, and remove the original
-                    bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
+                    bb.note('%s: Creating new link %s -> %s' % (pn, alt_target_rename, link_maybe))
                     os.symlink(link_maybe, dest)
                     os.unlink(src)
                 else:
                     bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
+                    continue
+            update_files(alt_target, alt_target_rename, pkg, d)
 }
 
 PACKAGESPLITFUNCS_prepend = "populate_packages_updatealternatives "
 
 python populate_packages_updatealternatives () {
+    if not update_alternatives_enabled(d):
+        return
+
     pn = d.getVar('BPN')
 
     # Do actual update alternatives processing
@@ -252,10 +275,15 @@
 }
 
 python package_do_filedeps_append () {
+    if update_alternatives_enabled(d):
+        apply_update_alternative_provides(d)
+}
+
+def apply_update_alternative_provides(d):
     pn = d.getVar('BPN')
     pkgdest = d.getVar('PKGDEST')
 
-    for pkg in packages.split():
+    for pkg in d.getVar('PACKAGES').split():
         for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
             alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
             alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
@@ -273,5 +301,4 @@
             d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
             if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg) or ""):
                 d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
-}
 
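The update_files() helper added above rewrites a renamed alternative inside
FILES_<pkg>; the \1/\2 backreferences keep the surrounding whitespace, so
neighbouring entries are untouched. A worked example with hypothetical paths:

    import re
    alt_target, alt_target_rename = "/usr/bin/vi", "/usr/bin/vi.busybox"
    f = "/usr/bin/sh /usr/bin/vi /usr/bin/awk"   # hypothetical FILES value
    f = re.sub(r'(^|\s)%s(\s|$)' % re.escape(alt_target),
               r'\1%s\2' % alt_target_rename, f)
    assert f == "/usr/bin/sh /usr/bin/vi.busybox /usr/bin/awk"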
diff --git a/poky/meta/classes/utils.bbclass b/poky/meta/classes/utils.bbclass
index 0016e5c..cd3d057 100644
--- a/poky/meta/classes/utils.bbclass
+++ b/poky/meta/classes/utils.bbclass
@@ -264,7 +264,7 @@
 realpath=\`readlink -fn \$0\`
 realdir=\`dirname \$realpath\`
 export $exportstring
-exec -a \`dirname \$realpath\`/$cmdname \`dirname \$realpath\`/$cmdname.real "\$@"
+exec -a "\$0" \$realdir/$cmdname.real "\$@"
 END
 	chmod +x $cmd
 }
diff --git a/poky/meta/classes/waf.bbclass b/poky/meta/classes/waf.bbclass
index 19e9376..8e6d754 100644
--- a/poky/meta/classes/waf.bbclass
+++ b/poky/meta/classes/waf.bbclass
@@ -1,6 +1,8 @@
 # avoids build breaks when using no-static-libs.inc
 DISABLE_STATIC = ""
 
+B = "${WORKDIR}/build"
+
 EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}"
 
 python waf_preconfigure() {
@@ -22,16 +24,16 @@
 do_configure[prefuncs] += "waf_preconfigure"
 
 waf_do_configure() {
-	${S}/waf configure --prefix=${prefix} ${WAF_EXTRA_CONF} ${EXTRA_OECONF}
+	(cd ${S} && ./waf configure -o ${B} --prefix=${prefix} ${WAF_EXTRA_CONF} ${EXTRA_OECONF})
 }
 
 do_compile[progress] = "outof:^\[\s*(\d+)/\s*(\d+)\]\s+"
 waf_do_compile()  {
-	${S}/waf build ${@oe.utils.parallel_make_argument(d, '-j%d', limit=64)}
+	(cd ${S} && ./waf build ${@oe.utils.parallel_make_argument(d, '-j%d', limit=64)})
 }
 
 waf_do_install() {
-	${S}/waf install --destdir=${D}
+	(cd ${S} && ./waf install --destdir=${D})
 }
 
 EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/poky/meta/classes/xmlcatalog.bbclass b/poky/meta/classes/xmlcatalog.bbclass
new file mode 100644
index 0000000..075aef8
--- /dev/null
+++ b/poky/meta/classes/xmlcatalog.bbclass
@@ -0,0 +1,24 @@
+# A whitespace-separated list of XML catalogs to be registered, for example
+# "${sysconfdir}/xml/docbook-xml.xml".
+XMLCATALOGS ?= ""
+
+SYSROOT_PREPROCESS_FUNCS_append = " xmlcatalog_sstate_postinst"
+
+xmlcatalog_complete() {
+	ROOTCATALOG="${STAGING_ETCDIR_NATIVE}/xml/catalog"
+	if [ ! -f $ROOTCATALOG ]; then
+		mkdir --parents $(dirname $ROOTCATALOG)
+		xmlcatalog --noout --create $ROOTCATALOG
+	fi
+	for CATALOG in ${XMLCATALOGS}; do
+		xmlcatalog --noout --add nextCatalog unused file://$CATALOG $ROOTCATALOG
+	done
+}
+
+xmlcatalog_sstate_postinst() {
+	mkdir -p ${SYSROOT_DESTDIR}${bindir}
+	dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN}-xmlcatalog
+	echo '#!/bin/sh' > $dest
+	echo '${xmlcatalog_complete}' >> $dest
+	chmod 0755 $dest
+}
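A minimal usage sketch for the new class, assuming a hypothetical recipe that
installs a catalog file of its own:

    # hypothetical recipe snippet (illustrative only)
    inherit xmlcatalog
    XMLCATALOGS = "${sysconfdir}/xml/docbook-xml.xml"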