Squashed 'import-layers/yocto-poky/' changes from dc8508f6099..67491b0c104

Yocto 2.2.2 (Morty)

Change-Id: Id9a452e28940d9f166957de243d9cb1d8818704e
git-subtree-dir: import-layers/yocto-poky
git-subtree-split: 67491b0c104101bb9f366d697edd23c895be4302
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/import-layers/yocto-poky/meta/classes/archiver.bbclass b/import-layers/yocto-poky/meta/classes/archiver.bbclass
index 9239983..188f8c0 100644
--- a/import-layers/yocto-poky/meta/classes/archiver.bbclass
+++ b/import-layers/yocto-poky/meta/classes/archiver.bbclass
@@ -125,7 +125,7 @@
 # (e.g. git repositories) is "unpacked" and then put into a tarball.
 python do_ar_original() {
 
-    import shutil, tarfile, tempfile
+    import shutil, tempfile
 
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return
@@ -261,13 +261,9 @@
         filename = '%s.tar.gz' % d.getVar('PF', True)
     tarname = os.path.join(ar_outdir, filename)
 
-    srcdir = srcdir.rstrip('/')
-    dirname = os.path.dirname(srcdir)
-    basename = os.path.basename(srcdir)
-    os.chdir(dirname)
     bb.note('Creating %s' % tarname)
     tar = tarfile.open(tarname, 'w:gz')
-    tar.add(basename)
+    tar.add(srcdir, arcname=os.path.basename(srcdir))
     tar.close()
 
 # creating .diff.gz between source.orig and source
@@ -353,8 +349,8 @@
     bbappend_files = d.getVar('BBINCLUDED', True).split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
-    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
-    bbappend_re1 = re.compile( r".*/%s\.bbappend$" %pn)
+    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
+    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
     for file in bbappend_files:
         if bbappend_re.match(file) or bbappend_re1.match(file):
             shutil.copy(file, outdir)
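
[Aside for readers of this hunk, not part of the patch: tarfile's arcname parameter stores entries under a chosen name, which is why the os.chdir()/basename dance above could be dropped. A minimal standalone sketch with hypothetical paths:]

    import os
    import tarfile

    srcdir = "/tmp/work/foo-1.0"                 # hypothetical unpacked source tree
    tarname = "/tmp/deploy-src/foo-1.0.tar.gz"   # hypothetical output tarball

    # Entries are recorded relative to the directory's basename rather than its
    # absolute path, so the caller's working directory never has to change.
    with tarfile.open(tarname, "w:gz") as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir))
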
diff --git a/import-layers/yocto-poky/meta/classes/cmake.bbclass b/import-layers/yocto-poky/meta/classes/cmake.bbclass
index 3e762de..fad0baa 100644
--- a/import-layers/yocto-poky/meta/classes/cmake.bbclass
+++ b/import-layers/yocto-poky/meta/classes/cmake.bbclass
@@ -19,6 +19,8 @@
 OECMAKE_CXX_FLAGS_RELEASE ?= "-DNDEBUG"
 OECMAKE_C_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CPPFLAGS} ${LDFLAGS}"
 OECMAKE_CXX_LINK_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CXXFLAGS} ${LDFLAGS}"
+CXXFLAGS += "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS}"
+CFLAGS += "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS}"
 
 OECMAKE_RPATH ?= ""
 OECMAKE_PERLNATIVE_DIR ??= ""
diff --git a/import-layers/yocto-poky/meta/classes/cve-check.bbclass b/import-layers/yocto-poky/meta/classes/cve-check.bbclass
index 1425a40..75b8fa9 100644
--- a/import-layers/yocto-poky/meta/classes/cve-check.bbclass
+++ b/import-layers/yocto-poky/meta/classes/cve-check.bbclass
@@ -20,6 +20,10 @@
 # the only method to check against CVEs. Running this tool
 # doesn't guarantee your packages are free of CVEs.
 
+# The product name that the CVE database uses.  Defaults to BPN, but may need to
+# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ?= "${BPN}"
+
 CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
 CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvd.db"
 
@@ -39,7 +43,7 @@
 
 # Whitelist for CVE and version of package
 CVE_CHECK_CVE_WHITELIST = "{\
-    'CVE-2014-2524': ('6.3',), \
+    'CVE-2014-2524': ('6.3','5.2',), \
 }"
 
 python do_cve_check () {
@@ -144,7 +148,7 @@
 
     cves_patched = []
     cves_unpatched = []
-    bpn = d.getVar("BPN", True)
+    bpn = d.getVar("CVE_PRODUCT", True)
     pv = d.getVar("PV", True).split("git+")[0]
     cves = " ".join(patched_cves)
     cve_db_dir = d.getVar("CVE_CHECK_DB_DIR", True)
diff --git a/import-layers/yocto-poky/meta/classes/icecc.bbclass b/import-layers/yocto-poky/meta/classes/icecc.bbclass
index a837894..c572571 100644
--- a/import-layers/yocto-poky/meta/classes/icecc.bbclass
+++ b/import-layers/yocto-poky/meta/classes/icecc.bbclass
@@ -42,6 +42,7 @@
 
 DEPENDS_prepend += "${@icecc_dep_prepend(d)} "
 
+get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC"
 def get_cross_kernel_cc(bb,d):
     kernel_cc = d.getVar('KERNEL_CC', False)
 
diff --git a/import-layers/yocto-poky/meta/classes/image.bbclass b/import-layers/yocto-poky/meta/classes/image.bbclass
index 9f5869e..a9ab2fa 100644
--- a/import-layers/yocto-poky/meta/classes/image.bbclass
+++ b/import-layers/yocto-poky/meta/classes/image.bbclass
@@ -457,7 +457,7 @@
         rm_tmp_images = set()
         def gen_conversion_cmds(bt):
             for ctype in sorted(ctypes):
-                if bt[bt.find('.') + 1:] == ctype:
+                if bt.endswith("." + ctype):
                     type = bt[0:-len(ctype) - 1]
                     if type.startswith("debugfs_"):
                         type = type[8:]
@@ -487,7 +487,7 @@
         # Clean up after applying all conversion commands. Some of them might
         # use the same input, therefore we cannot delete sooner without applying
         # some complex dependency analysis.
-        for image in rm_tmp_images:
+        for image in sorted(rm_tmp_images):
             cmds.append("\trm " + image)
 
         after = 'do_image'
diff --git a/import-layers/yocto-poky/meta/classes/image_types.bbclass b/import-layers/yocto-poky/meta/classes/image_types.bbclass
index 1ce8334..3bfa60b 100644
--- a/import-layers/yocto-poky/meta/classes/image_types.bbclass
+++ b/import-layers/yocto-poky/meta/classes/image_types.bbclass
@@ -17,17 +17,25 @@
                 d += ":do_populate_sysroot"
             deps.add(d)
 
+    # Take a type in the form of foo.bar.car and split it into the items
+    # needed for the image deps "foo", and the conversion deps ["bar", "car"]
+    def split_types(typestring):
+        types = typestring.split(".")
+        return types[0], types[1:]
+
     fstypes = set((d.getVar('IMAGE_FSTYPES', True) or "").split())
     fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS', True) or "").split())
 
     deps = set()
     for typestring in fstypes:
-        types = typestring.split(".")
-        basetype, resttypes = types[0], types[1:]
-
+        basetype, resttypes = split_types(typestring)
         adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps)
+
         for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split():
-            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends, True) , deps)
+            base, rest = split_types(typedepends)
+            adddep(d.getVar('IMAGE_DEPENDS_%s' % base, True) , deps)
+            resttypes += rest
+
         for ctype in resttypes:
             adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype, True), deps)
             adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps)
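
[For reference: the new helper simply splits a compound type string into its base type plus trailing conversion suffixes, and both the base and any suffixes inherited from IMAGE_TYPEDEP_* entries then feed the dependency lookups. A standalone sketch, using one of the u-boot conversion types listed below:]

    def split_types(typestring):
        # "ext4.gz.u-boot" -> ("ext4", ["gz", "u-boot"])
        types = typestring.split(".")
        return types[0], types[1:]

    base, conversions = split_types("ext4.gz.u-boot")
    assert base == "ext4"                    # drives IMAGE_DEPENDS_ext4
    assert conversions == ["gz", "u-boot"]   # drive CONVERSION_DEPENDS_*
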
diff --git a/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass b/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass
index 6c8c1ff..933fa4d 100644
--- a/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass
+++ b/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass
@@ -3,9 +3,6 @@
 oe_mkimage () {
     mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C $2 -n ${IMAGE_NAME} \
         -d ${IMGDEPLOYDIR}/$1 ${IMGDEPLOYDIR}/$1.u-boot
-    if [ x$3 = x"clean" ]; then
-        rm $1
-    fi
 }
 
 CONVERSIONTYPES += "gz.u-boot bz2.u-boot lzma.u-boot u-boot"
@@ -14,13 +11,13 @@
 CONVERSION_CMD_u-boot      = "oe_mkimage ${IMAGE_NAME}.rootfs.${type} none"
 
 CONVERSION_DEPENDS_gz.u-boot = "u-boot-mkimage-native"
-CONVERSION_CMD_gz.u-boot      = "${CONVERSION_CMD_gz}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.gz gzip clean"
+CONVERSION_CMD_gz.u-boot      = "${CONVERSION_CMD_gz}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.gz gzip"
 
 CONVERSION_DEPENDS_bz2.u-boot = "u-boot-mkimage-native"
-CONVERSION_CMD_bz2.u-boot      = "${CONVERSION_CMD_bz2}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.bz2 bzip2 clean"
+CONVERSION_CMD_bz2.u-boot      = "${CONVERSION_CMD_bz2}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.bz2 bzip2"
 
 CONVERSION_DEPENDS_lzma.u-boot = "u-boot-mkimage-native"
-CONVERSION_CMD_lzma.u-boot      = "${CONVERSION_CMD_lzma}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.lzma lzma clean"
+CONVERSION_CMD_lzma.u-boot      = "${CONVERSION_CMD_lzma}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.lzma lzma"
 
 IMAGE_TYPES += "ext2.u-boot ext2.gz.u-boot ext2.bz2.u-boot ext2.lzma.u-boot ext3.gz.u-boot ext4.gz.u-boot cpio.gz.u-boot"
 
diff --git a/import-layers/yocto-poky/meta/classes/insane.bbclass b/import-layers/yocto-poky/meta/classes/insane.bbclass
index 1d73778..7bbe8b6 100644
--- a/import-layers/yocto-poky/meta/classes/insane.bbclass
+++ b/import-layers/yocto-poky/meta/classes/insane.bbclass
@@ -63,6 +63,8 @@
                         "arm" :       (40,     0,    0,          True,          32),
                       },
             "elf" : {
+                        "aarch64" :   (183,    0,    0,          True,          64),
+                        "aarch64_be" :(183,    0,    0,          False,         64),
                         "i586" :      (3,      0,    0,          True,          32),
                         "x86_64":     (62,     0,    0,          True,          64),
                         "epiphany":   (4643,   0,    0,          True,          32),
@@ -403,47 +405,6 @@
     """
     return
 
-QAPATHTEST[unsafe-references-in-binaries] = "package_qa_check_unsafe_references_in_binaries"
-def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages):
-    """
-    Ensure binaries in base_[bindir|sbindir|libdir] do not link to files under exec_prefix
-    """
-    if unsafe_references_skippable(path, name, d):
-        return
-
-    if elf:
-        import subprocess as sub
-        pn = d.getVar('PN', True)
-
-        exec_prefix = d.getVar('exec_prefix', True)
-        sysroot_path = d.getVar('STAGING_DIR_TARGET', True)
-        sysroot_path_usr = sysroot_path + exec_prefix
-
-        try:
-            ldd_output = bb.process.Popen(["prelink-rtld", "--root", sysroot_path, path], stdout=sub.PIPE).stdout.read().decode("utf-8")
-        except bb.process.CmdError:
-            error_msg = pn + ": prelink-rtld aborted when processing %s" % path
-            package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
-            return False
-
-        if sysroot_path_usr in ldd_output:
-            ldd_output = ldd_output.replace(sysroot_path, "")
-
-            pkgdest = d.getVar('PKGDEST', True)
-            packages = d.getVar('PACKAGES', True)
-
-            for package in packages.split():
-                short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
-                if (short_path != path):
-                    break
-
-            base_err = pn + ": %s, installed in the base_prefix, requires a shared library under exec_prefix (%s)" % (short_path, exec_prefix)
-            for line in ldd_output.split('\n'):
-                if exec_prefix in line:
-                    error_msg = "%s: %s" % (base_err, line.strip())
-                    package_qa_handle_error("unsafe-references-in-binaries", error_msg, d)
-
-            return False
 
 QAPATHTEST[unsafe-references-in-scripts] = "package_qa_check_unsafe_references_in_scripts"
 def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
@@ -647,8 +608,8 @@
         return
 
     tmpdir = d.getVar('TMPDIR', True)
-    with open(path) as f:
-        file_content = f.read()
+    with open(path, 'rb') as f:
+        file_content = f.read().decode('utf-8', errors='ignore')
         if tmpdir in file_content:
             package_qa_add_message(messages, "buildpaths", "File %s in package contained reference to tmpdir" % package_qa_clean_path(path,d))
 
@@ -820,6 +781,23 @@
 
     return sane
 
+# Run all package-wide warnfuncs and errorfuncs
+def package_qa_package(warnfuncs, errorfuncs, skip, package, d):
+    warnings = {}
+    errors = {}
+
+    for func in warnfuncs:
+        func(package, d, warnings)
+    for func in errorfuncs:
+        func(package, d, errors)
+
+    for w in warnings:
+        package_qa_handle_error(w, warnings[w], d)
+    for e in errors:
+        package_qa_handle_error(e, errors[e], d)
+
+    return len(errors) == 0
+
 # Walk over all files in a directory and call func
 def package_qa_walk(warnfuncs, errorfuncs, skip, package, d):
     import oe.qa
@@ -855,7 +833,7 @@
 
     if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
         localdata = bb.data.createCopy(d)
-        localdata.setVar('OVERRIDES', pkg)
+        localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES', True) + ':' + pkg)
         bb.data.update_data(localdata)
 
         # Now check the RDEPENDS
@@ -983,31 +961,24 @@
     check_valid_deps('RREPLACES')
     check_valid_deps('RCONFLICTS')
 
-QAPATHTEST[expanded-d] = "package_qa_check_expanded_d"
-def package_qa_check_expanded_d(path,name,d,elf,messages):
+QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
+def package_qa_check_expanded_d(package, d, messages):
     """
     Check for the expanded D (${D}) value in pkg_* and FILES
     variables, warn the user to use it correctly.
     """
-
     sane = True
-    expanded_d = d.getVar('D',True)
+    expanded_d = d.getVar('D', True)
 
-    # Get packages for current recipe and iterate
-    packages = d.getVar('PACKAGES', True).split(" ")
-    for pak in packages:
-    # Go through all variables and check if expanded D is found, warn the user accordingly
-        for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
-            bbvar = d.getVar(var + "_" + pak, False)
-            if bbvar:
-                # Bitbake expands ${D} within bbvar during the previous step, so we check for its expanded value
-                if expanded_d in bbvar:
-                    if var == 'FILES':
-                        package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % pak)
-                        sane = False
-                    else:
-                        package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, pak))
-                        sane = False
+    for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
+        bbvar = d.getVar(var + "_" + package, True) or ""
+        if expanded_d in bbvar:
+            if var == 'FILES':
+                package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
+                sane = False
+            else:
+                package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
+                sane = False
     return sane
 
 def package_qa_check_encoding(keys, encode, d):
@@ -1115,7 +1086,6 @@
     if not packages:
         return
 
-    testmatrix = d.getVarFlags("QAPATHTEST")
     import re
     # The package name matches the [a-z0-9.+-]+ regular expression
     pkgname_pattern = re.compile("^[a-z0-9.+-]+$")
@@ -1125,28 +1095,33 @@
     for dep in taskdepdata:
         taskdeps.add(taskdepdata[dep][0])
 
-    g = globals()
     for package in packages:
+        def parse_test_matrix(matrix_name):
+            testmatrix = d.getVarFlags(matrix_name) or {}
+            g = globals()
+            warnchecks = []
+            for w in (d.getVar("WARN_QA", True) or "").split():
+                if w in skip:
+                   continue
+                if w in testmatrix and testmatrix[w] in g:
+                    warnchecks.append(g[testmatrix[w]])
+                if w == 'unsafe-references-in-binaries':
+                    oe.utils.write_ld_so_conf(d)
+
+            errorchecks = []
+            for e in (d.getVar("ERROR_QA", True) or "").split():
+                if e in skip:
+                   continue
+                if e in testmatrix and testmatrix[e] in g:
+                    errorchecks.append(g[testmatrix[e]])
+                if e == 'unsafe-references-in-binaries':
+                    oe.utils.write_ld_so_conf(d)
+            return warnchecks, errorchecks
+
         skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
         if skip:
             bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
-        warnchecks = []
-        for w in (d.getVar("WARN_QA", True) or "").split():
-            if w in skip:
-               continue
-            if w in testmatrix and testmatrix[w] in g:
-                warnchecks.append(g[testmatrix[w]])
-            if w == 'unsafe-references-in-binaries':
-                oe.utils.write_ld_so_conf(d)
 
-        errorchecks = []
-        for e in (d.getVar("ERROR_QA", True) or "").split():
-            if e in skip:
-               continue
-            if e in testmatrix and testmatrix[e] in g:
-                errorchecks.append(g[testmatrix[e]])
-            if e == 'unsafe-references-in-binaries':
-                oe.utils.write_ld_so_conf(d)
 
         bb.note("Checking Package: %s" % package)
         # Check package name
@@ -1154,8 +1129,11 @@
             package_qa_handle_error("pkgname",
                     "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
 
-        path = "%s/%s" % (pkgdest, package)
-        package_qa_walk(warnchecks, errorchecks, skip, package, d)
+        warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
+        package_qa_walk(warn_checks, error_checks, skip, package, d)
+
+        warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
+        package_qa_package(warn_checks, error_checks, skip, package, d)
 
         package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
         package_qa_check_deps(package, pkgdest, skip, d)
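
[For reference: the QAPKGTEST flag variable introduced in this file mirrors QAPATHTEST, except the registered function runs once per package and receives the package name instead of a file path. A minimal sketch of registering a hypothetical package-wide check (names are illustrative, not part of the patch); it would only run if 'hypothetical-check' were also listed in WARN_QA or ERROR_QA:]

    QAPKGTEST[hypothetical-check] = "package_qa_check_hypothetical"
    def package_qa_check_hypothetical(package, d, messages):
        # Called once per package; record issues via package_qa_add_message().
        if package.endswith("-bad"):
            package_qa_add_message(messages, "hypothetical-check",
                                   "%s: illustrative failure" % package)
            return False
        return True
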
diff --git a/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass b/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass
index 6160a29..a60327a 100644
--- a/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass
+++ b/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass
@@ -149,12 +149,18 @@
 	elements="`echo -n ${bsp_definition} ${sccs} ${patches} ${KERNEL_FEATURES}`"
 	if [ -n "${elements}" ]; then
 		scc --force -o ${S}/${meta_dir}:cfg,meta ${includes} ${bsp_definition} ${sccs} ${patches} ${KERNEL_FEATURES}
+		if [ $? -ne 0 ]; then
+			bbfatal_log "Could not generate configuration queue for ${KMACHINE}."
+		fi
 	fi
 
 	# run2: only generate patches for elements that have been passed on the SRC_URI
 	elements="`echo -n ${sccs} ${patches} ${KERNEL_FEATURES}`"
 	if [ -n "${elements}" ]; then
 		scc --force -o ${S}/${meta_dir}:patch --cmds patch ${includes} ${sccs} ${patches} ${KERNEL_FEATURES}
+		if [ $? -ne 0 ]; then
+			bbfatal_log "Could not generate source patch queue for ${KMACHINE}."
+		fi
 	fi
 }
 
diff --git a/import-layers/yocto-poky/meta/classes/kernel.bbclass b/import-layers/yocto-poky/meta/classes/kernel.bbclass
index 25a153c..eefe574 100644
--- a/import-layers/yocto-poky/meta/classes/kernel.bbclass
+++ b/import-layers/yocto-poky/meta/classes/kernel.bbclass
@@ -43,12 +43,12 @@
     typeformake = re.sub(r'\.gz', '', types)
     d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake)
 
-    for type in typeformake.split():
+    for type in types.split():
         typelower = type.lower()
 
         d.appendVar('PACKAGES', ' ' + 'kernel-image-' + typelower)
 
-        d.setVar('FILES_kernel-image-' + typelower, '/boot/' + type + '*')
+        d.setVar('FILES_kernel-image-' + typelower, '/boot/' + type + '-${KERNEL_VERSION_NAME}')
 
         d.appendVar('RDEPENDS_kernel-image', ' ' + 'kernel-image-' + typelower)
 
@@ -165,7 +165,7 @@
 	mkdir -p ${B}/usr
 	# Find and use the first initramfs image archive type we find
 	rm -f ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.cpio
-	for img in cpio.gz cpio.lz4 cpio.lzo cpio.lzma cpio.xz; do
+	for img in cpio cpio.gz cpio.lz4 cpio.lzo cpio.lzma cpio.xz; do
 		if [ -e "${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE}-${MACHINE}.$img" ]; then
 			cp ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE}-${MACHINE}.$img ${B}/usr/.
 			case $img in
@@ -235,12 +235,6 @@
 				mv -f ${KERNEL_OUTPUT_DIR}/$type.bak ${KERNEL_OUTPUT_DIR}/$type
 			fi
 		done
-		# Update install area
-		for type in ${KERNEL_IMAGETYPES} ; do
-			echo "There is kernel image bundled with initramfs: ${B}/${KERNEL_OUTPUT_DIR}/$type.initramfs"
-			install -m 0644 ${B}/${KERNEL_OUTPUT_DIR}/$type.initramfs ${D}/boot/$type-initramfs-${MACHINE}.bin
-			echo "${B}/${KERNEL_OUTPUT_DIR}/$type.initramfs"
-		done
 	fi
 }
 do_bundle_initramfs[dirs] = "${B}"
@@ -270,6 +264,7 @@
 		oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
 		for type in ${KERNEL_IMAGETYPES} ; do
 			if test "${typeformake}.gz" = "${type}"; then
+				mkdir -p "${KERNEL_OUTPUT_DIR}"
 				gzip -9c < "${typeformake}" > "${KERNEL_OUTPUT_DIR}/${type}"
 				break;
 			fi
@@ -486,7 +481,7 @@
 FILES_kernel-base = "/lib/modules/${KERNEL_VERSION}/modules.order /lib/modules/${KERNEL_VERSION}/modules.builtin"
 FILES_kernel-image = ""
 FILES_kernel-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} /lib/modules/${KERNEL_VERSION}/build"
-FILES_kernel-vmlinux = "/boot/vmlinux*"
+FILES_kernel-vmlinux = "/boot/vmlinux-${KERNEL_VERSION_NAME}"
 FILES_kernel-modules = ""
 RDEPENDS_kernel = "kernel-base"
 # Allow machines to override this dependency if kernel image files are
@@ -612,8 +607,6 @@
 		ln -sf ${base_name}.bin ${DEPLOYDIR}/${type}
 	done
 
-	cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOYDIR}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt
-
 	cd ${B}
 	# Update deploy directory
 	for type in ${KERNEL_IMAGETYPES} ; do
diff --git a/import-layers/yocto-poky/meta/classes/license.bbclass b/import-layers/yocto-poky/meta/classes/license.bbclass
index da4fc3e..721343d 100644
--- a/import-layers/yocto-poky/meta/classes/license.bbclass
+++ b/import-layers/yocto-poky/meta/classes/license.bbclass
@@ -279,7 +279,7 @@
     """
 
     dep_files = []
-    excluded_files = ["README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt"]
+    excluded_files = []
     with open(man_file, "r") as manifest:
         all_files = manifest.read()
     for f in all_files.splitlines():
@@ -351,6 +351,8 @@
             dst = os.path.join(destdir, basename)
             if os.path.exists(dst):
                 os.remove(dst)
+            if os.path.islink(src):
+                src = os.path.realpath(src)
             canlink = os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev)
             if canlink:
                 try:
diff --git a/import-layers/yocto-poky/meta/classes/mirrors.bbclass b/import-layers/yocto-poky/meta/classes/mirrors.bbclass
index 1184708..2cdc71b 100644
--- a/import-layers/yocto-poky/meta/classes/mirrors.bbclass
+++ b/import-layers/yocto-poky/meta/classes/mirrors.bbclass
@@ -2,24 +2,24 @@
 ${DEBIAN_MIRROR}	http://snapshot.debian.org/archive/debian-archive/20120328T092752Z/debian/pool \n \
 ${DEBIAN_MIRROR}	http://snapshot.debian.org/archive/debian-archive/20110127T084257Z/debian/pool \n \
 ${DEBIAN_MIRROR}	http://snapshot.debian.org/archive/debian-archive/20090802T004153Z/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.de.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.au.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.cl.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.hr.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.fi.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.hk.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.hu.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.ie.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.it.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.jp.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.no.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.pl.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.ro.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.si.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.es.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.se.debian.org/debian/pool \n \
-${DEBIAN_MIRROR}	ftp://ftp.tr.debian.org/debian/pool \n \
-${GNU_MIRROR}	ftp://mirrors.kernel.org/gnu \n \
+${DEBIAN_MIRROR}	http://ftp.de.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.au.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.cl.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.hr.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.fi.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.hk.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.hu.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.ie.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.it.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.jp.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.no.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.pl.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.ro.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.si.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.es.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.se.debian.org/debian/pool \n \
+${DEBIAN_MIRROR}	http://ftp.tr.debian.org/debian/pool \n \
+${GNU_MIRROR}	https://mirrors.kernel.org/gnu \n \
 ${KERNELORG_MIRROR}	http://www.kernel.org/pub \n \
 ${GNUPG_MIRROR}	ftp://ftp.gnupg.org/gcrypt \n \
 ${GNUPG_MIRROR}	ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt \n \
diff --git a/import-layers/yocto-poky/meta/classes/nativesdk.bbclass b/import-layers/yocto-poky/meta/classes/nativesdk.bbclass
index a78257c..31dde4a 100644
--- a/import-layers/yocto-poky/meta/classes/nativesdk.bbclass
+++ b/import-layers/yocto-poky/meta/classes/nativesdk.bbclass
@@ -97,3 +97,5 @@
 do_packagedata[stamp-extra-info] = ""
 
 USE_NLS = "${SDKUSE_NLS}"
+
+OLDEST_KERNEL = "${SDK_OLDEST_KERNEL}"
diff --git a/import-layers/yocto-poky/meta/classes/package_ipk.bbclass b/import-layers/yocto-poky/meta/classes/package_ipk.bbclass
index eb00932..e7e7d49 100644
--- a/import-layers/yocto-poky/meta/classes/package_ipk.bbclass
+++ b/import-layers/yocto-poky/meta/classes/package_ipk.bbclass
@@ -48,6 +48,8 @@
             if os.path.exists(p):
                 bb.utils.prunedir(p)
 
+    recipesource = os.path.basename(d.getVar('FILE', True))
+
     for pkg in packages.split():
         localdata = bb.data.createCopy(d)
         root = "%s/%s" % (pkgdest, pkg)
@@ -212,10 +214,7 @@
             ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
         if rconflicts:
             ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
-        src_uri = localdata.getVar("SRC_URI", True).strip() or "None"
-        if src_uri:
-            src_uri = re.sub("\s+", " ", src_uri)
-            ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
+        ctrlfile.write("Source: %s\n" % recipesource)
         ctrlfile.close()
 
         for script in ["preinst", "postinst", "prerm", "postrm"]:
diff --git a/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass b/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass
index 4462b52..69aae26 100644
--- a/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass
+++ b/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass
@@ -89,11 +89,6 @@
 SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}"
 SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; tar_sdk; ${SDK_PACKAGING_COMMAND} "
 
-# Some archs override this, we need the nativesdk version
-# turns out this is hard to get from the datastore due to TRANSLATED_TARGET_ARCH
-# manipulation.
-SDK_OLDEST_KERNEL = "3.2.0"
-
 def populate_sdk_common(d):
     from oe.sdk import populate_sdk
     from oe.manifest import create_manifest, Manifest
@@ -223,7 +218,7 @@
 		-e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
 		-e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
 		-e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
-		-e 's#@SDK_TITLE@#${SDK_TITLE}#g' \
+		-e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE", True).replace('&', '\&')}#g' \
 		-e 's#@SDK_VERSION@#${SDK_VERSION}#g' \
 		-e '/@SDK_PRE_INSTALL_COMMAND@/d' \
 		-e '/@SDK_POST_INSTALL_COMMAND@/d' \
diff --git a/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass b/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass
index 0f0525d..39f6142 100644
--- a/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass
+++ b/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass
@@ -88,7 +88,7 @@
 def clean_esdk_builddir(d, sdkbasepath):
     """Clean up traces of the fake build for create_filtered_tasklist()"""
     import shutil
-    cleanpaths = 'cache conf/sanity_info conf/templateconf.cfg tmp'.split()
+    cleanpaths = 'cache conf/sanity_info tmp'.split()
     for pth in cleanpaths:
         fullpth = os.path.join(sdkbasepath, pth)
         if os.path.isdir(fullpth):
@@ -305,10 +305,13 @@
             f.write('SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n\n')
 
             # Set up whitelist for run on install
-            f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work"\n\n')
+            f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work *:do_package"\n\n')
 
             # Hide the config information from bitbake output (since it's fixed within the SDK)
-            f.write('BUILDCFG_HEADER = ""\n')
+            f.write('BUILDCFG_HEADER = ""\n\n')
+
+            # Map gcc-dependent uninative sstate cache for installer usage
+            f.write('SSTATE_MIRRORS = "file://universal/(.*) file://universal-4.9/\\1\\nfile://universal-4.9/(.*) file://universal-4.8/\\1"\n\n')
 
             # Allow additional config through sdk-extra.conf
             fn = bb.cookerdata.findConfigFile('sdk-extra.conf', d)
@@ -344,6 +347,10 @@
                     if line.strip() and not line.startswith('#'):
                         f.write(line)
 
+    # Write a templateconf.cfg
+    with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f:
+        f.write('meta/conf\n')
+
     # Ensure any variables set from the external environment (by way of
     # BB_ENV_EXTRAWHITE) are set in the SDK's configuration
     extralines = []
@@ -370,8 +377,9 @@
 
     sstate_out = baseoutpath + '/sstate-cache'
     bb.utils.remove(sstate_out, True)
-    # uninative.bbclass sets NATIVELSBSTRING to 'universal'
-    fixedlsbstring = 'universal'
+
+    # uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d)
+    fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d)
 
     sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1')
     sdk_ext_type = d.getVar('SDK_EXT_TYPE', True)
diff --git a/import-layers/yocto-poky/meta/classes/rm_work.bbclass b/import-layers/yocto-poky/meta/classes/rm_work.bbclass
index b71a9d1..64b6981 100644
--- a/import-layers/yocto-poky/meta/classes/rm_work.bbclass
+++ b/import-layers/yocto-poky/meta/classes/rm_work.bbclass
@@ -58,7 +58,7 @@
             *do_setscene*)
                 break
                 ;;
-            *sigdata*)
+            *sigdata*|*sigbasedata*)
                 i=dummy
                 break
                 ;;
diff --git a/import-layers/yocto-poky/meta/classes/sanity.bbclass b/import-layers/yocto-poky/meta/classes/sanity.bbclass
index 7682ffb..a11b581 100644
--- a/import-layers/yocto-poky/meta/classes/sanity.bbclass
+++ b/import-layers/yocto-poky/meta/classes/sanity.bbclass
@@ -929,7 +929,9 @@
     # If /bin/sh is a symlink, check that it points to dash or bash
     if os.path.islink('/bin/sh'):
         real_sh = os.path.realpath('/bin/sh')
-        if not real_sh.endswith('/dash') and not real_sh.endswith('/bash'):
+        # Due to update-alternatives, the shell name may take various
+        # forms, such as /bin/dash, bin/bash, /bin/bash.bash ...
+        if '/dash' not in real_sh and '/bash' not in real_sh:
             status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)
 
 def check_sanity(sanity_data):
diff --git a/import-layers/yocto-poky/meta/classes/sstate.bbclass b/import-layers/yocto-poky/meta/classes/sstate.bbclass
index 172384b..5b92c54 100644
--- a/import-layers/yocto-poky/meta/classes/sstate.bbclass
+++ b/import-layers/yocto-poky/meta/classes/sstate.bbclass
@@ -30,8 +30,6 @@
 SSTATE_DUPWHITELIST += "${STAGING_ETCDIR_NATIVE}/sgml ${STAGING_DATADIR_NATIVE}/sgml"
 # Archive the sources for many architectures in one deploy folder
 SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}"
-# Ignore overlapping README
-SSTATE_DUPWHITELIST += "${DEPLOY_DIR}/sdk/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt"
 
 SSTATE_SCAN_FILES ?= "*.la *-config *_config"
 SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES", True).split())}" \) -type f'
@@ -457,7 +455,7 @@
     rm_nohash = ".do_%s" % ss['task']
     for stfile in glob.glob(wildcard_stfile):
         # Keep the sigdata
-        if ".sigdata." in stfile:
+        if ".sigdata." in stfile or ".sigbasedata." in stfile:
             continue
         # Preserve taint files in the stamps directory
         if stfile.endswith('.taint'):
@@ -724,6 +722,8 @@
 #
 sstate_unpack_package () {
 	tar -xvzf ${SSTATE_PKG}
+	# update .siginfo atime on local/NFS mirror
+	[ -w ${SSTATE_PKG}.siginfo ] && [ -h ${SSTATE_PKG}.siginfo ] && touch -a ${SSTATE_PKG}.siginfo
 	# Use "! -w ||" to return true for read only files
 	[ ! -w ${SSTATE_PKG} ] || touch --no-dereference ${SSTATE_PKG}
 	[ ! -w ${SSTATE_PKG}.sig ] || [ ! -e ${SSTATE_PKG}.sig ] || touch --no-dereference ${SSTATE_PKG}.sig
diff --git a/import-layers/yocto-poky/meta/classes/staging.bbclass b/import-layers/yocto-poky/meta/classes/staging.bbclass
index a0b09a0..bfabd06 100644
--- a/import-layers/yocto-poky/meta/classes/staging.bbclass
+++ b/import-layers/yocto-poky/meta/classes/staging.bbclass
@@ -171,7 +171,6 @@
 
 SYSROOT_PREPROCESS_FUNCS ?= ""
 SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir"
-SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
 
 # We clean out any existing sstate from the sysroot if we rerun configure
 python sysroot_cleansstate () {
diff --git a/import-layers/yocto-poky/meta/classes/systemd.bbclass b/import-layers/yocto-poky/meta/classes/systemd.bbclass
index d56c760..4ea1f45 100644
--- a/import-layers/yocto-poky/meta/classes/systemd.bbclass
+++ b/import-layers/yocto-poky/meta/classes/systemd.bbclass
@@ -32,7 +32,7 @@
 	systemctl $OPTS ${SYSTEMD_AUTO_ENABLE} ${SYSTEMD_SERVICE}
 
 	if [ -z "$D" -a "${SYSTEMD_AUTO_ENABLE}" = "enable" ]; then
-		systemctl restart ${SYSTEMD_SERVICE}
+		systemctl --no-block restart ${SYSTEMD_SERVICE}
 	fi
 fi
 }
diff --git a/import-layers/yocto-poky/meta/classes/testsdk.bbclass b/import-layers/yocto-poky/meta/classes/testsdk.bbclass
index 77c9203..43342b1 100644
--- a/import-layers/yocto-poky/meta/classes/testsdk.bbclass
+++ b/import-layers/yocto-poky/meta/classes/testsdk.bbclass
@@ -4,13 +4,15 @@
 
 # testsdk.bbclass enables testing for SDK and Extensible SDK
 #
-# For run SDK tests you need to do,
-# - bitbake core-image-sato -c populate_sdk
-# - bitbake core-image-sato -c testsdk
+# To run SDK tests, run the commands:
+# $ bitbake <image-name> -c populate_sdk
+# $ bitbake <image-name> -c testsdk
 #
-# For run eSDK tests you need to do,
-# - bitbake core-image-sato -c populate_sdk_ext
-# - bitbake core-image-sato -c testsdkext
+# To run eSDK tests, run the commands:
+# $ bitbake <image-name> -c populate_sdk_ext
+# $ bitbake <image-name> -c testsdkext
+#
+# where "<image-name>" is an image like core-image-sato.
 
 TEST_LOG_DIR ?= "${WORKDIR}/testimage"
 TESTSDKLOCK = "${TMPDIR}/testsdk.lock"
diff --git a/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass b/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass
index 3c56db8..cef26b1 100644
--- a/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass
+++ b/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass
@@ -68,8 +68,8 @@
 			[ -e "${DEPLOYDIR}/${UBOOT_DTB_IMAGE}" ]; then
 			cd ${B}
 			oe_runmake EXT_DTB=${DEPLOYDIR}/${UBOOT_DTB_IMAGE}
-			install ${S}/${UBOOT_BINARY} ${DEPLOYDIR}/${UBOOT_IMAGE}
-			install ${S}/${UBOOT_BINARY} ${DEPLOY_DIR_IMAGE}/${UBOOT_IMAGE}
+			install ${B}/${UBOOT_BINARY} ${DEPLOYDIR}/${UBOOT_IMAGE}
+			install ${B}/${UBOOT_BINARY} ${DEPLOY_DIR_IMAGE}/${UBOOT_IMAGE}
 		elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "${DEPLOYDIR}/${UBOOT_DTB_IMAGE}" ]; then
 			cd ${DEPLOYDIR}
 			cat ${UBOOT_NODTB_IMAGE} ${UBOOT_DTB_IMAGE} | tee ${B}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
diff --git a/import-layers/yocto-poky/meta/classes/uninative.bbclass b/import-layers/yocto-poky/meta/classes/uninative.bbclass
index 89cec07..9754669 100644
--- a/import-layers/yocto-poky/meta/classes/uninative.bbclass
+++ b/import-layers/yocto-poky/meta/classes/uninative.bbclass
@@ -1,4 +1,5 @@
-UNINATIVE_LOADER ?= "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', 'ld-linux.so.2', d)}"
+UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/${@bb.utils.contains('BUILD_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', 'ld-linux.so.2', d)}"
+UNINATIVE_STAGING_DIR ?= "${STAGING_DIR}"
 
 UNINATIVE_URL ?= "unset"
 UNINATIVE_TARBALL ?= "${BUILD_ARCH}-nativesdk-libc.tar.bz2"
@@ -7,17 +8,6 @@
 #UNINATIVE_CHECKSUM[x86_64] = "dead"
 UNINATIVE_DLDIR ?= "${DL_DIR}/uninative/"
 
-# https://wiki.debian.org/GCC5
-# We may see binaries built with gcc5 run or linked into gcc4 environment
-# so use the older libstdc++ standard for now until we don't support gcc4
-# on the host system.
-BUILD_CXXFLAGS_append = " -D_GLIBCXX_USE_CXX11_ABI=0"
-
-#
-# icu configure defaults to CXX11 if no -std= option is passed in CXXFLAGS
-# therefore pass one
-BUILD_CXXFLAGS_append_pn-icu-native = " -std=c++98"
-
 addhandler uninative_event_fetchloader
 uninative_event_fetchloader[eventmask] = "bb.event.BuildStarted"
 
@@ -69,7 +59,7 @@
             if localpath != tarballpath and os.path.exists(localpath) and not os.path.exists(tarballpath):
                     os.symlink(localpath, tarballpath)
 
-        cmd = d.expand("mkdir -p ${STAGING_DIR}-uninative; cd ${STAGING_DIR}-uninative; tar -xjf ${UNINATIVE_DLDIR}/%s/${UNINATIVE_TARBALL}; ${STAGING_DIR}-uninative/relocate_sdk.py ${STAGING_DIR}-uninative/${BUILD_ARCH}-linux ${UNINATIVE_LOADER} ${UNINATIVE_LOADER} ${STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative ${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so" % chksum)
+        cmd = d.expand("mkdir -p ${UNINATIVE_STAGING_DIR}-uninative; cd ${UNINATIVE_STAGING_DIR}-uninative; tar -xjf ${UNINATIVE_DLDIR}/%s/${UNINATIVE_TARBALL}; ${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux ${UNINATIVE_LOADER} ${UNINATIVE_LOADER} ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so" % chksum)
         subprocess.check_call(cmd, shell=True)
 
         with open(loaderchksum, "w") as f:
@@ -99,9 +89,9 @@
     loader = d.getVar("UNINATIVE_LOADER", True)
     if os.path.exists(loader):
         bb.debug(2, "Enabling uninative")
-        d.setVar("NATIVELSBSTRING", "universal")
+        d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d))
         d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp")
-        d.prependVar("PATH", "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
+        d.prependVar("PATH", "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:")
 
 python uninative_changeinterp () {
     import subprocess
diff --git a/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass b/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass
index 1fdd681..65929e5 100644
--- a/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass
+++ b/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass
@@ -195,8 +195,8 @@
     pkgdest = d.getVar('PKGD', True)
     for pkg in (d.getVar('PACKAGES', True) or "").split():
         # Create post install/removal scripts
-        alt_setup_links = ""
-        alt_remove_links = ""
+        alt_setup_links = "# Begin section update-alternatives\n"
+        alt_remove_links = "# Begin section update-alternatives\n"
         for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
             alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
             alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
@@ -219,10 +219,13 @@
             # Default to generate shell script.. eventually we may want to change this...
             alt_target = os.path.normpath(alt_target)
 
-            alt_setup_links  += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
-            alt_remove_links += '\tupdate-alternatives --remove  %s %s\n' % (alt_name, alt_target)
+            alt_setup_links  += 'update-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
+            alt_remove_links += 'update-alternatives --remove  %s %s\n' % (alt_name, alt_target)
 
-        if alt_setup_links:
+        alt_setup_links += "# End section update-alternatives\n"
+        alt_remove_links += "# End section update-alternatives\n"
+
+        if len(alt_setup_links.splitlines()) > 2:
             # RDEPENDS setup
             provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives', True)
             if provider:
@@ -232,12 +235,24 @@
             bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg)
             bb.note('%s' % alt_setup_links)
             postinst = d.getVar('pkg_postinst_%s' % pkg, True) or '#!/bin/sh\n'
-            postinst += alt_setup_links
+            postinst = postinst.splitlines(True)
+            try:
+                index = postinst.index('# Begin section update-rc.d\n')
+                postinst.insert(index, alt_setup_links)
+            except ValueError:
+                postinst.append(alt_setup_links)
+            postinst = ''.join(postinst)
             d.setVar('pkg_postinst_%s' % pkg, postinst)
 
             bb.note('%s' % alt_remove_links)
             prerm = d.getVar('pkg_prerm_%s' % pkg, True) or '#!/bin/sh\n'
-            prerm += alt_remove_links
+            prerm = prerm.splitlines(True)
+            try:
+                index = prerm.index('# End section update-rc.d\n')
+                prerm.insert(index + 1, alt_remove_links)
+            except ValueError:
+                prerm.append(alt_remove_links)
+            prerm = ''.join(prerm)
             d.setVar('pkg_prerm_%s' % pkg, prerm)
 }
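
[For reference: the "# Begin/End section update-alternatives" markers above give update-rc.d.bbclass (next file) a fixed landmark, so the two classes can splice their postinst/prerm fragments in a deterministic order rather than blindly appending. The splice is just splitlines()/insert() around the marker line; a standalone sketch with hypothetical fragment contents:]

    # An existing postinst that already carries an update-rc.d section.
    postinst = ("#!/bin/sh\n"
                "# Begin section update-rc.d\n"
                "update-rc.d $OPT foo defaults\n"
                "# End section update-rc.d\n")

    alt_setup_links = ("# Begin section update-alternatives\n"
                       "update-alternatives --install /usr/bin/foo foo /usr/bin/foo.example 100\n"
                       "# End section update-alternatives\n")

    lines = postinst.splitlines(True)
    try:
        # Place the update-alternatives block just before the update-rc.d block.
        lines.insert(lines.index('# Begin section update-rc.d\n'), alt_setup_links)
    except ValueError:
        lines.append(alt_setup_links)
    postinst = ''.join(lines)
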
 
diff --git a/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass b/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass
index 321924b..18df2dc 100644
--- a/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass
+++ b/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass
@@ -26,6 +26,7 @@
 }
 
 updatercd_postinst() {
+# Begin section update-rc.d
 if type update-rc.d >/dev/null 2>/dev/null; then
 	if [ -n "$D" ]; then
 		OPT="-r $D"
@@ -34,12 +35,15 @@
 	fi
 	update-rc.d $OPT ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
 fi
+# End section update-rc.d
 }
 
 updatercd_prerm() {
+# Begin section update-rc.d
 if [ -z "$D" -a -x "${INIT_D_DIR}/${INITSCRIPT_NAME}" ]; then
 	${INIT_D_DIR}/${INITSCRIPT_NAME} stop || :
 fi
+# End section update-rc.d
 }
 
 updatercd_postrm() {
@@ -102,13 +106,25 @@
         postinst = d.getVar('pkg_postinst_%s' % pkg, True)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('updatercd_postinst', True)
+        postinst = postinst.splitlines(True)
+        try:
+            index = postinst.index('# End section update-alternatives\n')
+            postinst.insert(index + 1, localdata.getVar('updatercd_postinst', True))
+        except ValueError:
+            postinst.append(localdata.getVar('updatercd_postinst', True))
+        postinst = ''.join(postinst)
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
         prerm = d.getVar('pkg_prerm_%s' % pkg, True)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('updatercd_prerm', True)
+        prerm = prerm.splitlines(True)
+        try:
+            index = prerm.index('# Begin section update-alternatives\n')
+            prerm.insert(index, localdata.getVar('updatercd_prerm', True))
+        except ValueError:
+            prerm.append(localdata.getVar('updatercd_prerm', True))
+        prerm = ''.join(prerm)
         d.setVar('pkg_prerm_%s' % pkg, prerm)
 
         postrm = d.getVar('pkg_postrm_%s' % pkg, True)