meta-openembedded and poky: subtree updates

Squash of the following subtree updates, combined due to the
dependencies among them and the accompanying OpenBMC changes:

meta-openembedded: subtree update:d0748372d2..9201611135
meta-openembedded: subtree update:9201611135..17fd382f34
poky: subtree update:9052e5b32a..2e11d97b6c
poky: subtree update:2e11d97b6c..a8544811d7

The change log was too large for the Jenkins plugin
to handle, so it has been removed. Here are the
first and last commits of each subtree:

meta-openembedded:d0748372d2
      cppzmq: bump to version 4.6.0
meta-openembedded:17fd382f34
      mpv: Remove X11 dependency
poky:9052e5b32a
      package_ipk: Remove pointless comment to trigger rebuild
poky:a8544811d7
      pbzip2: Fix license warning

Change-Id: If0fc6c37629642ee207a4ca2f7aa501a2c673cd6
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
diff --git a/poky/scripts/buildall-qemu b/poky/scripts/buildall-qemu
new file mode 100755
index 0000000..ca9aafa
--- /dev/null
+++ b/poky/scripts/buildall-qemu
@@ -0,0 +1,120 @@
+#!/bin/sh
+#  Copyright (c) 2020 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# buildall-qemu: a tool for automating build testing of recipes
+# TODO: Add support for selecting which qemu architectures to build
+# TODO: Add support for queueing up multiple recipe builds
+# TODO: Add more logging options (e.g. local.conf info, bitbake env info)
+
+usage ()
+{
+    base=$(basename "$0")
+    echo "Usage: $base [options] [recipename/target]"
+    echo "Executes a build of the given target for the selected C libraries. With no options, defaults to both glibc and musl."
+    echo "Options:"
+    echo "-l, --libc            Specify one of \"glibc\" or \"musl\""
+}
+
+
+buildall ()
+{
+    # Get path to oe-core directory. Since oe-init-build-env prepends $PATH with
+    # the path to the scripts directory, get it from there
+    SCRIPTS_PATH="$(echo "$PATH" | cut -d ":" -f 1)"
+    OE_CORE_PATH=$(echo "$SCRIPTS_PATH" | sed 's|\(.*\)/.*|\1|') 
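+    # e.g. a PATH beginning with "/home/user/poky/scripts:" yields OE_CORE_PATH="/home/user/poky" (illustrative path)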
+
+    # Get target list and host machine information
+    TARGET_LIST=$(find "$OE_CORE_PATH"/meta/conf/machine -maxdepth 1 -type f | grep qemu | sed 's|.*/||' | sed -e 's/\.conf//')
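+    # TARGET_LIST holds the qemu machine names shipped with oe-core, e.g. "qemuarm qemux86 qemux86-64"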
+
+    # Set LIBC value to use for the builds based on options provided by the user
+    if [ -n "$2" ]
+    then
+        LIBC_LIST="$2"
+        echo "$LIBC_LIST"
+    else
+        LIBC_LIST="glibc musl"
+        echo "$LIBC_LIST"
+    fi
+
+    START_TIME=$(date "+%Y-%m-%d_%H:%M:%S")
+    LOG_FILE="$1-buildall.log"
+    OS_INFO=$(grep "PRETTY_NAME=" /etc/os-release | awk -F "=" '{print $2}' | sed -e 's/^"//' -e 's/"$//')
+
+    # Rename any existing log file for this build by appending .old, otherwise create a fresh one
+    if [ -f "${LOG_FILE}" ]
+    then
+        mv "${LOG_FILE}" "${LOG_FILE}.old"
+    else
+        touch "${LOG_FILE}"
+    fi
+
+    # Fill the log file with build and host info
+    echo "BUILDALL-QEMU LOG FOR $1" >> "${LOG_FILE}"
+    echo "START TIME: ${START_TIME}" >> "${LOG_FILE}"
+    echo "HOSTNAME: $(uname -n)" >> "${LOG_FILE}"
+    echo "HOST OS: ${OS_INFO}" >> "${LOG_FILE}"
+    echo "HOST KERNEL: $(uname -r)" >> "${LOG_FILE}"
+    echo "===============" >> "${LOG_FILE}"
+    echo "BUILD RESULTS:" >> "${LOG_FILE}"
+
+    # start the builds for each MACHINE and TCLIBC
+    for j in ${LIBC_LIST}
+    do
+        echo "[$j]" >> "${LOG_FILE}"
+        for i in ${TARGET_LIST}
+        do
+            echo "$i" "$j"
+            TCLIBC=$j MACHINE=$i bitbake "$1" && echo "PASS: $i" >> "${LOG_FILE}" || echo "FAIL: $i" >> "${LOG_FILE}"
+        done
+    done
+
+    # Get pass/fail totals and add them to the end of the log
+    PASSED=$(grep "PASS:" "${LOG_FILE}" | wc -l)
+    FAILED=$(grep "FAIL:" "${LOG_FILE}" | wc -l)
+
+    echo "===============" >> "${LOG_FILE}"
+    echo "PASSED: ${PASSED}" >> "${LOG_FILE}"
+    echo "FAILED: ${FAILED}" >> "${LOG_FILE}"
+}
+
+
+# fail entire script if any command fails
+set -e
+
+# print usage and exit if not enough args given
+[ $# -eq 0 ] && usage && exit 1
+
+# handle arguments
+RECIPE=
+while [ $# -gt 0 ]
+do
+    arg=$1
+    case $arg in
+        -l|--libc)
+            if [ "$2" = "glibc" ] || [ "$2" = "musl" ]
+            then
+                LIBC_LIST="$2"
+            else
+                echo "Unrecognized libc option."
+                usage && exit 1
+            fi
+            shift
+            shift
+            ;;
+        *)
+            RECIPE="$1"
+            shift
+            ;;
+    esac
+done
+
+set -- "$RECIPE"
+
+# run buildall for the given recipe and LIBC
+if [ -n "$1" ]
+then
+    buildall "$1" "$LIBC_LIST"
+fi
+
diff --git a/poky/scripts/combo-layer b/poky/scripts/combo-layer
index 9b50e98..a634dd6 100755
--- a/poky/scripts/combo-layer
+++ b/poky/scripts/combo-layer
@@ -80,7 +80,7 @@
         logger.debug("Loading config file %s" % self.conffile)
         self.parser = configparser.ConfigParser()
         with open(self.conffile) as f:
-            self.parser.readfp(f)
+            self.parser.read_file(f)
 
         # initialize default values
         self.commit_msg_template = "Automatic commit to update last_revision"
diff --git a/poky/scripts/contrib/bb-perf/buildstats-plot.sh b/poky/scripts/contrib/bb-perf/buildstats-plot.sh
index 898834e..45c27d0 100755
--- a/poky/scripts/contrib/bb-perf/buildstats-plot.sh
+++ b/poky/scripts/contrib/bb-perf/buildstats-plot.sh
@@ -39,7 +39,10 @@
 
 BS_DIR="tmp/buildstats"
 N=10
+RECIPE=""
+TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
 STATS="utime"
+ACCUMULATE=""
 SUM=""
 OUTDATA_FILE="$PWD/buildstats-plot.out"
 
@@ -51,11 +54,15 @@
                 (default: "$BS_DIR")
   -n N          Top N recipes to display. Ignored if -S is present
                 (default: "$N")
+  -r recipe     The recipe mask to be searched
+  -t tasks      The tasks to be computed
+                (default: "$TASKS")
  -s stats      The stats to be matched. If more than one stat, units
                should be the same because the data is plotted as a histogram.
                 (see buildstats.sh -h for all options) or any other defined
                 (build)stat separated by colons, i.e. stime:utime
                 (default: "$STATS")
+  -a            Accumulate all stats values for found recipes
   -S            Sum values for a particular stat for found recipes
   -o            Output data file.
                 (default: "$OUTDATA_FILE")
@@ -64,32 +71,41 @@
 }
 
 # Parse and validate arguments
-while getopts "b:n:s:o:Sh" OPT; do
-	case $OPT in
-	b)
-		BS_DIR="$OPTARG"
-		;;
-	n)
-		N="$OPTARG"
-		;;
-	s)
-	        STATS="$OPTARG"
-	        ;;
-	S)
-	        SUM="y"
-	        ;;
-	o)
-	        OUTDATA_FILE="$OPTARG"
-	        ;;
-	h)
-		usage
-		exit 0
-		;;
-	*)
-		usage
-		exit 1
-		;;
-	esac
+while getopts "b:n:r:t:s:o:aSh" OPT; do
+    case $OPT in
+    b)
+        BS_DIR="$OPTARG"
+        ;;
+    n)
+        N="$OPTARG"
+        ;;
+    r)
+        RECIPE="-r $OPTARG"
+        ;;
+    t)
+        TASKS="$OPTARG"
+        ;;
+    s)
+        STATS="$OPTARG"
+        ;;
+    a)
+        ACCUMULATE="-a"
+        ;;
+    S)
+        SUM="y"
+        ;;
+    o)
+        OUTDATA_FILE="$OPTARG"
+        ;;
+    h)
+        usage
+        exit 0
+        ;;
+    *)
+        usage
+        exit 1
+        ;;
+    esac
 done
 
 # Get number of stats
@@ -101,10 +117,10 @@
 
 # Parse buildstats recipes to produce a single table
 OUTBUILDSTATS="$PWD/buildstats.log"
-$CD/buildstats.sh -H -s "$STATS" -H > $OUTBUILDSTATS
+$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS
 
 # Get headers
-HEADERS=$(cat $OUTBUILDSTATS | sed -n -e '1s/ /-/g' -e '1s/:/ /gp')
+HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp')
 
 echo -e "set boxwidth 0.9 relative"
 echo -e "set style data histograms"
@@ -113,7 +129,7 @@
 
 # Get output data
 if [ -z "$SUM" ]; then
-    cat $OUTBUILDSTATS | sed -e '1d' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
+    cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
     # include task at recipe column
     sed -i -e "1i\
 ${HEADERS}" $OUTDATA_FILE
@@ -125,8 +141,8 @@
     declare -a sumargs
     j=0
     for i in `seq $nstats`; do
-	sumargs[j]=sum; j=$(( $j + 1 ))
-	sumargs[j]=`expr 3 + $i - 1`;  j=$(( $j + 1 ))
+        sumargs[j]=sum; j=$(( $j + 1 ))
+        sumargs[j]=`expr 3 + $i - 1`;  j=$(( $j + 1 ))
     done
 
     # Do the processing with datamash
diff --git a/poky/scripts/contrib/bb-perf/buildstats.sh b/poky/scripts/contrib/bb-perf/buildstats.sh
index e9ec2d4..e45cfc1 100755
--- a/poky/scripts/contrib/bb-perf/buildstats.sh
+++ b/poky/scripts/contrib/bb-perf/buildstats.sh
@@ -36,8 +36,10 @@
 Child rusage ru_nivcsw"
 
 BS_DIR="tmp/buildstats"
+RECIPE=""
 TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
 STATS="$TIME"
+ACCUMULATE=""
 HEADER="" # No header by default
 
 function usage {
@@ -46,6 +48,7 @@
 Usage: $CMD [-b buildstats_dir] [-t do_task]
   -b buildstats The path where the folder resides
                 (default: "$BS_DIR")
+  -r recipe     The recipe to be computed
   -t tasks      The tasks to be computed
                 (default: "$TASKS")
   -s stats      The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
@@ -56,87 +59,109 @@
                     IO=$IO
                     RUSAGE=$RUSAGE
                     CHILD_RUSAGE=$CHILD_RUSAGE
+  -a            Accumulate all stats values for found recipes
   -h            Display this help message
 EOM
 }
 
 # Parse and validate arguments
-while getopts "b:t:s:Hh" OPT; do
-	case $OPT in
-	b)
-		BS_DIR="$OPTARG"
-		;;
-	t)
-		TASKS="$OPTARG"
-		;;
-	s)
-		STATS="$OPTARG"
-		;;
-	H)
-	        HEADER="y"
-	        ;;
-	h)
-		usage
-		exit 0
-		;;
-	*)
-		usage
-		exit 1
-		;;
-	esac
+while getopts "b:r:t:s:aHh" OPT; do
+    case $OPT in
+    b)
+        BS_DIR="$OPTARG"
+        ;;
+    r)
+        RECIPE="$OPTARG"
+        ;;
+    t)
+        TASKS="$OPTARG"
+        ;;
+    s)
+        STATS="$OPTARG"
+        ;;
+    a)
+        ACCUMULATE="y"
+        ;;
+    H)
+        HEADER="y"
+        ;;
+    h)
+        usage
+        exit 0
+        ;;
+    *)
+        usage
+        exit 1
+        ;;
+    esac
 done
 
 # Ensure the buildstats folder exists
 if [ ! -d "$BS_DIR" ]; then
-	echo "ERROR: $BS_DIR does not exist"
-	usage
-	exit 1
+    echo "ERROR: $BS_DIR does not exist"
+    usage
+    exit 1
 fi
 
 stats=""
 IFS=":"
 for stat in ${STATS}; do
-	case $stat in
-	    TIME)
-		stats="${stats}:${TIME}"
-		;;
-	    IO)
-		stats="${stats}:${IO}"
-		;;
-	    RUSAGE)
-		stats="${stats}:${RUSAGE}"
-		;;
-	    CHILD_RUSAGE)
-		stats="${stats}:${CHILD_RUSAGE}"
-		;;
-	    *)
-		stats="${STATS}"
-	esac
+    case $stat in
+        TIME)
+            stats="${stats}:${TIME}"
+            ;;
+        IO)
+            stats="${stats}:${IO}"
+            ;;
+        RUSAGE)
+            stats="${stats}:${RUSAGE}"
+            ;;
+        CHILD_RUSAGE)
+            stats="${stats}:${CHILD_RUSAGE}"
+            ;;
+        *)
+            stats="${STATS}"
+            ;;
+    esac
 done
 
 # remove possible colon at the beginning
 stats="$(echo "$stats" | sed -e 's/^://1')"
 
 # Provide a header if required by the user
-[ -n "$HEADER" ] && { echo "task:recipe:$stats"; }
+if [ -n "$HEADER" ] ; then
+    if [ -n "$ACCUMULATE" ]; then
+        echo "task:recipe:accumulated(${stats//:/;})"
+    else
+        echo "task:recipe:$stats"
+    fi
+fi
 
 for task in ${TASKS}; do
     task="do_${task}"
-    for file in $(find ${BS_DIR} -type f -name ${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
+    for file in $(find ${BS_DIR} -type f -path *${RECIPE}*/${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
         recipe="$(basename $(dirname $file))"
-	times=""
-	for stat in ${stats}; do
-	    [ -z "$stat" ] && { echo "empty stats"; }
-	    time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
-	    # in case the stat is not present, set the value as NA
-	    [ -z "$time" ] && { time="NA"; }
-	    # Append it to times
-	    if [ -z "$times" ]; then
-		times="${time}"
-	    else
-		times="${times} ${time}"
-	    fi
-	done
+        times=""
+        for stat in ${stats}; do
+            [ -z "$stat" ] && { echo "empty stats"; }
+            time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
+            # in case the stat is not present, set the value as NA
+            [ -z "$time" ] && { time="NA"; }
+            # Append it to times
+            if [ -z "$times" ]; then
+                times="${time}"
+            else
+                times="${times} ${time}"
+            fi
+        done
+        if [ -n "$ACCUMULATE" ]; then
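+            # Temporarily split on spaces to turn the collected values into an array (IFS goes back to ":" for the outer loops)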
+            IFS=' '; valuesarray=(${times}); IFS=':'
+            times=0
+            for value in "${valuesarray[@]}"; do
+                [ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
+                times=$(( $times + $value ))
+            done
+        fi
         echo "${task} ${recipe} ${times}"
     done
 done
diff --git a/poky/scripts/gen-lockedsig-cache b/poky/scripts/gen-lockedsig-cache
index 9bfae9d..cd8f9a4 100755
--- a/poky/scripts/gen-lockedsig-cache
+++ b/poky/scripts/gen-lockedsig-cache
@@ -78,11 +78,15 @@
 sstate_content_cache = {}
 for s in sigs:
     prefix = s[:2]
+    prefix2 = s[2:4]
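+    # sstate objects are stored under two levels of hash-prefix directories
+    # (e.g. "ab/cd/..."), so the SHA cache is indexed by both prefixes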
     if prefix not in sstate_content_cache:
-        sstate_content_cache[prefix] = build_sha_cache(prefix)
+        sstate_content_cache[prefix] = {}
+    if prefix2 not in sstate_content_cache[prefix]:
+        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)
 
-    for f in sstate_content_cache[prefix][s]:
-        files.add(f)
+    if s in sstate_content_cache[prefix][prefix2]:
+        for f in sstate_content_cache[prefix][prefix2][s]:
+            files.add(f)
 
 elapsed = time.perf_counter() - start_time
 print("Gathering file list took %.1fs" % elapsed)
diff --git a/poky/scripts/install-buildtools b/poky/scripts/install-buildtools
new file mode 100755
index 0000000..c6b3a1e
--- /dev/null
+++ b/poky/scripts/install-buildtools
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+
+# Buildtools and buildtools extended installer helper script
+#
+# Copyright (C) 2017-2020 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+#  NOTE: --with-extended-buildtools is on by default
+#
+#  Example usage (extended buildtools from milestone):
+#    (1) using --url and --filename
+#        $ install-buildtools \
+#          --url http://downloads.yoctoproject.org/releases/yocto/milestones/yocto-3.1_M3/buildtools \
+#          --filename x86_64-buildtools-extended-nativesdk-standalone-3.0+snapshot-20200315.sh
+#    (2) using --base-url, --release, --installer-version and --build-date
+#        $ install-buildtools \
+#          --base-url http://downloads.yoctoproject.org/releases/yocto \
+#          --release yocto-3.1_M3 \
+#          --installer-version 3.0+snapshot \
+#          --build-date 20200315
+#
+#  Example usage (standard buildtools from release):
+#    (3) using --url and --filename
+#        $ install-buildtools --without-extended-buildtools \
+#          --url http://downloads.yoctoproject.org/releases/yocto/yocto-3.0.2/buildtools \
+#          --filename x86_64-buildtools-nativesdk-standalone-3.0.2.sh
+#    (4) using --base-url, --release and --installer-version
+#        $ install-buildtools --without-extended-buildtools \
+#          --base-url http://downloads.yoctoproject.org/releases/yocto \
+#          --release yocto-3.0.2 \
+#          --installer-version 3.0.2
+#
+
+import argparse
+import logging
+import os
+import re
+import shutil
+import shlex
+import stat
+import subprocess
+import sys
+import tempfile
+from urllib.parse import quote
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import scriptutils
+import scriptpath
+
+
+PROGNAME = 'install-buildtools'
+logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)
+
+DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
+DEFAULT_BASE_URL = 'http://downloads.yoctoproject.org/releases/yocto'
+DEFAULT_RELEASE = 'yocto-3.1'
+DEFAULT_INSTALLER_VERSION = '3.1'
+DEFAULT_BUILDDATE = ''
+
+# Python version sanity check
+if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
+    logger.error("This script requires Python 3.4 or greater")
+    logger.error("You have Python %s.%s" %
+                 (sys.version_info.major, sys.version_info.minor))
+    sys.exit(1)
+
+# The following three functions are copied directly from
+# bitbake/lib/bb/utils.py, in order to allow this script
+# to run on versions of python earlier than what bitbake
+# supports (e.g. less than Python 3.5 for YP 3.1 release)
+
+def _hasher(method, filename):
+    import mmap
+
+    with open(filename, "rb") as f:
+        try:
+            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
+                for chunk in iter(lambda: mm.read(8192), b''):
+                    method.update(chunk)
+        except ValueError:
+            # You can't mmap() an empty file so silence this exception
+            pass
+    return method.hexdigest()
+
+
+def md5_file(filename):
+    """
+    Return the hex string representation of the MD5 checksum of filename.
+    """
+    import hashlib
+    return _hasher(hashlib.md5(), filename)
+
+def sha256_file(filename):
+    """
+    Return the hex string representation of the 256-bit SHA checksum of
+    filename.
+    """
+    import hashlib
+    return _hasher(hashlib.sha256(), filename)
+
+
+def main():
+    global DEFAULT_INSTALL_DIR
+    global DEFAULT_BASE_URL
+    global DEFAULT_RELEASE
+    global DEFAULT_INSTALLER_VERSION
+    global DEFAULT_BUILDDATE
+    filename = ""
+    release = ""
+    buildtools_url = ""
+    install_dir = ""
+
+    parser = argparse.ArgumentParser(
+        description="Buildtools installation helper",
+        add_help=False)
+    parser.add_argument('-u', '--url',
+                        help='URL from where to fetch buildtools SDK installer, not '
+                             'including filename (optional)\n'
+                             'Requires --filename.',
+                        action='store')
+    parser.add_argument('-f', '--filename',
+                        help='filename for the buildtools SDK installer to be installed '
+                             '(optional)\nRequires --url',
+                        action='store')
+    parser.add_argument('-d', '--directory',
+                        default=DEFAULT_INSTALL_DIR,
+                        help='directory where buildtools SDK will be installed (optional)',
+                        action='store')
+    parser.add_argument('-r', '--release',
+                        default=DEFAULT_RELEASE,
+                        help='Yocto Project release string for SDK which will be '
+                             'installed (optional)',
+                        action='store')
+    parser.add_argument('-V', '--installer-version',
+                        default=DEFAULT_INSTALLER_VERSION,
+                        help='version string for the SDK to be installed (optional)',
+                        action='store')
+    parser.add_argument('-b', '--base-url',
+                        default=DEFAULT_BASE_URL,
+                        help='base URL from which to fetch SDK (optional)', action='store')
+    parser.add_argument('-t', '--build-date',
+                        default=DEFAULT_BUILDDATE,
+                        help='Build date of pre-release SDK (optional)', action='store')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('--with-extended-buildtools', action='store_true',
+                       dest='with_extended_buildtools',
+                       default=True,
+                       help='enable extended buildtools tarball (on by default)')
+    group.add_argument('--without-extended-buildtools', action='store_false',
+                       dest='with_extended_buildtools',
+                       help='disable extended buildtools (traditional buildtools tarball)')
+    parser.add_argument('-c', '--check', help='enable md5 checksum checking',
+                        default=True,
+                        action='store_true')
+    parser.add_argument('-D', '--debug', help='enable debug output',
+                        action='store_true')
+    parser.add_argument('-q', '--quiet', help='print only errors',
+                        action='store_true')
+
+    parser.add_argument('-h', '--help', action='help',
+                        default=argparse.SUPPRESS,
+                        help='show this help message and exit')
+
+    args = parser.parse_args()
+
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    elif args.quiet:
+        logger.setLevel(logging.ERROR)
+
+    if args.url and args.filename:
+        logger.debug("--url and --filename detected. Ignoring --base-url "
+                     "--release --installer-version  arguments.")
+        filename = args.filename
+        buildtools_url = "%s/%s" % (args.url, filename)
+    else:
+        if args.base_url:
+            base_url = args.base_url
+        else:
+            base_url = DEFAULT_BASE_URL
+        if args.release:
+            # check if this is a pre-release "milestone" SDK
+            m = re.search(r"^(?P<distro>[a-zA-Z\-]+)(?P<version>[0-9.]+)(?P<milestone>_M[1-9])$",
+                          args.release)
+            logger.debug("milestone regex: %s" % m)
+            if m and m.group('milestone'):
+                logger.debug("release[distro]: %s" % m.group('distro'))
+                logger.debug("release[version]: %s" % m.group('version'))
+                logger.debug("release[milestone]: %s" % m.group('milestone'))
+                if not args.build_date:
+                    logger.error("Milestone installers require --build-date")
+                else:
+                    if args.with_extended_buildtools:
+                        filename = "x86_64-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
+                            args.installer_version, args.build_date)
+                    else:
+                        filename = "x86_64-buildtools-nativesdk-standalone-%s-%s.sh" % (
+                            args.installer_version, args.build_date)
+                    safe_filename = quote(filename)
+                    buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
+            # regular release SDK
+            else:
+                if args.with_extended_buildtools:
+                    filename = "x86_64-buildtools-extended-nativesdk-standalone-%s.sh" % args.installer_version
+                else:
+                    filename = "x86_64-buildtools-nativesdk-standalone-%s.sh" % args.installer_version
+                safe_filename = quote(filename)
+                buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)
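+                # e.g. http://downloads.yoctoproject.org/releases/yocto/yocto-3.0.2/buildtools/x86_64-buildtools-nativesdk-standalone-3.0.2.sh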
+
+    tmpsdk_dir = tempfile.mkdtemp()
+    try:
+        # Fetch installer
+        logger.info("Fetching buildtools installer")
+        tmpbuildtools = os.path.join(tmpsdk_dir, filename)
+        ret = subprocess.call("wget -q -O %s %s" %
+                              (tmpbuildtools, buildtools_url), shell=True)
+        if ret != 0:
+            logger.error("Could not download file from %s" % buildtools_url)
+            return ret
+
+        # Verify checksum
+        if args.check:
+            logger.info("Fetching buildtools installer checksum")
+            checksum_type = ""
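+            # Try the ".md5sum" checksum file first, then fall back to ".sha256"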
+            for checksum_type in ["md5sum", "sha256"]:
+                check_url = "{}.{}".format(buildtools_url, checksum_type)
+                checksum_filename = "{}.{}".format(filename, checksum_type)
+                tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename)
+                ret = subprocess.call("wget -q -O %s %s" %
+                                      (tmpbuildtools_checksum, check_url), shell=True)
+                if ret == 0:
+                    break
+            else:
+                if ret != 0:
+                    logger.error("Could not download file from %s" % check_url)
+                    return ret
+            regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s\s(?P<path>.*/)?(?P<filename>.*)$")
+            with open(tmpbuildtools_checksum, 'rb') as f:
+                original = f.read()
+                m = re.search(regex, original.decode("utf-8"))
+                logger.debug("checksum regex match: %s" % m)
+                logger.debug("checksum: %s" % m.group('checksum'))
+                logger.debug("path: %s" % m.group('path'))
+                logger.debug("filename: %s" % m.group('filename'))
+                if filename != m.group('filename'):
+                    logger.error("Filename does not match name in checksum")
+                    return 1
+                checksum = m.group('checksum')
+            if checksum_type == "md5sum":
+                checksum_value = md5_file(tmpbuildtools)
+            else:
+                checksum_value = sha256_file(tmpbuildtools)
+            if checksum == checksum_value:
+                logger.info("Checksum success")
+            else:
+                logger.error("Checksum %s expected. Actual checksum is %s." %
+                             (checksum, checksum_value))
+
+        # Make installer executable
+        logger.info("Making installer executable")
+        st = os.stat(tmpbuildtools)
+        os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
+        logger.debug(os.stat(tmpbuildtools))
+        if args.directory:
+            install_dir = args.directory
+            ret = subprocess.call("%s -d %s -y" %
+                                  (tmpbuildtools, install_dir), shell=True)
+        else:
+            install_dir = "/opt/poky/%s" % args.installer_version
+            ret = subprocess.call("%s -y" % tmpbuildtools, shell=True)
+        if ret != 0:
+            logger.error("Could not run buildtools installer")
+
+        # Setup the environment
+        logger.info("Setting up the environment")
+        regex = re.compile(r'^(?P<export>export )?(?P<env_var>[A-Z_]+)=(?P<env_val>.+)$')
+        with open("%s/environment-setup-x86_64-pokysdk-linux" %
+                  install_dir, 'rb') as f:
+            for line in f:
+                match = regex.search(line.decode('utf-8'))
+                logger.debug("export regex: %s" % match)
+                if match:
+                    env_var = match.group('env_var')
+                    logger.debug("env_var: %s" % env_var)
+                    env_val = match.group('env_val')
+                    logger.debug("env_val: %s" % env_val)
+                    os.environ[env_var] = env_val
+
+        # Test installation
+        logger.info("Testing installation")
+        tool = ""
+        m = re.search("extended", tmpbuildtools)
+        logger.debug("extended regex: %s" % m)
+        if args.with_extended_buildtools and not m:
+            logger.info("Ignoring --with-extended-buildtools as filename "
+                        "does not contain 'extended'")
+        if args.with_extended_buildtools and m:
+            tool = 'gcc'
+        else:
+            tool = 'tar'
+        logger.debug("install_dir: %s" % install_dir)
+        cmd = shlex.split("/usr/bin/which %s" % tool)
+        logger.debug("cmd: %s" % cmd)
+        logger.debug("tool: %s" % tool)
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+        output, errors = proc.communicate()
+        logger.debug("proc.args: %s" % proc.args)
+        logger.debug("proc.communicate(): output %s" % output)
+        logger.debug("proc.communicate(): errors %s" % errors)
+        which_tool = output.decode('utf-8')
+        logger.debug("which %s: %s" % (tool, which_tool))
+        ret = proc.returncode
+        if not which_tool.startswith(install_dir):
+            logger.error("Something went wrong: %s not found in %s" %
+                         (tool, install_dir))
+        if ret != 0:
+            logger.error("Something went wrong: installation failed")
+        else:
+            logger.info("Installation successful. Remember to source the "
+                        "environment setup script now and in any new session.")
+        return ret
+
+    finally:
+        # cleanup tmp directory
+        shutil.rmtree(tmpsdk_dir)
+
+
+if __name__ == '__main__':
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+
+        traceback.print_exc()
+    sys.exit(ret)
diff --git a/poky/scripts/lib/devtool/standard.py b/poky/scripts/lib/devtool/standard.py
index 1c0cd8a..bab644b 100644
--- a/poky/scripts/lib/devtool/standard.py
+++ b/poky/scripts/lib/devtool/standard.py
@@ -145,8 +145,8 @@
         extracmdopts += ' --src-subdir "%s"' % args.src_subdir
     if args.autorev:
         extracmdopts += ' -a'
-    if args.fetch_dev:
-        extracmdopts += ' --fetch-dev'
+    if args.npm_dev:
+        extracmdopts += ' --npm-dev'
     if args.mirrors:
         extracmdopts += ' --mirrors'
     if args.srcrev:
@@ -260,14 +260,10 @@
                 f.write('}\n')
 
             if bb.data.inherits_class('npm', rd):
-                f.write('do_install_append() {\n')
-                f.write('    # Remove files added to source dir by devtool/externalsrc\n')
-                f.write('    rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
-                f.write('    rm -rf ${NPM_INSTALLDIR}/.git\n')
-                f.write('    rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
-                f.write('    for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
-                f.write('        rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
-                f.write('    done\n')
+                f.write('python do_configure_append() {\n')
+                f.write('    pkgdir = d.getVar("NPM_PACKAGE")\n')
+                f.write('    lockfile = os.path.join(pkgdir, "singletask.lock")\n')
+                f.write('    bb.utils.remove(lockfile)\n')
                 f.write('}\n')
 
         # Check if the new layer provides recipes whose priorities have been
@@ -940,8 +936,10 @@
                         '}\n')
             if rd.getVarFlag('do_menuconfig','task'):
                 f.write('\ndo_configure_append() {\n'
-                '    cp ${B}/.config ${S}/.config.baseline\n'
-                '    ln -sfT ${B}/.config ${S}/.config.new\n'
+                '    if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n'
+                '        cp ${B}/.config ${S}/.config.baseline\n'
+                '        ln -sfT ${B}/.config ${S}/.config.new\n'
+                '    fi\n'
                 '}\n')
             if initial_rev:
                 f.write('\n# initial_rev: %s\n' % initial_rev)
@@ -2197,7 +2195,7 @@
     group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
     group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
-    parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
+    parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
     parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
     parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
     group = parser_add.add_mutually_exclusive_group()
diff --git a/poky/scripts/lib/recipetool/create.py b/poky/scripts/lib/recipetool/create.py
index 4c4bbad..6cbf4de 100644
--- a/poky/scripts/lib/recipetool/create.py
+++ b/poky/scripts/lib/recipetool/create.py
@@ -477,8 +477,6 @@
             storeTagName = params['tag']
             params['nobranch'] = '1'
             del params['tag']
-        if scheme == 'npm':
-            params['noverify'] = '1'
         fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
 
         tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -714,10 +712,8 @@
         lines_after.append('INSANE_SKIP_${PN} += "already-stripped"')
         lines_after.append('')
 
-    if args.fetch_dev:
-        extravalues['fetchdev'] = True
-    else:
-        extravalues['fetchdev'] = None
+    if args.npm_dev:
+        extravalues['NPM_INSTALL_DEV'] = 1
 
     # Find all plugins that want to register handlers
     logger.debug('Loading recipe handlers')
@@ -1313,7 +1309,7 @@
     group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
     parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
     parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
-    parser_create.add_argument('--fetch-dev', action="store_true", help='For npm, also fetch devDependencies')
+    parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
     parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
     parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
     parser_create.set_defaults(func=create_recipe)
diff --git a/poky/scripts/lib/recipetool/create_buildsys.py b/poky/scripts/lib/recipetool/create_buildsys.py
index 3cb0276..35a97c9 100644
--- a/poky/scripts/lib/recipetool/create_buildsys.py
+++ b/poky/scripts/lib/recipetool/create_buildsys.py
@@ -226,9 +226,9 @@
                         elif pkg == 'PkgConfig':
                             inherits.append('pkgconfig')
                         elif pkg == 'PythonInterp':
-                            inherits.append('pythonnative')
+                            inherits.append('python3native')
                         elif pkg == 'PythonLibs':
-                            inherits.append('python-dir')
+                            inherits.append('python3-dir')
                         else:
                             # Try to map via looking at installed CMake packages in pkgdata
                             dep = find_cmake_package(pkg)
@@ -417,7 +417,7 @@
                 }
         progclassmap = {'gconftool-2': 'gconf',
                 'pkg-config': 'pkgconfig',
-                'python': 'pythonnative',
+                'python': 'python3native',
                 'python3': 'python3native',
                 'perl': 'perlnative',
                 'makeinfo': 'texinfo',
@@ -566,16 +566,7 @@
             elif keyword == 'AX_PROG_XSLTPROC':
                 deps.append('libxslt-native')
             elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
-                pythonclass = 'pythonnative'
-                res = version_re.search(value)
-                if res:
-                    if res.group(1).startswith('3'):
-                        pythonclass = 'python3native'
-                # Avoid replacing python3native with pythonnative
-                if not pythonclass in inherits and not 'python3native' in inherits:
-                    if 'pythonnative' in inherits:
-                        inherits.remove('pythonnative')
-                    inherits.append(pythonclass)
+                pythonclass = 'python3native'
             elif keyword == 'AX_WITH_CURSES':
                 deps.append('ncurses')
             elif keyword == 'AX_PATH_BDB':
diff --git a/poky/scripts/lib/recipetool/create_npm.py b/poky/scripts/lib/recipetool/create_npm.py
index 39429eb..579b7ae 100644
--- a/poky/scripts/lib/recipetool/create_npm.py
+++ b/poky/scripts/lib/recipetool/create_npm.py
@@ -1,321 +1,255 @@
-# Recipe creation tool - node.js NPM module support plugin
-#
 # Copyright (C) 2016 Intel Corporation
+# Copyright (C) 2020 Savoir-Faire Linux
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
+"""Recipe creation tool - npm module support plugin"""
 
-import os
-import sys
-import logging
-import subprocess
-import tempfile
-import shutil
 import json
-from recipetool.create import RecipeHandler, split_pkg_licenses, handle_license_vars
+import os
+import re
+import sys
+import tempfile
+import bb
+from bb.fetch2.npm import NpmEnvironment
+from bb.fetch2.npmsw import foreach_dependencies
+from recipetool.create import RecipeHandler
+from recipetool.create import guess_license
+from recipetool.create import split_pkg_licenses
 
-logger = logging.getLogger('recipetool')
-
-
-tinfoil = None
+TINFOIL = None
 
 def tinfoil_init(instance):
-    global tinfoil
-    tinfoil = instance
-
+    """Initialize tinfoil"""
+    global TINFOIL
+    TINFOIL = instance
 
 class NpmRecipeHandler(RecipeHandler):
-    lockdownpath = None
+    """Class to handle the npm recipe creation"""
 
-    def _ensure_npm(self, fixed_setup=False):
-        if not tinfoil.recipes_parsed:
-            tinfoil.parse_recipes()
+    @staticmethod
+    def _npm_name(name):
+        """Generate a Yocto friendly npm name"""
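+        # e.g. "@scope/Package.Name" becomes "scope-packagename" (illustrative input)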
+        name = re.sub("/", "-", name)
+        name = name.lower()
+        name = re.sub(r"[^\-a-z0-9]", "", name)
+        name = name.strip("-")
+        return name
+
+    @staticmethod
+    def _get_registry(lines):
+        """Get the registry value from the 'npm://registry' url"""
+        registry = None
+
+        def _handle_registry(varname, origvalue, op, newlines):
+            nonlocal registry
+            if origvalue.startswith("npm://"):
+                registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
+            return origvalue, None, 0, True
+
+        bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)
+
+        return registry
+
+    @staticmethod
+    def _ensure_npm():
+        """Check if the 'npm' command is available in the recipes"""
+        if not TINFOIL.recipes_parsed:
+            TINFOIL.parse_recipes()
+
         try:
-            rd = tinfoil.parse_recipe('nodejs-native')
+            d = TINFOIL.parse_recipe("nodejs-native")
         except bb.providers.NoProvider:
-            if fixed_setup:
-                msg = 'nodejs-native is required for npm but is not available within this SDK'
-            else:
-                msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
-            logger.error(msg)
-            return None
-        bindir = rd.getVar('STAGING_BINDIR_NATIVE')
-        npmpath = os.path.join(bindir, 'npm')
+            bb.error("Nothing provides 'nodejs-native' which is required for the build")
+            bb.note("You will likely need to add a layer that provides nodejs")
+            sys.exit(14)
+
+        bindir = d.getVar("STAGING_BINDIR_NATIVE")
+        npmpath = os.path.join(bindir, "npm")
+
         if not os.path.exists(npmpath):
-            tinfoil.build_targets('nodejs-native', 'addto_recipe_sysroot')
+            TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
+
             if not os.path.exists(npmpath):
-                logger.error('npm required to process specified source, but nodejs-native did not seem to populate it')
-                return None
+                bb.error("Failed to add 'npm' to sysroot")
+                sys.exit(14)
+
         return bindir
 
-    def _handle_license(self, data):
-        '''
-        Handle the license value from an npm package.json file
-        '''
-        license = None
-        if 'license' in data:
-            license = data['license']
-            if isinstance(license, dict):
-                license = license.get('type', None)
-            if license:
-                if 'OR' in license:
-                    license = license.replace('OR', '|')
-                    license = license.replace('AND', '&')
-                    license = license.replace(' ', '_')
-                    if not license[0] == '(':
-                        license = '(' + license + ')'
-                else:
-                    license = license.replace('AND', '&')
-                    if license[0] == '(':
-                        license = license[1:]
-                    if license[-1] == ')':
-                        license = license[:-1]
-                license = license.replace('MIT/X11', 'MIT')
-                license = license.replace('Public Domain', 'PD')
-                license = license.replace('SEE LICENSE IN EULA',
-                                          'SEE-LICENSE-IN-EULA')
-        return license
+    @staticmethod
+    def _npm_global_configs(dev):
+        """Get the npm global configuration"""
+        configs = []
 
-    def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before, d):
-        try:
-            runenv = dict(os.environ, PATH=d.getVar('PATH'))
-            bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        except bb.process.ExecutionError as e:
-            logger.warning('npm shrinkwrap failed:\n%s' % e.stdout)
-            return
+        if dev:
+            configs.append(("also", "development"))
+        else:
+            configs.append(("only", "production"))
 
-        tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json')
-        shutil.move(os.path.join(srctree, 'npm-shrinkwrap.json'), tmpfile)
-        extravalues.setdefault('extrafiles', {})
-        extravalues['extrafiles']['npm-shrinkwrap.json'] = tmpfile
-        lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"')
+        configs.append(("save", "false"))
+        configs.append(("package-lock", "false"))
+        configs.append(("shrinkwrap", "false"))
+        return configs
 
-    def _lockdown(self, srctree, localfilesdir, extravalues, lines_before, d):
-        runenv = dict(os.environ, PATH=d.getVar('PATH'))
-        if not NpmRecipeHandler.lockdownpath:
-            NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown')
-            bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath,
-                           cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js')
-        if not os.path.exists(relockbin):
-            logger.warning('Could not find relock.js within lockdown directory; skipping lockdown')
-            return
-        try:
-            bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        except bb.process.ExecutionError as e:
-            logger.warning('lockdown-relock failed:\n%s' % e.stdout)
-            return
+    def _run_npm_install(self, d, srctree, registry, dev):
+        """Run the 'npm install' command without building the addons"""
+        configs = self._npm_global_configs(dev)
+        configs.append(("ignore-scripts", "true"))
 
-        tmpfile = os.path.join(localfilesdir, 'lockdown.json')
-        shutil.move(os.path.join(srctree, 'lockdown.json'), tmpfile)
-        extravalues.setdefault('extrafiles', {})
-        extravalues['extrafiles']['lockdown.json'] = tmpfile
-        lines_before.append('NPM_LOCKDOWN := "${THISDIR}/${PN}/lockdown.json"')
+        if registry:
+            configs.append(("registry", registry))
 
-    def _handle_dependencies(self, d, deps, optdeps, devdeps, lines_before, srctree):
-        import scriptutils
-        # If this isn't a single module we need to get the dependencies
-        # and add them to SRC_URI
-        def varfunc(varname, origvalue, op, newlines):
-            if varname == 'SRC_URI':
-                if not origvalue.startswith('npm://'):
-                    src_uri = origvalue.split()
-                    deplist = {}
-                    for dep, depver in optdeps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        if self.check_npm_optional_dependency(depdata):
-                            deplist[dep] = depdata
-                    for dep, depver in devdeps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        if self.check_npm_optional_dependency(depdata):
-                            deplist[dep] = depdata
-                    for dep, depver in deps.items():
-                        depdata = self.get_npm_data(dep, depver, d)
-                        deplist[dep] = depdata
+        bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
 
-                    extra_urls = []
-                    for dep, depdata in deplist.items():
-                        version = depdata.get('version', None)
-                        if version:
-                            url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep)
-                            extra_urls.append(url)
-                    if extra_urls:
-                        scriptutils.fetch_url(tinfoil, ' '.join(extra_urls), None, srctree, logger)
-                        src_uri.extend(extra_urls)
-                        return src_uri, None, -1, True
-            return origvalue, None, 0, True
-        updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
-        if updated:
-            del lines_before[:]
-            for line in newlines:
-                # Hack to avoid newlines that edit_metadata inserts
-                if line.endswith('\n'):
-                    line = line[:-1]
-                lines_before.append(line)
-        return updated
+        env = NpmEnvironment(d, configs=configs)
+        env.run("npm install", workdir=srctree)
+
+    def _generate_shrinkwrap(self, d, srctree, dev):
+        """Check and generate the 'npm-shrinkwrap.json' file if needed"""
+        configs = self._npm_global_configs(dev)
+
+        env = NpmEnvironment(d, configs=configs)
+        env.run("npm shrinkwrap", workdir=srctree)
+
+        return os.path.join(srctree, "npm-shrinkwrap.json")
+
+    def _handle_licenses(self, srctree, shrinkwrap_file, dev):
+        """Return the extra license files and the list of packages"""
+        licfiles = []
+        packages = {}
+
+        def _licfiles_append(licfile):
+            """Append 'licfile' to the license files list"""
+            licfilepath = os.path.join(srctree, licfile)
+            licmd5 = bb.utils.md5_file(licfilepath)
+            licfiles.append("file://%s;md5=%s" % (licfile, licmd5))
+
+        # Handle the parent package
+        _licfiles_append("package.json")
+        packages["${PN}"] = ""
+
+        # Handle the dependencies
+        def _handle_dependency(name, params, deptree):
+            suffix = "-".join([self._npm_name(dep) for dep in deptree])
+            destdirs = [os.path.join("node_modules", dep) for dep in deptree]
+            destdir = os.path.join(*destdirs)
+            _licfiles_append(os.path.join(destdir, "package.json"))
+            packages["${PN}-" + suffix] = destdir
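+            # e.g. a dependency unpacked at node_modules/abbrev becomes package "${PN}-abbrev"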
+
+        with open(shrinkwrap_file, "r") as f:
+            shrinkwrap = json.load(f)
+
+        foreach_dependencies(shrinkwrap, _handle_dependency, dev)
+
+        return licfiles, packages
 
     def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
-        import bb.utils
-        import oe.package
-        from collections import OrderedDict
+        """Handle the npm recipe creation"""
 
-        if 'buildsystem' in handled:
+        if "buildsystem" in handled:
             return False
 
-        def read_package_json(fn):
-            with open(fn, 'r', errors='surrogateescape') as f:
-                return json.loads(f.read())
+        files = RecipeHandler.checkfiles(srctree, ["package.json"])
 
-        files = RecipeHandler.checkfiles(srctree, ['package.json'])
-        if files:
-            d = bb.data.createCopy(tinfoil.config_data)
-            npm_bindir = self._ensure_npm()
-            if not npm_bindir:
-                sys.exit(14)
-            d.prependVar('PATH', '%s:' % npm_bindir)
+        if not files:
+            return False
 
-            data = read_package_json(files[0])
-            if 'name' in data and 'version' in data:
-                extravalues['PN'] = data['name']
-                extravalues['PV'] = data['version']
-                classes.append('npm')
-                handled.append('buildsystem')
-                if 'description' in data:
-                    extravalues['SUMMARY'] = data['description']
-                if 'homepage' in data:
-                    extravalues['HOMEPAGE'] = data['homepage']
+        with open(files[0], "r") as f:
+            data = json.load(f)
 
-                fetchdev = extravalues['fetchdev'] or None
-                deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev)
-                self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)
+        if "name" not in data or "version" not in data:
+            return False
 
-                # Shrinkwrap
-                localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
-                self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before, d)
+        extravalues["PN"] = self._npm_name(data["name"])
+        extravalues["PV"] = data["version"]
 
-                # Lockdown
-                self._lockdown(srctree, localfilesdir, extravalues, lines_before, d)
+        if "description" in data:
+            extravalues["SUMMARY"] = data["description"]
 
-                # Split each npm module out to is own package
-                npmpackages = oe.package.npm_split_package_dirs(srctree)
-                licvalues = None
-                for item in handled:
-                    if isinstance(item, tuple):
-                        if item[0] == 'license':
-                            licvalues = item[1]
-                            break
-                if not licvalues:
-                    licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
-                if licvalues:
-                    # Augment the license list with information we have in the packages
-                    licenses = {}
-                    license = self._handle_license(data)
-                    if license:
-                        licenses['${PN}'] = license
-                    for pkgname, pkgitem in npmpackages.items():
-                        _, pdata = pkgitem
-                        license = self._handle_license(pdata)
-                        if license:
-                            licenses[pkgname] = license
-                    # Now write out the package-specific license values
-                    # We need to strip out the json data dicts for this since split_pkg_licenses
-                    # isn't expecting it
-                    packages = OrderedDict((x,y[0]) for x,y in npmpackages.items())
-                    packages['${PN}'] = ''
-                    pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
-                    all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense]))
-                    if '&' in all_licenses:
-                        all_licenses.remove('&')
-                    extravalues['LICENSE'] = ' & '.join(all_licenses)
+        if "homepage" in data:
+            extravalues["HOMEPAGE"] = data["homepage"]
 
-                # Need to move S setting after inherit npm
-                for i, line in enumerate(lines_before):
-                    if line.startswith('S ='):
-                        lines_before.pop(i)
-                        lines_after.insert(0, '# Must be set after inherit npm since that itself sets S')
-                        lines_after.insert(1, line)
-                        break
+        dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
+        registry = self._get_registry(lines_before)
 
-                return True
+        bb.note("Checking if npm is available ...")
+        # The native npm is used here (and not the host one) so that the npm
+        # version is high enough to give an efficient dependency tree
+        # resolution and to avoid issues with the shrinkwrap file format.
+        # Moreover, the native npm is mandatory for the build.
+        bindir = self._ensure_npm()
 
-        return False
+        d = bb.data.createCopy(TINFOIL.config_data)
+        d.prependVar("PATH", bindir + ":")
+        d.setVar("S", srctree)
 
-    # FIXME this is duplicated from lib/bb/fetch2/npm.py
-    def _parse_view(self, output):
-        '''
-        Parse the output of npm view --json; the last JSON result
-        is assumed to be the one that we're interested in.
-        '''
-        pdata = None
-        outdeps = {}
-        datalines = []
-        bracelevel = 0
-        for line in output.splitlines():
-            if bracelevel:
-                datalines.append(line)
-            elif '{' in line:
-                datalines = []
-                datalines.append(line)
-            bracelevel = bracelevel + line.count('{') - line.count('}')
-        if datalines:
-            pdata = json.loads('\n'.join(datalines))
-        return pdata
+        bb.note("Generating shrinkwrap file ...")
+        # To generate the shrinkwrap file the dependencies have to be installed
+        # first. During the generation process some files may be updated /
+        # deleted. By default devtool tracks the diffs in the srctree and raises
+        # errors when finishing the recipe if some diffs are found.
+        git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
+        if os.path.exists(git_exclude_file):
+            with open(git_exclude_file, "r+") as f:
+                lines = f.readlines()
+                for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
+                    if line not in lines:
+                        f.write(line + "\n")
 
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def get_npm_data(self, pkg, version, d):
-        import bb.fetch2
-        pkgfullname = pkg
-        if version != '*' and not '/' in version:
-            pkgfullname += "@'%s'" % version
-        logger.debug(2, "Calling getdeps on %s" % pkg)
-        runenv = dict(os.environ, PATH=d.getVar('PATH'))
-        fetchcmd = "npm view %s --json" % pkgfullname
-        output, _ = bb.process.run(fetchcmd, stderr=subprocess.STDOUT, env=runenv, shell=True)
-        data = self._parse_view(output)
-        return data
+        lock_file = os.path.join(srctree, "package-lock.json")
+        lock_copy = lock_file + ".copy"
+        if os.path.exists(lock_file):
+            bb.utils.copyfile(lock_file, lock_copy)
 
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def get_npm_package_dependencies(self, pdata, fetchdev):
-        dependencies = pdata.get('dependencies', {})
-        optionalDependencies = pdata.get('optionalDependencies', {})
-        dependencies.update(optionalDependencies)
-        if fetchdev:
-            devDependencies = pdata.get('devDependencies', {})
-            dependencies.update(devDependencies)
-        else:
-            devDependencies = {}
-        depsfound = {}
-        optdepsfound = {}
-        devdepsfound = {}
-        for dep in dependencies:
-            if dep in optionalDependencies:
-                optdepsfound[dep] = dependencies[dep]
-            elif dep in devDependencies:
-                devdepsfound[dep] = dependencies[dep]
-            else:
-                depsfound[dep] = dependencies[dep]
-        return depsfound, optdepsfound, devdepsfound
+        self._run_npm_install(d, srctree, registry, dev)
+        shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)
 
-    # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
-    # (split out from _getdependencies())
-    def check_npm_optional_dependency(self, pdata):
-        pkg_os = pdata.get('os', None)
-        if pkg_os:
-            if not isinstance(pkg_os, list):
-                pkg_os = [pkg_os]
-            blacklist = False
-            for item in pkg_os:
-                if item.startswith('!'):
-                    blacklist = True
-                    break
-            if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
-                pkg = pdata.get('name', 'Unnamed package')
-                logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
-                return False
+        if os.path.exists(lock_copy):
+            bb.utils.movefile(lock_copy, lock_file)
+
+        # Add the shrinkwrap file as 'extrafiles'
+        shrinkwrap_copy = shrinkwrap_file + ".copy"
+        bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
+        extravalues.setdefault("extrafiles", {})
+        extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy
+
+        url_local = "npmsw://%s" % shrinkwrap_file
+        url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"
+
+        if dev:
+            url_local += ";dev=1"
+            url_recipe += ";dev=1"
+
+        # Add the npmsw url in the SRC_URI of the generated recipe
+        def _handle_srcuri(varname, origvalue, op, newlines):
+            """Update the version value and add the 'npmsw://' url"""
+            value = origvalue.replace("version=" + data["version"], "version=${PV}")
+            value = value.replace("version=latest", "version=${PV}")
+            values = [line.strip() for line in value.strip('\n').splitlines()]
+            values.append(url_recipe)
+            return values, None, 4, False
+
+        (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
+        lines_before[:] = [line.rstrip('\n') for line in newlines]
+
+        # In order to generate correct licence checksums in the recipe the
+        # dependencies have to be fetched again using the npmsw url
+        bb.note("Fetching npm dependencies ...")
+        bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
+        fetcher = bb.fetch2.Fetch([url_local], d)
+        fetcher.download()
+        fetcher.unpack(srctree)
+
+        bb.note("Handling licences ...")
+        (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
+        extravalues["LIC_FILES_CHKSUM"] = licfiles
+        split_pkg_licenses(guess_license(srctree, d), packages, lines_after, [])
+
+        classes.append("npm")
+        handled.append("buildsystem")
+
         return True
 
-
 def register_recipe_handlers(handlers):
+    """Register the npm handler"""
     handlers.append((NpmRecipeHandler(), 60))
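
Note: the _handle_srcuri callback above only rewrites the version fields and appends the npmsw:// entry; the rest of SRC_URI is left alone. A minimal standalone sketch of that substitution, with the package metadata and the sample npm:// URL invented for illustration (the real handler receives origvalue from bb.utils.edit_metadata):

    # Standalone illustration of the SRC_URI rewrite done by _handle_srcuri.
    data = {"version": "1.2.3"}          # assumed fetched package metadata
    url_recipe = "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"

    origvalue = "npm://registry.npmjs.org;package=mypkg;version=1.2.3"
    value = origvalue.replace("version=" + data["version"], "version=${PV}")
    value = value.replace("version=latest", "version=${PV}")
    values = [line.strip() for line in value.strip('\n').splitlines()]
    values.append(url_recipe)
    print(values)
    # ['npm://registry.npmjs.org;package=mypkg;version=${PV}',
    #  'npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json']
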
diff --git a/poky/scripts/lib/resulttool/report.py b/poky/scripts/lib/resulttool/report.py
index 692dd7a..7ceceac 100644
--- a/poky/scripts/lib/resulttool/report.py
+++ b/poky/scripts/lib/resulttool/report.py
@@ -212,7 +212,21 @@
                                  maxlen=maxlen)
         print(output)
 
-    def view_test_report(self, logger, source_dir, branch, commit, tag, use_regression_map, raw_test):
+    def view_test_report(self, logger, source_dir, branch, commit, tag, use_regression_map, raw_test, selected_test_case_only):
+        def print_selected_testcase_result(testresults, selected_test_case_only):
+            for testsuite in testresults:
+                for resultid in testresults[testsuite]:
+                    result = testresults[testsuite][resultid]['result']
+                    test_case_result = result.get(selected_test_case_only, {})
+                    if test_case_result.get('status'):
+                        print('Found selected test case result for %s from %s' % (selected_test_case_only,
+                                                                                           resultid))
+                        print(test_case_result['status'])
+                    else:
+                        print('Could not find selected test case result for %s from %s' % (selected_test_case_only,
+                                                                                           resultid))
+                    if test_case_result.get('log'):
+                        print(test_case_result['log'])
         test_count_reports = []
         configmap = resultutils.store_map
         if use_regression_map:
@@ -235,12 +249,18 @@
             for testsuite in testresults:
                 result = testresults[testsuite].get(raw_test, {})
                 if result:
-                    raw_results[testsuite] = result
+                    raw_results[testsuite] = {raw_test: result}
             if raw_results:
-                print(json.dumps(raw_results, sort_keys=True, indent=4))
+                if selected_test_case_only:
+                    print_selected_testcase_result(raw_results, selected_test_case_only)
+                else:
+                    print(json.dumps(raw_results, sort_keys=True, indent=4))
             else:
                 print('Could not find raw test result for %s' % raw_test)
             return 0
+        if selected_test_case_only:
+            print_selected_testcase_result(testresults, selected_test_case_only)
+            return 0
         for testsuite in testresults:
             for resultid in testresults[testsuite]:
                 skip = False
@@ -268,7 +288,7 @@
 def report(args, logger):
     report = ResultsTextReport()
     report.view_test_report(logger, args.source_dir, args.branch, args.commit, args.tag, args.use_regression_map,
-                            args.raw_test_only)
+                            args.raw_test_only, args.selected_test_case_only)
     return 0
 
 def register_commands(subparsers):
@@ -287,4 +307,7 @@
                               help='instead of the default "store_map", use the "regression_map" for report')
     parser_build.add_argument('-r', '--raw_test_only', default='',
                               help='output raw test result only for the user provided test result id')
-
+    parser_build.add_argument('-s', '--selected_test_case_only', default='',
+                              help='output only the result of the user-provided test case id; if both a test '
+                                   'result id and a test case id are provided, output the selected test case '
+                                   'result from that test result id')
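
Note: the -s option relies on the nested layout of the stored results: suite name, then result id, then a 'result' map keyed by test case id. A small sketch with invented names showing the shape print_selected_testcase_result() walks:

    # Hypothetical data in the shape print_selected_testcase_result() expects.
    testresults = {
        "oeselftest": {
            "oeselftest_fedora-31_qemux86-64_20200101": {
                "result": {
                    "ptestresult.glibc.math": {"status": "PASSED", "log": "..."},
                },
            },
        },
    }

    selected = "ptestresult.glibc.math"
    for testsuite in testresults:
        for resultid in testresults[testsuite]:
            result = testresults[testsuite][resultid]['result']
            case = result.get(selected, {})
            if case.get('status'):
                print('%s from %s: %s' % (selected, resultid, case['status']))
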
diff --git a/poky/scripts/lib/resulttool/resultutils.py b/poky/scripts/lib/resulttool/resultutils.py
index f0ae8ec..5fec01f 100644
--- a/poky/scripts/lib/resulttool/resultutils.py
+++ b/poky/scripts/lib/resulttool/resultutils.py
@@ -127,10 +127,7 @@
             data = logdata.get("compressed")
             data = base64.b64decode(data.encode("utf-8"))
             data = zlib.decompress(data)
-            try:
-                return data.decode("utf-8")
-            except UnicodeDecodeError:
-                return data
+            return data.decode("utf-8", errors='ignore')
     return None
 
 def ptestresult_get_log(results, section):
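
Note: switching to errors='ignore' means decompressed ptest logs always come back as str, even when the captured output contains bytes that are not valid UTF-8 (previously such logs were returned as raw bytes). A self-contained sketch of the difference, using an invented log blob:

    import base64, zlib

    raw = b"ptest log line\n\xff\xfe stray binary bytes\n"   # not valid UTF-8
    blob = base64.b64encode(zlib.compress(raw)).decode("utf-8")

    data = zlib.decompress(base64.b64decode(blob.encode("utf-8")))
    print(data.decode("utf-8", errors="ignore"))   # str, with the bad bytes dropped
    # data.decode("utf-8") would raise UnicodeDecodeError for this input
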
diff --git a/poky/scripts/lib/scriptutils.py b/poky/scripts/lib/scriptutils.py
index 45bdaf5..f92255d 100644
--- a/poky/scripts/lib/scriptutils.py
+++ b/poky/scripts/lib/scriptutils.py
@@ -77,7 +77,6 @@
 
 
 def load_plugins(logger, plugins, pluginpath):
-    import imp
 
     def load_plugin(name):
         logger.debug('Loading plugin %s' % name)
diff --git a/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index c8d9f12..22b4521 100644
--- a/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
 
 include common.wks.inc
 
-bootloader  --timeout=0  --append="vga=0 rw oprofile.timer=1 rootfstype=ext4 "
+bootloader  --timeout=0  --append="rw oprofile.timer=1 rootfstype=ext4 "
 
diff --git a/poky/scripts/lib/wic/engine.py b/poky/scripts/lib/wic/engine.py
index 7e66207..018815b 100644
--- a/poky/scripts/lib/wic/engine.py
+++ b/poky/scripts/lib/wic/engine.py
@@ -280,7 +280,7 @@
     def __getattr__(self, name):
         """Get path to the executable in a lazy way."""
         if name in ("mdir", "mcopy", "mdel", "mdeltree", "sfdisk", "e2fsck",
-                    "resize2fs", "mkswap", "mkdosfs", "debugfs"):
+                    "resize2fs", "mkswap", "mkdosfs", "debugfs","blkid"):
             aname = "_%s" % name
             if aname not in self.__dict__:
                 setattr(self, aname, find_executable(name, self.paths))
@@ -291,7 +291,7 @@
 
     def _get_part_image(self, pnum):
         if pnum not in self.partitions:
-            raise WicError("Partition %s is not in the image")
+            raise WicError("Partition %s is not in the image" % pnum)
         part = self.partitions[pnum]
         # check if fstype is supported
         for fstype in self.fstypes:
@@ -314,6 +314,9 @@
                     seek=self.partitions[pnum].start)
 
     def dir(self, pnum, path):
+        if pnum not in self.partitions:
+            raise WicError("Partition %s is not in the image" % pnum)
+
         if self.partitions[pnum].fstype.startswith('ext'):
             return exec_cmd("{} {} -R 'ls -l {}'".format(self.debugfs,
                                                          self._get_part_image(pnum),
@@ -323,16 +326,31 @@
                                                    self._get_part_image(pnum),
                                                    path))
 
-    def copy(self, src, pnum, path):
+    def copy(self, src, dest):
         """Copy partition image into wic image."""
+        pnum =  dest.part if isinstance(src, str) else src.part
+
         if self.partitions[pnum].fstype.startswith('ext'):
-            cmd = "printf 'cd {}\nwrite {} {}\n' | {} -w {}".\
-                      format(path, src, os.path.basename(src),
+            if isinstance(src, str):
+                cmd = "printf 'cd {}\nwrite {} {}\n' | {} -w {}".\
+                      format(os.path.dirname(dest.path), src, os.path.basename(src),
                              self.debugfs, self._get_part_image(pnum))
+            else: # copy from wic
+                # run both dump and rdump to support both files and directory
+                cmd = "printf 'cd {}\ndump /{} {}\nrdump /{} {}\n' | {} {}".\
+                      format(os.path.dirname(src.path), src.path,
+                             dest, src.path, dest, self.debugfs,
+                             self._get_part_image(pnum))
         else: # fat
-            cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
+            if isinstance(src, str):
+                cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
                                                   self._get_part_image(pnum),
-                                                  src, path)
+                                                  src, dest.path)
+            else:
+                cmd = "{} -i {} -snop ::{} {}".format(self.mcopy,
+                                                  self._get_part_image(pnum),
+                                                  src.path, dest)
+
         exec_cmd(cmd, as_shell=True)
         self._put_part_image(pnum)
 
@@ -424,7 +442,7 @@
             outf.flush()
 
         def read_ptable(path):
-            out = exec_cmd("{} -dJ {}".format(self.sfdisk, path))
+            out = exec_cmd("{} -J {}".format(self.sfdisk, path))
             return json.loads(out)
 
         def write_ptable(parts, target):
@@ -525,7 +543,8 @@
                         logger.info("creating swap partition {}".format(pnum))
                         label = part.get("name")
                         label_str = "-L {}".format(label) if label else ''
-                        uuid = part.get("uuid")
+                        out = exec_cmd("{} --probe {}".format(self.blkid, self._get_part_image(pnum)))
+                        uuid = out[out.index("UUID=\"")+6:out.index("UUID=\"")+42]
                         uuid_str = "-U {}".format(uuid) if uuid else ''
                         with open(partfname, 'w') as sparse:
                             os.ftruncate(sparse.fileno(), part['size'] * self._lsector_size)
@@ -551,11 +570,15 @@
 
 def wic_cp(args, native_sysroot):
     """
-    Copy local file or directory to the vfat partition of
+    Copy file or directory to/from the vfat/ext partition of
     partitioned image.
     """
-    disk = Disk(args.dest.image, native_sysroot)
-    disk.copy(args.src, args.dest.part, args.dest.path)
+    if isinstance(args.dest, str):
+        disk = Disk(args.src.image, native_sysroot)
+    else:
+        disk = Disk(args.dest.image, native_sysroot)
+    disk.copy(args.src, args.dest)
+
 
 def wic_rm(args, native_sysroot):
     """
diff --git a/poky/scripts/lib/wic/help.py b/poky/scripts/lib/wic/help.py
index 812ebe3..bd3a2b9 100644
--- a/poky/scripts/lib/wic/help.py
+++ b/poky/scripts/lib/wic/help.py
@@ -341,12 +341,15 @@
 
 wic_cp_usage = """
 
- Copy files and directories to the vfat or ext* partition
+ Copy files and directories to/from the vfat or ext* partition
 
- usage: wic cp <src> <image>:<partition>[<path>] [--native-sysroot <path>]
+ usage: wic cp <src> <dest> [--native-sysroot <path>]
 
- This command  copies local files or directories to the vfat or ext* partitions
-of partitioned  image.
+ The source/destination image is specified in the format <image>:<partition>[<path>]
+
+ This command copies files or directories either
+  - from local to vfat or ext* partitions of partitioned image
+  - from vfat or ext* partitions of partitioned image to local
 
  See 'wic help cp' for more detailed instructions.
 
@@ -355,16 +358,18 @@
 wic_cp_help = """
 
 NAME
-    wic cp - copy files and directories to the vfat or ext* partitions
+    wic cp - copy files and directories to/from the vfat or ext* partitions
 
 SYNOPSIS
-    wic cp <src> <image>:<partition>
-    wic cp <src> <image>:<partition><path>
-    wic cp <src> <image>:<partition><path> --native-sysroot <path>
+    wic cp <src> <dest>:<partition>
+    wic cp <src>:<partition> <dest>
+    wic cp <src> <dest-image>:<partition><path>
+    wic cp <src> <dest-image>:<partition><path> --native-sysroot <path>
 
 DESCRIPTION
-    This command copies files and directories to the vfat or ext* partition of
-    the partitioned image.
+    This command copies files or directories either
+      - from local to vfat or ext* partitions of partitioned image
+      - from vfat or ext* partitions of partitioned image to local
 
     The first form of it copies file or directory to the root directory of
     the partition:
@@ -397,6 +402,10 @@
                4 files                   0 bytes
                                 15 675 392 bytes free
 
+    The third form of the command copies file or directory from the specified directory
+    on the partition to local:
+       $ wic cp tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/vmlinuz test
+
     The -n option is used to specify the path to the native sysroot
     containing the tools(parted and mtools) to use.
 """
@@ -527,7 +536,8 @@
 
     Source plugins can also be implemented and added by external
     layers - any plugins found in a scripts/lib/wic/plugins/source/
-    directory in an external layer will also be made available.
+    or lib/wic/plugins/source/ directory in an external layer will
+    also be made available.
 
     When the wic implementation needs to invoke a partition-specific
     implementation, it looks for the plugin that has the same name as
@@ -960,6 +970,26 @@
                          is omitted, not the directory itself. This option only
                          has an effect with the rootfs source plugin.
 
+         --include-path: This option is specific to wic. It adds the contents
+                         of the given path or a rootfs to the resulting image.
+                         The option contains two fields, the origin and the
+                         destination. When the origin is a rootfs, it follows
+                         the same logic as the rootfs-dir argument and the
+                         permissions and owners are kept. When the origin is a
+                         path, it is relative to the directory in which wic is
+                         running, not the rootfs itself, so using an absolute
+                         path is recommended, and the owner and group are set to
+                         root:root. If no destination is given it is
+                         automatically set to the root of the rootfs. This
+                         option only has an effect with the rootfs source
+                         plugin.
+
+         --change-directory: This option is specific to wic. It changes to the
+                             given directory before copying the files. This
+                             option is useful when splitting a rootfs into
+                             multiple partitions while keeping the correct
+                             permissions and usernames in all the partitions.
+
          --extra-space: This option is specific to wic. It adds extra
                         space after the space filled by the content
                         of the partition. The final size can go
diff --git a/poky/scripts/lib/wic/ksparser.py b/poky/scripts/lib/wic/ksparser.py
index 6a643ba..b8befe7 100644
--- a/poky/scripts/lib/wic/ksparser.py
+++ b/poky/scripts/lib/wic/ksparser.py
@@ -137,6 +137,8 @@
         part.add_argument('--active', action='store_true')
         part.add_argument('--align', type=int)
         part.add_argument('--exclude-path', nargs='+')
+        part.add_argument('--include-path', nargs='+', action='append')
+        part.add_argument('--change-directory')
         part.add_argument("--extra-space", type=sizetype)
         part.add_argument('--fsoptions', dest='fsopts')
         part.add_argument('--fstype', default='vfat',
@@ -245,6 +247,11 @@
                     elif line.startswith('bootloader'):
                         if not self.bootloader:
                             self.bootloader = parsed
+                            # Concatenate the strings set in APPEND
+                            append_var = get_bitbake_var("APPEND")
+                            if append_var:
+                                self.bootloader.append = ' '.join(filter(None, \
+                                                         (self.bootloader.append, append_var)))
                         else:
                             err = "%s:%d: more than one bootloader specified" \
                                       % (confpath, lineno)
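
Note: the two new part options behave like plain argparse options: --include-path accepts one or two fields (origin, optional destination) and may be given more than once, while --change-directory takes a single directory. A standalone sketch of how such a part line parses; the sample paths are invented:

    import argparse

    # Reproduce just the two new part options added above.
    part = argparse.ArgumentParser(prog="part")
    part.add_argument('--include-path', nargs='+', action='append')
    part.add_argument('--change-directory')

    args = part.parse_args(
        "--include-path license-files/ usr/share/licenses "
        "--include-path core-image-minimal "
        "--change-directory usr".split())

    print(args.include_path)      # [['license-files/', 'usr/share/licenses'],
                                  #  ['core-image-minimal']]
    print(args.change_directory)  # usr
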
diff --git a/poky/scripts/lib/wic/misc.py b/poky/scripts/lib/wic/misc.py
index 1f199b9..91975ba 100644
--- a/poky/scripts/lib/wic/misc.py
+++ b/poky/scripts/lib/wic/misc.py
@@ -45,7 +45,8 @@
                   "parted": "parted",
                   "sfdisk": "util-linux",
                   "sgdisk": "gptfdisk",
-                  "syslinux": "syslinux"
+                  "syslinux": "syslinux",
+                  "tar": "tar"
                  }
 
 def runtool(cmdln_or_args):
@@ -112,6 +113,15 @@
     """
     return _exec_cmd(cmd_and_args, as_shell)[1]
 
+def find_executable(cmd, paths):
+    recipe = cmd
+    if recipe in NATIVE_RECIPES:
+        recipe =  NATIVE_RECIPES[recipe]
+    provided = get_bitbake_var("ASSUME_PROVIDED")
+    if provided and "%s-native" % recipe in provided:
+        return True
+
+    return spawn.find_executable(cmd, paths)
 
 def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""):
     """
@@ -136,7 +146,7 @@
     logger.debug("exec_native_cmd: %s", native_cmd_and_args)
 
     # If the command isn't in the native sysroot say we failed.
-    if spawn.find_executable(args[0], native_paths):
+    if find_executable(args[0], native_paths):
         ret, out = _exec_cmd(native_cmd_and_args, True)
     else:
         ret = 127
diff --git a/poky/scripts/lib/wic/partition.py b/poky/scripts/lib/wic/partition.py
index d809408..7d9dd61 100644
--- a/poky/scripts/lib/wic/partition.py
+++ b/poky/scripts/lib/wic/partition.py
@@ -30,6 +30,8 @@
         self.device = None
         self.extra_space = args.extra_space
         self.exclude_path = args.exclude_path
+        self.include_path = args.include_path
+        self.change_directory = args.change_directory
         self.fsopts = args.fsopts
         self.fstype = args.fstype
         self.label = args.label
@@ -189,7 +191,7 @@
                            (self.mountpoint, self.size, self.fixed_size))
 
     def prepare_rootfs(self, cr_workdir, oe_builddir, rootfs_dir,
-                       native_sysroot, real_rootfs = True):
+                       native_sysroot, real_rootfs = True, pseudo_dir = None):
         """
         Prepare content for a rootfs partition i.e. create a partition
         and fill it from a /rootfs dir.
@@ -197,15 +199,14 @@
         Currently handles ext2/3/4, btrfs, vfat and squashfs.
         """
         p_prefix = os.environ.get("PSEUDO_PREFIX", "%s/usr" % native_sysroot)
-        p_localstatedir = os.environ.get("PSEUDO_LOCALSTATEDIR",
-                                         "%s/../pseudo" %  rootfs_dir)
-        p_passwd = os.environ.get("PSEUDO_PASSWD", rootfs_dir)
-        p_nosymlinkexp = os.environ.get("PSEUDO_NOSYMLINKEXP", "1")
-        pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
-        pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % p_localstatedir
-        pseudo += "export PSEUDO_PASSWD=%s;" % p_passwd
-        pseudo += "export PSEUDO_NOSYMLINKEXP=%s;" % p_nosymlinkexp
-        pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
+        if (pseudo_dir):
+            pseudo = "export PSEUDO_PREFIX=%s;" % p_prefix
+            pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
+            pseudo += "export PSEUDO_PASSWD=%s;" % rootfs_dir
+            pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
+            pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
+        else:
+            pseudo = None
 
         rootfs = "%s/rootfs_%s.%s.%s" % (cr_workdir, self.label,
                                          self.lineno, self.fstype)
diff --git a/poky/scripts/lib/wic/pluginbase.py b/poky/scripts/lib/wic/pluginbase.py
index f74d643..d9b4e57 100644
--- a/poky/scripts/lib/wic/pluginbase.py
+++ b/poky/scripts/lib/wic/pluginbase.py
@@ -18,7 +18,7 @@
 
 PLUGIN_TYPES = ["imager", "source"]
 
-SCRIPTS_PLUGIN_DIR = "scripts/lib/wic/plugins"
+SCRIPTS_PLUGIN_DIR = ["scripts/lib/wic/plugins", "lib/wic/plugins"]
 
 logger = logging.getLogger('wic')
 
@@ -38,10 +38,11 @@
             cls._plugin_dirs = [os.path.join(os.path.dirname(__file__), 'plugins')]
             layers = get_bitbake_var("BBLAYERS") or ''
             for layer_path in layers.split():
-                path = os.path.join(layer_path, SCRIPTS_PLUGIN_DIR)
-                path = os.path.abspath(os.path.expanduser(path))
-                if path not in cls._plugin_dirs and os.path.isdir(path):
-                    cls._plugin_dirs.insert(0, path)
+                for script_plugin_dir in SCRIPTS_PLUGIN_DIR:
+                    path = os.path.join(layer_path, script_plugin_dir)
+                    path = os.path.abspath(os.path.expanduser(path))
+                    if path not in cls._plugin_dirs and os.path.isdir(path):
+                        cls._plugin_dirs.insert(0, path)
 
         if ptype not in PLUGINS:
             # load all ptype plugins
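
Note: each BBLAYERS entry is now probed for both scripts/lib/wic/plugins and lib/wic/plugins, and matching directories are pushed to the front of the search list. A short sketch of the resulting ordering with invented layer paths (the os.path.isdir() check is noted but skipped so the list is visible without a real tree):

    import os

    SCRIPTS_PLUGIN_DIR = ["scripts/lib/wic/plugins", "lib/wic/plugins"]
    layers = "/work/poky/meta /work/meta-custom"      # assumed BBLAYERS value

    plugin_dirs = ["/usr/share/wic/plugins"]          # stand-in for the built-in dir
    for layer_path in layers.split():
        for script_plugin_dir in SCRIPTS_PLUGIN_DIR:
            path = os.path.abspath(os.path.join(layer_path, script_plugin_dir))
            if path not in plugin_dirs:               # real code also requires os.path.isdir(path)
                plugin_dirs.insert(0, path)

    print(plugin_dirs)   # layer plugin dirs first, built-in dir last
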
diff --git a/poky/scripts/lib/wic/plugins/imager/direct.py b/poky/scripts/lib/wic/plugins/imager/direct.py
index 2441cc3..2d06c24 100644
--- a/poky/scripts/lib/wic/plugins/imager/direct.py
+++ b/poky/scripts/lib/wic/plugins/imager/direct.py
@@ -403,7 +403,7 @@
                 # Reserve a sector for EBR for every logical partition
                 # before alignment is performed.
                 if part.type == 'logical':
-                    self.offset += 1
+                    self.offset += 2
 
             align_sectors = 0
             if part.align:
@@ -446,7 +446,7 @@
                         self.extendedpart = part.num
                     else:
                         self.extended_size_sec += align_sectors
-                    self.extended_size_sec += part.size_sec + 1
+                    self.extended_size_sec += part.size_sec + 2
                 else:
                     self.primary_part_num += 1
                     part.num = self.primary_part_num
@@ -512,7 +512,7 @@
                 # add a sector at the back, so that there is enough
                 # room for all logical partitions.
                 self._create_partition(self.path, "extended",
-                                       None, part.start - 1,
+                                       None, part.start - 2,
                                        self.extended_size_sec)
 
             if part.fstype == "swap":
@@ -580,9 +580,7 @@
                                 self.native_sysroot)
 
     def cleanup(self):
-        # remove partition images
-        for image in set(self.partimages):
-            os.remove(image)
+        pass
 
     def assemble(self):
         logger.debug("Installing partitions")
diff --git a/poky/scripts/lib/wic/plugins/source/rawcopy.py b/poky/scripts/lib/wic/plugins/source/rawcopy.py
index 82970ce..3c4997d 100644
--- a/poky/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/poky/scripts/lib/wic/plugins/source/rawcopy.py
@@ -57,7 +57,7 @@
             raise WicError("No file specified")
 
         src = os.path.join(kernel_dir, source_params['file'])
-        dst = os.path.join(cr_workdir, "%s.%s" % (source_params['file'], part.lineno))
+        dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
 
         if not os.path.exists(os.path.dirname(dst)):
             os.makedirs(os.path.dirname(dst))
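
Note: taking the basename keeps the temporary copy directly inside cr_workdir even when the wks 'file' parameter points into a subdirectory of the deploy directory. A quick illustration with invented paths:

    import os

    cr_workdir = "/tmp/wic-workdir"          # assumed work directory
    source_file = "bootfiles/u-boot.bin"     # 'file' parameter with a subdirectory
    lineno = 5

    old_dst = os.path.join(cr_workdir, "%s.%s" % (source_file, lineno))
    new_dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_file), lineno))

    print(old_dst)   # /tmp/wic-workdir/bootfiles/u-boot.bin.5
    print(new_dst)   # /tmp/wic-workdir/u-boot.bin.5
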
diff --git a/poky/scripts/lib/wic/plugins/source/rootfs.py b/poky/scripts/lib/wic/plugins/source/rootfs.py
index e26e95b..f1db83f 100644
--- a/poky/scripts/lib/wic/plugins/source/rootfs.py
+++ b/poky/scripts/lib/wic/plugins/source/rootfs.py
@@ -17,10 +17,11 @@
 import sys
 
 from oe.path import copyhardlinktree
+from pathlib import Path
 
 from wic import WicError
 from wic.pluginbase import SourcePlugin
-from wic.misc import get_bitbake_var
+from wic.misc import get_bitbake_var, exec_native_cmd
 
 logger = logging.getLogger('wic')
 
@@ -32,6 +33,22 @@
     name = 'rootfs'
 
     @staticmethod
+    def __validate_path(cmd, rootfs_dir, path):
+        if os.path.isabs(path):
+            logger.error("%s: Must be relative: %s" % (cmd, path))
+            sys.exit(1)
+
+        # Disallow climbing outside of parent directory using '..',
+        # because doing so could be quite disastrous (we will delete the
+        # directory, or modify a directory outside OpenEmbedded).
+        full_path = os.path.realpath(os.path.join(rootfs_dir, path))
+        if not full_path.startswith(os.path.realpath(rootfs_dir)):
+            logger.error("%s: Must point inside the rootfs: %s" % (cmd, path))
+            sys.exit(1)
+
+        return full_path
+
+    @staticmethod
     def __get_rootfs_dir(rootfs_dir):
         if os.path.isdir(rootfs_dir):
             return os.path.realpath(rootfs_dir)
@@ -44,6 +61,15 @@
 
         return os.path.realpath(image_rootfs_dir)
 
+    @staticmethod
+    def __get_pseudo(native_sysroot, rootfs, pseudo_dir):
+        pseudo = "export PSEUDO_PREFIX=%s/usr;" % native_sysroot
+        pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % pseudo_dir
+        pseudo += "export PSEUDO_PASSWD=%s;" % rootfs
+        pseudo += "export PSEUDO_NOSYMLINKEXP=1;"
+        pseudo += "%s " % get_bitbake_var("FAKEROOTCMD")
+        return pseudo
+
     @classmethod
     def do_prepare_partition(cls, part, source_params, cr, cr_workdir,
                              oe_builddir, bootimg_dir, kernel_dir,
@@ -68,10 +94,16 @@
                                "it is not a valid path, exiting" % part.rootfs_dir)
 
         part.rootfs_dir = cls.__get_rootfs_dir(rootfs_dir)
+        pseudo_dir = os.path.join(part.rootfs_dir, "../pseudo")
+        if not os.path.lexists(pseudo_dir):
+            logger.warn("%s folder does not exist. "
+                        "Usernames and permissions will be invalid." % pseudo_dir)
+            pseudo_dir = None
 
         new_rootfs = None
+        new_pseudo = None
         # Handle excluded paths.
-        if part.exclude_path is not None:
+        if part.exclude_path or part.include_path or part.change_directory:
             # We need a new rootfs directory we can delete files from. Copy to
             # workdir.
             new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs%d" % part.lineno))
@@ -79,22 +111,93 @@
             if os.path.lexists(new_rootfs):
                 shutil.rmtree(os.path.join(new_rootfs))
 
-            copyhardlinktree(part.rootfs_dir, new_rootfs)
+            if part.change_directory:
+                cd = part.change_directory
+                if cd[-1] == '/':
+                    cd = cd[:-1]
+                orig_dir = cls.__validate_path("--change-directory", part.rootfs_dir, cd)
+            else:
+                orig_dir = part.rootfs_dir
+            copyhardlinktree(orig_dir, new_rootfs)
 
-            for orig_path in part.exclude_path:
+            # Convert the pseudo directory to its new location
+            if (pseudo_dir):
+                new_pseudo = os.path.realpath(
+                             os.path.join(cr_workdir, "pseudo%d" % part.lineno))
+                if os.path.lexists(new_pseudo):
+                    shutil.rmtree(new_pseudo)
+                os.mkdir(new_pseudo)
+                shutil.copy(os.path.join(pseudo_dir, "files.db"),
+                            os.path.join(new_pseudo, "files.db"))
+
+                pseudo_cmd = "%s -B -m %s -M %s" % (cls.__get_pseudo(native_sysroot,
+                                                                     new_rootfs,
+                                                                     new_pseudo),
+                                                    orig_dir, new_rootfs)
+                exec_native_cmd(pseudo_cmd, native_sysroot)
+
+            for in_path in part.include_path or []:
+                #parse arguments
+                include_path = in_path[0]
+                if len(in_path) > 2:
+                    logger.error("Invalid number of arguments for include-path")
+                    sys.exit(1)
+                if len(in_path) == 2:
+                    path = in_path[1]
+                else:
+                    path = None
+
+                # Pack files to be included into a tar file.
+                # We need to create a tar file, because that way we can keep the
+                # permissions from the files even when they belong to different
+                # pseudo environments.
+                # If we simply copy files using copyhardlinktree/copytree... the
+                # copied files will belong to the user running wic.
+                tar_file = os.path.realpath(
+                           os.path.join(cr_workdir, "include-path%d.tar" % part.lineno))
+                if os.path.isfile(include_path):
+                    parent = os.path.dirname(os.path.realpath(include_path))
+                    tar_cmd = "tar c --owner=root --group=root -f %s -C %s %s" % (
+                                tar_file, parent, os.path.relpath(include_path, parent))
+                    exec_native_cmd(tar_cmd, native_sysroot)
+                else:
+                    if include_path in krootfs_dir:
+                        include_path = krootfs_dir[include_path]
+                    include_path = cls.__get_rootfs_dir(include_path)
+                    include_pseudo = os.path.join(include_path, "../pseudo")
+                    if os.path.lexists(include_pseudo):
+                        pseudo = cls.__get_pseudo(native_sysroot, include_path,
+                                                  include_pseudo)
+                        tar_cmd = "tar cf %s -C %s ." % (tar_file, include_path)
+                    else:
+                        pseudo = None
+                        tar_cmd = "tar c --owner=root --group=root -f %s -C %s ." % (
+                                tar_file, include_path)
+                    exec_native_cmd(tar_cmd, native_sysroot, pseudo)
+
+                #create destination
+                if path:
+                    destination = cls.__validate_path("--include-path", new_rootfs, path)
+                    Path(destination).mkdir(parents=True, exist_ok=True)
+                else:
+                    destination = new_rootfs
+
+                #extract destination
+                untar_cmd = "tar xf %s -C %s" % (tar_file, destination)
+                if new_pseudo:
+                    pseudo = cls.__get_pseudo(native_sysroot, new_rootfs, new_pseudo)
+                else:
+                    pseudo = None
+                exec_native_cmd(untar_cmd, native_sysroot, pseudo)
+                os.remove(tar_file)
+
+            for orig_path in part.exclude_path or []:
                 path = orig_path
-                if os.path.isabs(path):
-                    logger.error("Must be relative: --exclude-path=%s" % orig_path)
-                    sys.exit(1)
 
-                full_path = os.path.realpath(os.path.join(new_rootfs, path))
+                full_path = cls.__validate_path("--exclude-path", new_rootfs, path)
 
-                # Disallow climbing outside of parent directory using '..',
-                # because doing so could be quite disastrous (we will delete the
-                # directory).
-                if not full_path.startswith(new_rootfs):
-                    logger.error("'%s' points to a path outside the rootfs" % orig_path)
-                    sys.exit(1)
+                if not os.path.lexists(full_path):
+                    continue
 
                 if path.endswith(os.sep):
                     # Delete content only.
@@ -109,4 +212,5 @@
                     shutil.rmtree(full_path)
 
         part.prepare_rootfs(cr_workdir, oe_builddir,
-                            new_rootfs or part.rootfs_dir, native_sysroot)
+                            new_rootfs or part.rootfs_dir, native_sysroot,
+                            pseudo_dir = new_pseudo or pseudo_dir)
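
Note: both --include-path and --exclude-path arguments now go through __validate_path(), which rejects absolute paths and anything that climbs out of the rootfs via '..'. A standalone sketch of that check with invented paths (error handling reduced to returning None):

    import os

    def validate_path(rootfs_dir, path):
        """Return the resolved path, or None if it falls outside rootfs_dir."""
        if os.path.isabs(path):
            return None
        full_path = os.path.realpath(os.path.join(rootfs_dir, path))
        if not full_path.startswith(os.path.realpath(rootfs_dir)):
            return None
        return full_path

    rootfs = "/tmp/work/rootfs"
    print(validate_path(rootfs, "usr/share/licenses"))   # resolved path inside the rootfs
    print(validate_path(rootfs, "../../etc"))            # None: climbs out of the rootfs
    print(validate_path(rootfs, "/etc"))                 # None: absolute paths are rejected
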
diff --git a/poky/scripts/oe-build-perf-report b/poky/scripts/oe-build-perf-report
index 21bde7e..e781f4f 100755
--- a/poky/scripts/oe-build-perf-report
+++ b/poky/scripts/oe-build-perf-report
@@ -372,7 +372,7 @@
                                chart_opts=chart_opts))
 
 
-def get_buildstats(repo, notes_ref, revs, outdir=None):
+def get_buildstats(repo, notes_ref, notes_ref2, revs, outdir=None):
     """Get the buildstats from git notes"""
     full_ref = 'refs/notes/' + notes_ref
     if not repo.rev_parse(full_ref):
@@ -391,8 +391,13 @@
         for tag in rev.tags:
             log.debug('    %s', tag)
             try:
-                bs_all = json.loads(repo.run_cmd(['notes', '--ref', notes_ref,
-                                                  'show', tag + '^0']))
+                try:
+                    bs_all = json.loads(repo.run_cmd(['notes', '--ref', notes_ref, 'show', tag + '^0']))
+                except GitError:
+                    if notes_ref2:
+                        bs_all = json.loads(repo.run_cmd(['notes', '--ref', notes_ref2, 'show', tag + '^0']))
+                    else:
+                        raise
             except GitError:
                 log.warning("Buildstats not found for %s", tag)
                 bs_all = {}
@@ -589,9 +594,12 @@
     buildstats = None
     if args.dump_buildstats or args.html:
         outdir = 'oe-build-perf-buildstats' if args.dump_buildstats else None
-        notes_ref = 'buildstats/{}/{}/{}'.format(args.hostname, args.branch,
-                                                 args.machine)
-        buildstats = get_buildstats(repo, notes_ref, [rev_l, rev_r], outdir)
+        notes_ref = 'buildstats/{}/{}/{}'.format(args.hostname, args.branch, args.machine)
+        notes_ref2 = None
+        if args.branch2:
+            notes_ref = 'buildstats/{}/{}/{}'.format(args.hostname, args.branch2, args.machine)
+            notes_ref2 = 'buildstats/{}/{}/{}'.format(args.hostname, args.branch, args.machine)
+        buildstats = get_buildstats(repo, notes_ref, notes_ref2, [rev_l, rev_r], outdir)
 
     # Print report
     if not args.html:
diff --git a/poky/scripts/oe-build-perf-test b/poky/scripts/oe-build-perf-test
index 0b13f24..00e00b4 100755
--- a/poky/scripts/oe-build-perf-test
+++ b/poky/scripts/oe-build-perf-test
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
 #
 # Build performance test script
 #
diff --git a/poky/scripts/oe-buildenv-internal b/poky/scripts/oe-buildenv-internal
index 96bb0c3..ba0a9b4 100755
--- a/poky/scripts/oe-buildenv-internal
+++ b/poky/scripts/oe-buildenv-internal
@@ -29,22 +29,15 @@
     return 1
 fi
 
-py_v27_check=$(python2 -c 'import sys; print sys.version_info >= (2,7,3)')
-if [ "$py_v27_check" != "True" ]; then
-    echo >&2 "OpenEmbedded requires 'python' to be python v2 (>= 2.7.3), not python v3."
-    echo >&2 "Please upgrade your python v2."
-fi
-unset py_v27_check
-
 # We potentially have code that doesn't parse correctly with older versions 
 # of Python, and rather than fixing that and being eternally vigilant for 
 # any other new feature use, just check the version here.
-py_v34_check=$(python3 -c 'import sys; print(sys.version_info >= (3,4,0))')
-if [ "$py_v34_check" != "True" ]; then
-    echo >&2 "BitBake requires Python 3.4.0 or later as 'python3'"
+py_v35_check=$(python3 -c 'import sys; print(sys.version_info >= (3,5,0))')
+if [ "$py_v35_check" != "True" ]; then
+    echo >&2 "BitBake requires Python 3.5.0 or later as 'python3' (scripts/install-buildtools can be used if needed)"
     return 1
 fi
-unset py_v34_check
+unset py_v35_check
 
 if [ -z "$BDIR" ]; then
     if [ -z "$1" ]; then
@@ -113,7 +106,8 @@
 HTTPS_PROXY https_proxy FTP_PROXY ftp_proxy FTPS_PROXY ftps_proxy ALL_PROXY \
 all_proxy NO_PROXY no_proxy SSH_AGENT_PID SSH_AUTH_SOCK BB_SRCREV_POLICY \
 SDKMACHINE BB_NUMBER_THREADS BB_NO_NETWORK PARALLEL_MAKE GIT_PROXY_COMMAND \
-SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE"
+SOCKS5_PASSWD SOCKS5_USER SCREENDIR STAMPS_DIR BBPATH_EXTRA BB_SETSCENE_ENFORCE \
+BB_LOGCONFIG"
 
 BB_ENV_EXTRAWHITE="$(echo $BB_ENV_EXTRAWHITE $BB_ENV_EXTRAWHITE_OE | tr ' ' '\n' | LC_ALL=C sort --unique | tr '\n' ' ')"
 
diff --git a/poky/scripts/oe-pkgdata-browser b/poky/scripts/oe-pkgdata-browser
new file mode 100755
index 0000000..8d22318
--- /dev/null
+++ b/poky/scripts/oe-pkgdata-browser
@@ -0,0 +1,253 @@
+#! /usr/bin/env python3
+
+import os, sys, enum, ast
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+
+import scriptpath
+bitbakepath = scriptpath.add_bitbake_lib_path()
+if not bitbakepath:
+    print("Unable to find bitbake by searching parent directory of this script or PATH")
+    sys.exit(1)
+import bb
+
+import gi
+gi.require_version('Gtk', '3.0')
+from gi.repository import Gtk, Gdk, GObject
+
+RecipeColumns = enum.IntEnum("RecipeColumns", {"Recipe": 0})
+PackageColumns = enum.IntEnum("PackageColumns", {"Package": 0, "Size": 1})
+FileColumns = enum.IntEnum("FileColumns", {"Filename": 0, "Size": 1})
+
+import time
+def timeit(f):
+    def timed(*args, **kw):
+        ts = time.time()
+        print ("func:%r calling" % f.__name__)
+        result = f(*args, **kw)
+        te = time.time()
+        print ('func:%r args:[%r, %r] took: %2.4f sec' % \
+          (f.__name__, args, kw, te-ts))
+        return result
+    return timed
+
+def human_size(nbytes):
+    import math
+    suffixes = ['B', 'kB', 'MB', 'GB', 'TB', 'PB']
+    human = nbytes
+    rank = 0
+    if nbytes != 0:
+        rank = int((math.log10(nbytes)) / 3)
+        rank = min(rank, len(suffixes) - 1)
+        human = nbytes / (1000.0 ** rank)
+    f = ('%.2f' % human).rstrip('0').rstrip('.')
+    return '%s %s' % (f, suffixes[rank])
+
+def load(filename, suffix=None):
+    from configparser import ConfigParser
+    from itertools import chain
+
+    parser = ConfigParser()
+    if suffix:
+        parser.optionxform = lambda option: option.replace("_" + suffix, "")
+    with open(filename) as lines:
+        lines = chain(("[fake]",), lines)
+        parser.read_file(lines)
+
+    # TODO extract the data and put it into a real dict so we can transform some
+    # values to ints?
+    return parser["fake"]
+
+def find_pkgdata():
+    import subprocess
+    output = subprocess.check_output(("bitbake", "-e"), universal_newlines=True)
+    for line in output.splitlines():
+        if line.startswith("PKGDATA_DIR="):
+            return line.split("=", 1)[1].strip("\'\"")
+    # TODO exception or something
+    return None
+
+def packages_in_recipe(pkgdata, recipe):
+    """
+    Load the recipe pkgdata to determine the list of runtime packages.
+    """
+    data = load(os.path.join(pkgdata, recipe))
+    packages = data["PACKAGES"].split()
+    return packages
+
+def load_runtime_package(pkgdata, package):
+    return load(os.path.join(pkgdata, "runtime", package), suffix=package)
+
+def recipe_from_package(pkgdata, package):
+    data = load(os.path.join(pkgdata, "runtime", package), suffix=package)
+    return data["PN"]
+
+def summary(data):
+    s = ""
+    s += "{0[PKG]} {0[PKGV]}-{0[PKGR]}\n{0[LICENSE]}\n{0[SUMMARY]}\n".format(data)
+
+    return s
+
+
+class PkgUi():
+    def __init__(self, pkgdata):
+        self.pkgdata = pkgdata
+        self.current_recipe = None
+        self.recipe_iters = {}
+        self.package_iters = {}
+
+        builder = Gtk.Builder()
+        builder.add_from_file(os.path.join(os.path.dirname(__file__), "oe-pkgdata-browser.glade"))
+
+        self.window = builder.get_object("window")
+        self.window.connect("delete-event", Gtk.main_quit)
+
+        self.recipe_store = builder.get_object("recipe_store")
+        self.recipe_view = builder.get_object("recipe_view")
+        self.package_store = builder.get_object("package_store")
+        self.package_view = builder.get_object("package_view")
+
+        # Somehow resizable does not get set via builder xml
+        package_name_column = builder.get_object("package_name_column")
+        package_name_column.set_resizable(True)
+        file_name_column = builder.get_object("file_name_column")
+        file_name_column.set_resizable(True)
+
+        self.recipe_view.get_selection().connect("changed", self.on_recipe_changed)
+        self.package_view.get_selection().connect("changed", self.on_package_changed)
+
+        self.package_store.set_sort_column_id(PackageColumns.Package, Gtk.SortType.ASCENDING)
+        builder.get_object("package_size_column").set_cell_data_func(builder.get_object("package_size_cell"), lambda column, cell, model, iter, data: cell.set_property("text", human_size(model[iter][PackageColumns.Size])))
+
+        self.label = builder.get_object("label1")
+        self.depends_label = builder.get_object("depends_label")
+        self.recommends_label = builder.get_object("recommends_label")
+        self.suggests_label = builder.get_object("suggests_label")
+        self.provides_label = builder.get_object("provides_label")
+
+        self.depends_label.connect("activate-link", self.on_link_activate)
+        self.recommends_label.connect("activate-link", self.on_link_activate)
+        self.suggests_label.connect("activate-link", self.on_link_activate)
+
+        self.file_store = builder.get_object("file_store")
+        self.file_store.set_sort_column_id(FileColumns.Filename, Gtk.SortType.ASCENDING)
+        builder.get_object("file_size_column").set_cell_data_func(builder.get_object("file_size_cell"), lambda column, cell, model, iter, data: cell.set_property("text", human_size(model[iter][FileColumns.Size])))
+
+        self.files_view = builder.get_object("files_scrollview")
+        self.files_label = builder.get_object("files_label")
+
+        self.load_recipes()
+
+        self.recipe_view.set_cursor(Gtk.TreePath.new_first())
+
+        self.window.show()
+
+    def on_link_activate(self, label, url_string):
+        from urllib.parse import urlparse
+        url = urlparse(url_string)
+        if url.scheme == "package":
+            package = url.path
+            recipe = recipe_from_package(self.pkgdata, package)
+
+            it = self.recipe_iters[recipe]
+            path = self.recipe_store.get_path(it)
+            self.recipe_view.set_cursor(path)
+            self.recipe_view.scroll_to_cell(path)
+
+            self.on_recipe_changed(self.recipe_view.get_selection())
+
+            it = self.package_iters[package]
+            path = self.package_store.get_path(it)
+            self.package_view.set_cursor(path)
+            self.package_view.scroll_to_cell(path)
+
+            return True
+        else:
+            return False
+
+    def on_recipe_changed(self, selection):
+        self.package_store.clear()
+        self.package_iters = {}
+
+        (model, it) = selection.get_selected()
+        if not it:
+            return
+
+        recipe = model[it][RecipeColumns.Recipe]
+        packages = packages_in_recipe(self.pkgdata, recipe)
+        for package in packages:
+            # TODO also show PKG after debian-renaming?
+            data = load_runtime_package(self.pkgdata, package)
+            # TODO stash data to avoid reading in on_package_changed
+            self.package_iters[package] = self.package_store.append([package, int(data["PKGSIZE"])])
+
+        package = recipe if recipe in packages else sorted(packages)[0]
+        path = self.package_store.get_path(self.package_iters[package])
+        self.package_view.set_cursor(path)
+        self.package_view.scroll_to_cell(path)
+
+    def on_package_changed(self, selection):
+        self.label.set_text("")
+        self.file_store.clear()
+        self.depends_label.hide()
+        self.recommends_label.hide()
+        self.suggests_label.hide()
+        self.provides_label.hide()
+        self.files_view.hide()
+        self.files_label.hide()
+
+        (model, it) = selection.get_selected()
+        if it is None:
+            return
+
+        package = model[it][PackageColumns.Package]
+        data = load_runtime_package(self.pkgdata, package)
+
+        self.label.set_text(summary(data))
+
+        files = ast.literal_eval(data["FILES_INFO"])
+        if files:
+            self.files_label.set_text("{0} files take {1}.".format(len(files), human_size(int(data["PKGSIZE"]))))
+            self.files_view.show()
+            for filename, size in files.items():
+                self.file_store.append([filename, size])
+        else:
+            self.files_view.hide()
+            self.files_label.set_text("This package has no files.")
+        self.files_label.show()
+
+        def update_deps(field, prefix, label, clickable=True):
+            if field in data:
+                l = []
+                for name, version in bb.utils.explode_dep_versions2(data[field]).items():
+                    if clickable:
+                        l.append("<a href='package:{0}'>{0}</a> {1}".format(name, " ".join(version)).strip())
+                    else:
+                        l.append("{0} {1}".format(name, " ".join(version)).strip())
+                label.set_markup(prefix + ", ".join(l))
+                label.show()
+            else:
+                label.hide()
+        update_deps("RDEPENDS", "Depends: ", self.depends_label)
+        update_deps("RRECOMMENDS", "Recommends: ", self.recommends_label)
+        update_deps("RSUGGESTS", "Suggests: ", self.suggests_label)
+        update_deps("RPROVIDES", "Provides: ", self.provides_label, clickable=False)
+
+    def load_recipes(self):
+        for recipe in sorted(os.listdir(self.pkgdata)):
+            if os.path.isfile(os.path.join(self.pkgdata, recipe)):
+                self.recipe_iters[recipe] = self.recipe_store.append([recipe])
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser(description='pkgdata browser')
+    parser.add_argument('-p', '--pkgdata', help="Optional location of pkgdata")
+
+    args = parser.parse_args()
+    pkgdata = args.pkgdata if args.pkgdata else find_pkgdata()
+    # TODO assert pkgdata is a directory
+    window = PkgUi(pkgdata)
+    Gtk.main()
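
Note: the load() helper above copes with pkgdata files, which are flat VAR: value listings with no INI section header, by prepending a synthetic "[fake]" section and stripping the per-package suffix from option names. The same trick on an in-memory snippet (the variable values are made up):

    from configparser import ConfigParser
    from io import StringIO
    from itertools import chain

    snippet = StringIO("PN: example\nPKGSIZE_example: 4096\nRDEPENDS_example: libc6\n")

    suffix = "example"
    parser = ConfigParser()
    # Strip the per-package suffix so keys read PKGSIZE, RDEPENDS, ... as in load().
    parser.optionxform = lambda option: option.replace("_" + suffix, "")
    # ConfigParser insists on a section header, so fake one before the real lines.
    parser.read_file(chain(("[fake]",), snippet))

    data = parser["fake"]
    print(data["PN"], data["PKGSIZE"], data["RDEPENDS"])   # example 4096 libc6
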
diff --git a/poky/scripts/oe-pkgdata-browser.glade b/poky/scripts/oe-pkgdata-browser.glade
new file mode 100644
index 0000000..0d06c82
--- /dev/null
+++ b/poky/scripts/oe-pkgdata-browser.glade
@@ -0,0 +1,337 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Generated with glade 3.18.3 -->
+<interface>
+  <requires lib="gtk+" version="3.12"/>
+  <object class="GtkListStore" id="file_store">
+    <columns>
+      <!-- column-name Filename -->
+      <column type="gchararray"/>
+      <!-- column-name Size -->
+      <column type="glong"/>
+    </columns>
+  </object>
+  <object class="GtkListStore" id="package_store">
+    <columns>
+      <!-- column-name Package -->
+      <column type="gchararray"/>
+      <!-- column-name Size -->
+      <column type="glong"/>
+    </columns>
+  </object>
+  <object class="GtkListStore" id="pkgdata_store">
+    <columns>
+      <!-- column-name Name -->
+      <column type="gchararray"/>
+      <!-- column-name Path -->
+      <column type="gchararray"/>
+    </columns>
+  </object>
+  <object class="GtkListStore" id="recipe_store">
+    <columns>
+      <!-- column-name Recipe -->
+      <column type="gchararray"/>
+    </columns>
+  </object>
+  <object class="GtkWindow" id="window">
+    <property name="can_focus">False</property>
+    <property name="title" translatable="yes">Package Data Browser</property>
+    <property name="default_width">1200</property>
+    <property name="default_height">900</property>
+    <property name="icon_name">accessories-dictionary</property>
+    <property name="has_resize_grip">True</property>
+    <child>
+      <object class="GtkBox" id="box1">
+        <property name="visible">True</property>
+        <property name="can_focus">False</property>
+        <property name="margin_left">4</property>
+        <property name="margin_right">4</property>
+        <property name="margin_top">4</property>
+        <property name="margin_bottom">4</property>
+        <property name="orientation">vertical</property>
+        <property name="spacing">4</property>
+        <child>
+          <object class="GtkComboBox" id="pkgdata_combo">
+            <property name="can_focus">False</property>
+            <property name="model">pkgdata_store</property>
+            <property name="id_column">1</property>
+            <child>
+              <object class="GtkCellRendererText" id="cellrenderertext5"/>
+              <attributes>
+                <attribute name="text">0</attribute>
+              </attributes>
+            </child>
+          </object>
+          <packing>
+            <property name="expand">False</property>
+            <property name="fill">True</property>
+            <property name="position">0</property>
+          </packing>
+        </child>
+        <child>
+          <object class="GtkPaned" id="paned1">
+            <property name="visible">True</property>
+            <property name="can_focus">True</property>
+            <property name="position">400</property>
+            <property name="position_set">True</property>
+            <child>
+              <object class="GtkScrolledWindow" id="scrolledwindow1">
+                <property name="visible">True</property>
+                <property name="can_focus">True</property>
+                <property name="shadow_type">in</property>
+                <property name="min_content_width">100</property>
+                <child>
+                  <object class="GtkTreeView" id="recipe_view">
+                    <property name="visible">True</property>
+                    <property name="can_focus">True</property>
+                    <property name="model">recipe_store</property>
+                    <property name="search_column">0</property>
+                    <property name="fixed_height_mode">True</property>
+                    <property name="show_expanders">False</property>
+                    <child internal-child="selection">
+                      <object class="GtkTreeSelection" id="treeview-selection1"/>
+                    </child>
+                    <child>
+                      <object class="GtkTreeViewColumn" id="treeviewcolumn1">
+                        <property name="sizing">fixed</property>
+                        <property name="title" translatable="yes">Recipe</property>
+                        <child>
+                          <object class="GtkCellRendererText" id="cellrenderertext1"/>
+                          <attributes>
+                            <attribute name="text">0</attribute>
+                          </attributes>
+                        </child>
+                      </object>
+                    </child>
+                  </object>
+                </child>
+              </object>
+              <packing>
+                <property name="resize">False</property>
+                <property name="shrink">True</property>
+              </packing>
+            </child>
+            <child>
+              <object class="GtkPaned" id="paned2">
+                <property name="visible">True</property>
+                <property name="can_focus">True</property>
+                <property name="position">400</property>
+                <property name="position_set">True</property>
+                <child>
+                  <object class="GtkScrolledWindow" id="scrolledwindow2">
+                    <property name="visible">True</property>
+                    <property name="can_focus">True</property>
+                    <property name="shadow_type">in</property>
+                    <property name="min_content_width">100</property>
+                    <child>
+                      <object class="GtkTreeView" id="package_view">
+                        <property name="visible">True</property>
+                        <property name="can_focus">True</property>
+                        <property name="model">package_store</property>
+                        <property name="search_column">0</property>
+                        <property name="show_expanders">False</property>
+                        <child internal-child="selection">
+                          <object class="GtkTreeSelection" id="treeview-selection2"/>
+                        </child>
+                        <child>
+                          <object class="GtkTreeViewColumn" id="package_name_column">
+                            <property name="resizable">True</property>
+                            <property name="sizing">autosize</property>
+                            <property name="title" translatable="yes">Package</property>
+                            <property name="sort_column_id">0</property>
+                            <child>
+                              <object class="GtkCellRendererText" id="cellrenderertext2"/>
+                              <attributes>
+                                <attribute name="text">0</attribute>
+                              </attributes>
+                            </child>
+                          </object>
+                        </child>
+                        <child>
+                          <object class="GtkTreeViewColumn" id="package_size_column">
+                            <property name="resizable">True</property>
+                            <property name="sizing">autosize</property>
+                            <property name="title" translatable="yes">Size</property>
+                            <property name="sort_column_id">1</property>
+                            <child>
+                              <object class="GtkCellRendererText" id="package_size_cell"/>
+                            </child>
+                          </object>
+                        </child>
+                      </object>
+                    </child>
+                  </object>
+                  <packing>
+                    <property name="resize">False</property>
+                    <property name="shrink">True</property>
+                  </packing>
+                </child>
+                <child>
+                  <object class="GtkBox" id="box2">
+                    <property name="visible">True</property>
+                    <property name="can_focus">False</property>
+                    <property name="margin_left">4</property>
+                    <property name="orientation">vertical</property>
+                    <property name="spacing">4</property>
+                    <child>
+                      <object class="GtkLabel" id="label1">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">label</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">0</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkLabel" id="depends_label">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">depends_label</property>
+                        <property name="wrap">True</property>
+                        <property name="track_visited_links">False</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">1</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkLabel" id="recommends_label">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">recs_label</property>
+                        <property name="wrap">True</property>
+                        <property name="track_visited_links">False</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">2</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkLabel" id="suggests_label">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">suggests_label</property>
+                        <property name="wrap">True</property>
+                        <property name="track_visited_links">False</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">3</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkLabel" id="provides_label">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">provides_label</property>
+                        <property name="wrap">True</property>
+                        <property name="track_visited_links">False</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">4</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkLabel" id="files_label">
+                        <property name="visible">True</property>
+                        <property name="can_focus">False</property>
+                        <property name="xalign">0</property>
+                        <property name="label" translatable="yes">files_label</property>
+                        <property name="ellipsize">end</property>
+                      </object>
+                      <packing>
+                        <property name="expand">False</property>
+                        <property name="fill">True</property>
+                        <property name="position">5</property>
+                      </packing>
+                    </child>
+                    <child>
+                      <object class="GtkScrolledWindow" id="files_scrollview">
+                        <property name="visible">True</property>
+                        <property name="can_focus">True</property>
+                        <property name="shadow_type">in</property>
+                        <child>
+                          <object class="GtkTreeView" id="files_view">
+                            <property name="visible">True</property>
+                            <property name="can_focus">True</property>
+                            <property name="model">file_store</property>
+                            <property name="rules_hint">True</property>
+                            <property name="search_column">0</property>
+                            <property name="show_expanders">False</property>
+                            <child internal-child="selection">
+                              <object class="GtkTreeSelection" id="treeview-selection3"/>
+                            </child>
+                            <child>
+                              <object class="GtkTreeViewColumn" id="file_name_column">
+                                <property name="title" translatable="yes">Name</property>
+                                <property name="sort_indicator">True</property>
+                                <property name="sort_column_id">0</property>
+                                <child>
+                                  <object class="GtkCellRendererText" id="cellrenderertext3">
+                                    <property name="background_rgba">rgba(0,0,0,0)</property>
+                                  </object>
+                                  <attributes>
+                                    <attribute name="text">0</attribute>
+                                  </attributes>
+                                </child>
+                              </object>
+                            </child>
+                            <child>
+                              <object class="GtkTreeViewColumn" id="file_size_column">
+                                <property name="title" translatable="yes">Size</property>
+                                <property name="sort_indicator">True</property>
+                                <property name="sort_column_id">1</property>
+                                <child>
+                                  <object class="GtkCellRendererText" id="file_size_cell"/>
+                                  <attributes>
+                                    <attribute name="text">1</attribute>
+                                  </attributes>
+                                </child>
+                              </object>
+                            </child>
+                          </object>
+                        </child>
+                      </object>
+                      <packing>
+                        <property name="expand">True</property>
+                        <property name="fill">True</property>
+                        <property name="position">6</property>
+                      </packing>
+                    </child>
+                  </object>
+                  <packing>
+                    <property name="resize">True</property>
+                    <property name="shrink">True</property>
+                  </packing>
+                </child>
+              </object>
+              <packing>
+                <property name="resize">True</property>
+                <property name="shrink">True</property>
+              </packing>
+            </child>
+          </object>
+          <packing>
+            <property name="expand">True</property>
+            <property name="fill">True</property>
+            <property name="position">1</property>
+          </packing>
+        </child>
+      </object>
+    </child>
+  </object>
+</interface>
diff --git a/poky/scripts/oe-run-native b/poky/scripts/oe-run-native
index bea5d69..4e63e69 100755
--- a/poky/scripts/oe-run-native
+++ b/poky/scripts/oe-run-native
@@ -16,7 +16,7 @@
     echo 'OpenEmbedded run-native - runs native tools'
     echo ''
     echo 'arguments:'
-    echo '  native-recipe       The recipe which provoides tool'
+    echo '  native-recipe       The recipe which provides the tool'
     echo '  tool                Native tool to run'
     echo ''
     exit 2
diff --git a/poky/scripts/postinst-intercepts/update_desktop_database b/poky/scripts/postinst-intercepts/update_desktop_database
new file mode 100644
index 0000000..8903b49
--- /dev/null
+++ b/poky/scripts/postinst-intercepts/update_desktop_database
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# SPDX-License-Identifier: MIT
+#
+# Post-install intercept for mime-xdg.bbclass
+
+update-desktop-database $D${desktop_dir}
+
diff --git a/poky/scripts/postinst-intercepts/update_font_cache b/poky/scripts/postinst-intercepts/update_font_cache
index 3053c70..46bdb8c 100644
--- a/poky/scripts/postinst-intercepts/update_font_cache
+++ b/poky/scripts/postinst-intercepts/update_font_cache
@@ -6,4 +6,6 @@
 set -e
 
 PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D -E ${fontconfigcacheenv} $D${libexecdir}/${binprefix}fc-cache --sysroot=$D --system-only ${fontconfigcacheparams}
+
 chown -R root:root $D${fontconfigcachedir}
+find $D -type f -name .uuid -exec chown root:root '{}' +
diff --git a/poky/scripts/postinst-intercepts/update_mime_database b/poky/scripts/postinst-intercepts/update_mime_database
new file mode 100644
index 0000000..582d1e1
--- /dev/null
+++ b/poky/scripts/postinst-intercepts/update_mime_database
@@ -0,0 +1,9 @@
+#!/bin/sh
+#
+# SPDX-License-Identifier: MIT
+#
+# Post-install intercept for mime.bbclass
+
+echo "Updating MIME database... this may take a while."
+update-mime-database $D${mimedir}
+
diff --git a/poky/scripts/postinst-intercepts/update_udev_hwdb b/poky/scripts/postinst-intercepts/update_udev_hwdb
index c4fb2bf..102e99b 100644
--- a/poky/scripts/postinst-intercepts/update_udev_hwdb
+++ b/poky/scripts/postinst-intercepts/update_udev_hwdb
@@ -5,5 +5,17 @@
 
 set -e
 
-PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${libexecdir}/${binprefix}udevadm hwdb --update --root $D
-chown root:root $D${sysconfdir}/udev/hwdb.bin
+case "${PREFERRED_PROVIDER_udev}" in
+	systemd)
+		UDEV_EXTRA_ARGS="--usr"
+		UDEVLIBDIR="${rootlibexecdir}"
+		;;
+
+	*)
+		UDEV_EXTRA_ARGS=""
+		UDEVLIBDIR="${sysconfdir}"
+		;;
+esac
+
+PSEUDO_UNLOAD=1 ${binprefix}qemuwrapper -L $D $D${libexecdir}/${binprefix}udevadm hwdb --update --root $D ${UDEV_EXTRA_ARGS}
+chown root:root $D${UDEVLIBDIR}/udev/hwdb.bin
diff --git a/poky/scripts/pybootchartgui/pybootchartgui/draw.py b/poky/scripts/pybootchartgui/pybootchartgui/draw.py
index add5c53..53324b9 100644
--- a/poky/scripts/pybootchartgui/pybootchartgui/draw.py
+++ b/poky/scripts/pybootchartgui/pybootchartgui/draw.py
@@ -308,7 +308,7 @@
 proc_h = 16 # the height of a process
 leg_s = 10
 MIN_IMG_W = 800
-CUML_HEIGHT = 2000 # Increased value to accomodate CPU and I/O Graphs
+CUML_HEIGHT = 2000 # Increased value to accommodate CPU and I/O Graphs
 OPTIONS = None
 
 def extents(options, xscale, trace):
diff --git a/poky/scripts/pybootchartgui/pybootchartgui/parsing.py b/poky/scripts/pybootchartgui/pybootchartgui/parsing.py
index ef2d3d3..b42dac6 100644
--- a/poky/scripts/pybootchartgui/pybootchartgui/parsing.py
+++ b/poky/scripts/pybootchartgui/pybootchartgui/parsing.py
@@ -18,7 +18,7 @@
 import re
 import sys
 import tarfile
-from time import clock
+import time
 from collections import defaultdict
 from functools import reduce
 
@@ -723,7 +723,7 @@
 
 def _do_parse(writer, state, filename, file):
     writer.info("parsing '%s'" % filename)
-    t1 = clock()
+    t1 = time.process_time()
     name = os.path.basename(filename)
     if name == "proc_diskstats.log":
         state.disk_stats = _parse_proc_disk_stat_log(file)
@@ -743,7 +743,7 @@
         state.monitor_disk = _parse_monitor_disk_log(file)
     elif not filename.endswith('.log'):
         _parse_bitbake_buildstats(writer, state, filename, file)
-    t2 = clock()
+    t2 = time.process_time()
     writer.info("  %s seconds" % str(t2-t1))
     return state
 
diff --git a/poky/scripts/rpm2cpio.sh b/poky/scripts/rpm2cpio.sh
index 876c53c..7cd771b 100755
--- a/poky/scripts/rpm2cpio.sh
+++ b/poky/scripts/rpm2cpio.sh
@@ -22,7 +22,8 @@
 
 	i=0
 	while [ $i -lt 8 ]; do
-		b="$(_dd $(($offset + $i)) bs=1 count=1)"
+		b=$(_dd $(($offset + $i)) bs=1 count=1; echo X)
+		b=${b%X}
 		[ -z "$b" ] &&
 			b="0" ||
 			b="$(exec printf '%u\n' "'$b")"
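The sentinel in the hunk above works around POSIX command substitution stripping trailing newlines: without it, a single byte read that happens to be a newline comes back empty and is then treated as 0 by the `[ -z "$b" ]` fallback. A minimal illustration of the idiom (not part of the patch), using only printf and parameter expansion:

    b=$(printf '\n')            # trailing newline stripped: $b is empty
    b=$(printf '\n'; echo X)    # capture the byte plus a sentinel: $b is "<LF>X"
    b=${b%X}                    # strip the sentinel: $b is a single newline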
diff --git a/poky/scripts/runqemu b/poky/scripts/runqemu
index 5c56c3f..310d79f 100755
--- a/poky/scripts/runqemu
+++ b/poky/scripts/runqemu
@@ -135,6 +135,7 @@
                         'DEPLOY_DIR_IMAGE',
                         'OE_TMPDIR',
                         'OECORE_NATIVE_SYSROOT',
+                        'MULTICONFIG',
                         )
 
         self.qemu_opt = ''
@@ -164,6 +165,7 @@
         self.kvm_enabled = False
         self.vhost_enabled = False
         self.slirp_enabled = False
+        self.net_bridge = None
         self.nfs_instance = 0
         self.nfs_running = False
         self.serialconsole = False
@@ -183,6 +185,8 @@
         self.vmtypes = ('hddimg', 'iso')
         self.fsinfo = {}
         self.network_device = "-device e1000,netdev=net0,mac=@MAC@"
+        self.cmdline_ip_slirp = "ip=dhcp"
+        self.cmdline_ip_tap = "ip=192.168.7.@CLIENT@::192.168.7.@GATEWAY@:255.255.255.0"
         # Use different mac section for tap and slirp to avoid
         # conflicts, e.g., when one is running with tap, the other is
         # running with slirp.
@@ -401,9 +405,7 @@
             self.set("MACHINE", arg)
             return
 
-        cmd = 'MACHINE=%s bitbake -e' % arg
-        logger.info('Running %s...' % cmd)
-        self.bitbake_e = subprocess.check_output(cmd, shell=True).decode('utf-8')
+        self.bitbake_e = self.run_bitbake_env(arg)
         # bitbake -e doesn't report invalid MACHINE as an error, so
         # let's check DEPLOY_DIR_IMAGE to make sure that it is a valid
         # MACHINE.
@@ -486,6 +488,8 @@
                 self.vhost_enabled = True
             elif arg == 'slirp':
                 self.slirp_enabled = True
+            elif arg.startswith('bridge='):
+                self.net_bridge = '%s' % arg[len('bridge='):]
             elif arg == 'snapshot':
                 self.snapshot = True
             elif arg == 'publicvnc':
@@ -803,7 +807,7 @@
     def check_and_set(self):
         """Check configs sanity and set when needed"""
         self.validate_paths()
-        if not self.slirp_enabled:
+        if not self.slirp_enabled and not self.net_bridge:
             check_tun()
         # Check audio
         if self.audio_enabled:
@@ -933,29 +937,30 @@
                     self.set('STAGING_BINDIR_NATIVE', '%s/usr/bin' % self.get('STAGING_DIR_NATIVE'))
 
     def print_config(self):
-        logger.info('Continuing with the following parameters:\n')
+        logoutput = ['Continuing with the following parameters:']
         if not self.fstype in self.vmtypes:
-            print('KERNEL: [%s]' % self.kernel)
+            logoutput.append('KERNEL: [%s]' % self.kernel)
             if self.bios:
-                print('BIOS: [%s]' % self.bios)
+                logoutput.append('BIOS: [%s]' % self.bios)
             if self.dtb:
-                print('DTB: [%s]' % self.dtb)
-        print('MACHINE: [%s]' % self.get('MACHINE'))
+                logoutput.append('DTB: [%s]' % self.dtb)
+        logoutput.append('MACHINE: [%s]' % self.get('MACHINE'))
         try:
             fstype_flags = ' (' + ', '.join(self.fsinfo[self.fstype]) + ')'
         except KeyError:
             fstype_flags = ''
-        print('FSTYPE: [%s%s]' % (self.fstype, fstype_flags))
+        logoutput.append('FSTYPE: [%s%s]' % (self.fstype, fstype_flags))
         if self.fstype  == 'nfs':
-            print('NFS_DIR: [%s]' % self.rootfs)
+            logoutput.append('NFS_DIR: [%s]' % self.rootfs)
         else:
-            print('ROOTFS: [%s]' % self.rootfs)
+            logoutput.append('ROOTFS: [%s]' % self.rootfs)
         if self.ovmf_bios:
-            print('OVMF: %s' % self.ovmf_bios)
+            logoutput.append('OVMF: %s' % self.ovmf_bios)
         if (self.ovmf_secboot_pkkek1):
-            print('SECBOOT PKKEK1: [%s...]' % self.ovmf_secboot_pkkek1[0:100])
-        print('CONFFILE: [%s]' % self.qemuboot)
-        print('')
+            logoutput.append('SECBOOT PKKEK1: [%s...]' % self.ovmf_secboot_pkkek1[0:100])
+        logoutput.append('CONFFILE: [%s]' % self.qemuboot)
+        logoutput.append('')
+        logger.info('\n'.join(logoutput))
 
     def setup_nfs(self):
         if not self.nfs_server:
@@ -985,7 +990,7 @@
             # Use '%s' since they are integers
             os.putenv(k, '%s' % v)
 
-        self.unfs_opts="nfsvers=3,port=%s,udp,mountport=%s" % (nfsd_port, mountd_port)
+        self.unfs_opts="nfsvers=3,port=%s,tcp,mountport=%s" % (nfsd_port, mountd_port)
 
         # Extract .tar.bz2 or .tar.bz if no nfs dir
         if not (self.rootfs and os.path.isdir(self.rootfs)):
@@ -1020,12 +1025,18 @@
 
         self.nfs_running = True
 
+    def setup_net_bridge(self):
+        self.set('NETWORK_CMD', '-netdev bridge,br=%s,id=net0,helper=%s -device virtio-net-pci,netdev=net0 ' % (
+            self.net_bridge, os.path.join(self.bindir_native, 'qemu-oe-bridge-helper')))
+
     def setup_slirp(self):
         """Setup user networking"""
 
         if self.fstype == 'nfs':
             self.setup_nfs()
-        self.kernel_cmdline_script += ' ip=dhcp'
+        netconf = " " + self.cmdline_ip_slirp
+        logger.info("Network configuration:%s", netconf)
+        self.kernel_cmdline_script += netconf
         # Port mapping
         hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23"
         qb_slirp_opt_default = "-netdev user,id=net0%s,tftp=%s" % (hostfwd, self.get('DEPLOY_DIR_IMAGE'))
@@ -1120,7 +1131,11 @@
             uid = os.getuid()
             logger.info("Setting up tap interface under sudo")
             cmd = ('sudo', self.qemuifup, str(uid), str(gid), self.bindir_native)
-            tap = subprocess.check_output(cmd).decode('utf-8').strip()
+            try:
+                tap = subprocess.check_output(cmd).decode('utf-8').strip()
+            except subprocess.CalledProcessError as e:
+                logger.error('Setting up tap device failed:\n%s\nRun runqemu-gen-tapdevs to manually create one.' % str(e))
+                sys.exit(1)
             lockfile = os.path.join(lockdir, tap)
             self.taplock = lockfile + '.lock'
             self.acquire_taplock()
@@ -1129,16 +1144,18 @@
 
         if not tap:
             logger.error("Failed to setup tap device. Run runqemu-gen-tapdevs to manually create.")
-            return 1
+            sys.exit(1)
         self.tap = tap
         tapnum = int(tap[3:])
         gateway = tapnum * 2 + 1
         client = gateway + 1
         if self.fstype == 'nfs':
             self.setup_nfs()
-        netconf = "192.168.7.%s::192.168.7.%s:255.255.255.0" % (client, gateway)
-        logger.info("Network configuration: %s", netconf)
-        self.kernel_cmdline_script += " ip=%s" % netconf
+        netconf = " " + self.cmdline_ip_tap
+        netconf = netconf.replace('@CLIENT@', str(client))
+        netconf = netconf.replace('@GATEWAY@', str(gateway))
+        logger.info("Network configuration:%s", netconf)
+        self.kernel_cmdline_script += netconf
         mac = "%s%02x" % (self.mac_tap, client)
         qb_tap_opt = self.get('QB_TAP_OPT')
         if qb_tap_opt:
@@ -1157,9 +1174,13 @@
         if sys.stdin.isatty():
             self.saved_stty = subprocess.check_output(("stty", "-g")).decode('utf-8').strip()
         self.network_device = self.get('QB_NETWORK_DEVICE') or self.network_device
-        if self.slirp_enabled:
+        if self.net_bridge:
+            self.setup_net_bridge()
+        elif self.slirp_enabled:
+            self.cmdline_ip_slirp = self.get('QB_CMDLINE_IP_SLIRP') or self.cmdline_ip_slirp
             self.setup_slirp()
         else:
+            self.cmdline_ip_tap = self.get('QB_CMDLINE_IP_TAP') or self.cmdline_ip_tap
             self.setup_tap()
 
     def setup_rootfs(self):
@@ -1175,6 +1196,10 @@
         else:
             self.rootfs_options = '-drive file=%s,if=virtio,format=%s' % (self.rootfs, rootfs_format)
 
+        qb_rootfs_extra_opt = self.get("QB_ROOTFS_EXTRA_OPT")
+        if qb_rootfs_extra_opt and not qb_rootfs_extra_opt.startswith(","):
+            qb_rootfs_extra_opt = "," + qb_rootfs_extra_opt
+
         if self.fstype in ('cpio.gz', 'cpio'):
             self.kernel_cmdline = 'root=/dev/ram0 rw debugshell'
             self.rootfs_options = '-initrd %s' % self.rootfs
@@ -1187,11 +1212,15 @@
                     drive_type = self.get('QB_DRIVE_TYPE')
                     if drive_type.startswith("/dev/sd"):
                         logger.info('Using scsi drive')
-                        vm_drive = '-drive if=none,id=hd,file=%s,format=%s -device virtio-scsi-pci,id=scsi -device scsi-hd,drive=hd' \
-                                       % (self.rootfs, rootfs_format)
+                        vm_drive = '-drive if=none,id=hd,file=%s,format=%s -device virtio-scsi-pci,id=scsi -device scsi-hd,drive=hd%s' \
+                                       % (self.rootfs, rootfs_format, qb_rootfs_extra_opt)
                     elif drive_type.startswith("/dev/hd"):
                         logger.info('Using ide drive')
                         vm_drive = "-drive file=%s,format=%s" % (self.rootfs, rootfs_format)
+                    elif drive_type.startswith("/dev/vdb"):
+                        logger.info('Using block virtio drive')
+                        vm_drive = '-drive id=disk0,file=%s,if=none,format=%s -device virtio-blk-device,drive=disk0%s' \
+                                       % (self.rootfs, rootfs_format, qb_rootfs_extra_opt)
                     else:
                         # virtio might have been selected explicitly (just use it), or
                         # is used as fallback (then warn about that).
@@ -1405,10 +1434,7 @@
 
         self.cleaned = True
 
-    def load_bitbake_env(self, mach=None):
-        if self.bitbake_e:
-            return
-
+    def run_bitbake_env(self, mach=None):
         bitbake = shutil.which('bitbake')
         if not bitbake:
             return
@@ -1416,14 +1442,24 @@
         if not mach:
             mach = self.get('MACHINE')
 
+        multiconfig = self.get('MULTICONFIG')
+        if multiconfig:
+            multiconfig = "mc:%s" % multiconfig
+
         if mach:
-            cmd = 'MACHINE=%s bitbake -e' % mach
+            cmd = 'MACHINE=%s bitbake -e %s' % (mach, multiconfig)
         else:
-            cmd = 'bitbake -e'
+            cmd = 'bitbake -e %s' % multiconfig
 
         logger.info('Running %s...' % cmd)
+        return subprocess.check_output(cmd, shell=True).decode('utf-8')
+
+    def load_bitbake_env(self, mach=None):
+        if self.bitbake_e:
+            return
+
         try:
-            self.bitbake_e = subprocess.check_output(cmd, shell=True).decode('utf-8')
+            self.bitbake_e = self.run_bitbake_env(mach=mach)
         except subprocess.CalledProcessError as err:
             self.bitbake_e = ''
             logger.warning("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
@@ -1438,7 +1474,13 @@
         if result and os.path.exists(result):
             return result
 
-        cmd = ('bitbake', 'qemu-helper-native', '-e')
+        cmd = ['bitbake', '-e']
+        multiconfig = self.get('MULTICONFIG')
+        if multiconfig:
+            cmd.append('mc:%s:qemu-helper-native' % multiconfig)
+        else:
+            cmd.append('qemu-helper-native')
+
         logger.info('Running %s...' % str(cmd))
         out = subprocess.check_output(cmd).decode('utf-8')
 
diff --git a/poky/scripts/runqemu-extract-sdk b/poky/scripts/runqemu-extract-sdk
index 8a4ee90..9bc0c07 100755
--- a/poky/scripts/runqemu-extract-sdk
+++ b/poky/scripts/runqemu-extract-sdk
@@ -69,7 +69,7 @@
 pseudo_state_dir="$SDK_ROOTFS_DIR/../$(basename "$SDK_ROOTFS_DIR").pseudo_state"
 pseudo_state_dir="$(readlink -f $pseudo_state_dir)"
 
-debug_image="`echo $ROOTFS_TARBALL | grep '\-dbg\.tar\.'`"
+debug_image="`echo $ROOTFS_TARBALL | grep '\-dbg\.rootfs\.tar'`"
 
 if [ -e "$pseudo_state_dir" -a -z "$debug_image" ]; then
 	echo "Error: $pseudo_state_dir already exists!"
diff --git a/poky/scripts/tiny/ksize.py b/poky/scripts/tiny/ksize.py
index bc11919..8316b85 100755
--- a/poky/scripts/tiny/ksize.py
+++ b/poky/scripts/tiny/ksize.py
@@ -27,7 +27,7 @@
 class Sizes:
     def __init__(self, glob):
         self.title = glob
-        p = Popen("size -t " + str(glob), shell=True, stdout=PIPE, stderr=PIPE)
+        p = Popen("size -t " + str(glob), shell=True, stdout=PIPE, stderr=PIPE, universal_newlines=True)
         output = p.communicate()[0].splitlines()
         if len(output) > 2:
             sizes = output[-1].split()[0:4]
@@ -49,14 +49,14 @@
         path = os.path.dirname(filename)
 
         p = Popen("ls " + str(path) + "/*.o | grep -v built-in.o",
-                  shell=True, stdout=PIPE, stderr=PIPE)
+                  shell=True, stdout=PIPE, stderr=PIPE, universal_newlines=True)
         glob = ' '.join(p.communicate()[0].splitlines())
         oreport = Report(glob, str(path) + "/*.o")
         oreport.sizes.title = str(path) + "/*.o"
         r.parts.append(oreport)
 
         if subglob:
-            p = Popen("ls " + subglob, shell=True, stdout=PIPE, stderr=PIPE)
+            p = Popen("ls " + subglob, shell=True, stdout=PIPE, stderr=PIPE, universal_newlines=True)
             for f in p.communicate()[0].splitlines():
                 path = os.path.dirname(f)
                 r.parts.append(Report.create(f, path, str(path) + "/*/built-in.o"))
diff --git a/poky/scripts/wic b/poky/scripts/wic
index ea61410..24700f3 100755
--- a/poky/scripts/wic
+++ b/poky/scripts/wic
@@ -392,9 +392,9 @@
 
 def wic_init_parser_cp(subparser):
     subparser.add_argument("src",
-                        help="source spec")
-    subparser.add_argument("dest", type=imgpathtype,
-                        help="image spec: <image>:<vfat partition>[<path>]")
+                        help="image spec: <image>:<vfat partition>[<path>] or <file>")
+    subparser.add_argument("dest",
+                        help="image spec: <image>:<vfat partition>[<path>] or <file>")
     subparser.add_argument("-n", "--native-sysroot",
                         help="path to the native sysroot containing the tools")
 
@@ -522,6 +522,16 @@
                 hlpt[0](hlpt[1], hlpt[2])
             return 0
 
+    # Validate the wic cp src and dest parameters to identify which one is the
+    # image spec and cast it to imgtype
+    if args.command == "cp":
+        if ":" in args.dest:
+            args.dest = imgtype(args.dest)
+        elif ":" in args.src:
+            args.src = imgtype(args.src)
+        else:
+            raise argparse.ArgumentTypeError("no image or partition number specified.")
+
     return hlp.invoke_subcommand(args, parser, hlp.wic_help_usage, subcommands)
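With the change above, wic cp no longer assumes the destination is the image: either src or dest may carry the <image>:<partition>[<path>] spec, so files can be copied out of a partition as well as into it. A brief usage sketch with hypothetical file and image names:

    # copy a local file into partition 1 of the image
    wic cp settings.conf core-image-minimal-qemux86-64.wic:1/etc/
    # copy a file back out of partition 1 into the current directory
    wic cp core-image-minimal-qemux86-64.wic:1/etc/fstab .
    # if neither argument names an image, argparse now reports
    # "no image or partition number specified."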
 
 
diff --git a/poky/scripts/yocto-check-layer b/poky/scripts/yocto-check-layer
index 010830f..ca6c79b 100755
--- a/poky/scripts/yocto-check-layer
+++ b/poky/scripts/yocto-check-layer
@@ -84,7 +84,7 @@
         logger.setLevel(logging.ERROR)
 
     if not 'BUILDDIR' in os.environ:
-        logger.error("You must source the environment before run this script.")
+        logger.error("You must source the environment before running this script.")
         logger.error("$ source oe-init-build-env")
         return 1
     builddir = os.environ['BUILDDIR']
@@ -92,7 +92,7 @@
 
     layers = detect_layers(args.layers, args.no_auto)
     if not layers:
-        logger.error("Fail to detect layers")
+        logger.error("Failed to detect layers")
         return 1
     if args.additional_layers:
         additional_layers = detect_layers(args.additional_layers, args.no_auto)
@@ -108,7 +108,7 @@
     for layer in layers:
         if layer['type'] == LayerType.ERROR_BSP_DISTRO:
             logger.error("%s: Can't be DISTRO and BSP type at the same time."\
-                     " The conf/distro and conf/machine folders was found."\
+                     " Both conf/distro and conf/machine folders were found."\
                      % layer['name'])
             layers.remove(layer)
         elif layer['type'] == LayerType.ERROR_NO_LAYER_CONF: