reset upstream subtrees to HEAD

Reset the following subtrees to HEAD:
  poky: 8217b477a1(master)
  meta-xilinx: 64aa3d35ae(master)
  meta-openembedded: 0435c9e193(master)
  meta-raspberrypi: 490a4441ac(master)
  meta-security: cb6d1c85ee(master)

Squashed patches:
  meta-phosphor: drop systemd 239 patches
  meta-phosphor: mrw-api: use correct install path

Change-Id: I268e2646d9174ad305630c6bbd3fbc1a6105f43d
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/scripts/autobuilder-worker-prereq-tests b/poky/scripts/autobuilder-worker-prereq-tests
index bb46c69..358dd2b 100755
--- a/poky/scripts/autobuilder-worker-prereq-tests
+++ b/poky/scripts/autobuilder-worker-prereq-tests
@@ -15,6 +15,12 @@
 # test buildistory git repo works?
 #
 
+if [ ! -x $HOME/yocto-autobuilder-helper/scripts/checkvnc ]; then
+    echo "$HOME/yocto-autobuilder-helper should be created."
+    exit 1
+fi
+$HOME/yocto-autobuilder-helper/scripts/checkvnc
+
 . ./oe-init-build-env > /dev/null
 if [ "$?" != "0" ]; then
     exit 1
@@ -53,12 +59,12 @@
 fi
 popd
 bitbake qemu-helper-native
-runqemu qemux86-64
+DISPLAY=:1 runqemu serialstdio qemux86-64
 if [ "$?" != "0" ]; then
     echo "Unable to use runqemu"
     exit 1
 fi
-runqemu qemux86-64 kvm
+DISPLAY=:1 runqemu serialstdio qemux86-64 kvm
 if [ "$?" != "0" ]; then
     echo "Unable to use runqemu with kvm"
     exit 1
diff --git a/poky/scripts/distro/build-recipe-list.py b/poky/scripts/distro/build-recipe-list.py
deleted file mode 100755
index 2162764..0000000
--- a/poky/scripts/distro/build-recipe-list.py
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (c) 2017, Intel Corporation.
-#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
-# more details.
-#
-
-import os
-import shutil
-import csv
-import sys
-import argparse
-
-__version__ = "0.1.0"
-
-# set of BPNs
-recipenames = set()
-# map of recipe -> data
-allrecipes = {}
-
-def make_bpn(recipe):
-    prefixes = ("nativesdk-",)
-    suffixes = ("-native", "-cross", "-initial", "-intermediate", "-crosssdk", "-cross-canadian")
-    for ix in prefixes + suffixes:
-        if ix in recipe:
-            recipe = recipe.replace(ix, "")
-    return recipe
-
-def gather_recipes(rows):
-    for row in rows:
-        recipe = row[0]
-        bpn = make_bpn(recipe)
-        if bpn not in recipenames:
-            recipenames.add(bpn)
-        if recipe not in allrecipes:
-            allrecipes[recipe] = row
-
-def generate_recipe_list():
-    # machine list
-    machine_list = ( "qemuarm64", "qemuarm", "qemumips64", "qemumips", "qemuppc", "qemux86-64", "qemux86" )
-    # set filename format
-    fnformat = 'distrodata.%s.csv'
-
-    # store all data files in distrodata
-    datadir = 'distrodata'
-
-    # create the directory if it does not exists
-    if not os.path.exists(datadir):
-        os.mkdir(datadir)
-
-    # doing bitbake distrodata
-    for machine in machine_list:
-        os.system('MACHINE='+ machine + ' bitbake -k universe -c distrodata')
-        shutil.copy('tmp/log/distrodata.csv', 'distrodata/' + fnformat % machine)
-
-    for machine in machine_list:
-        with open('distrodata/' + fnformat % machine) as f:
-            reader = csv.reader(f)
-            rows = reader.__iter__()
-            gather_recipes(rows)
-
-    with open('recipe-list.txt', 'w') as f:
-        for recipe in sorted(recipenames):
-            f.write("%s\n" % recipe)
-    print("file : recipe-list.txt is created with %d entries." % len(recipenames))
-
-    with open('all-recipe-list.txt', 'w') as f:
-        for recipe, row in sorted(allrecipes.items()):
-            f.write("%s\n" % ','.join(row))
-
-
-def diff_for_new_recipes(recipe1, recipe2):
-    prev_recipe_path = recipe1 + '/'
-    curr_recipe_path = recipe2 + '/'
-    if not os.path.isfile(prev_recipe_path + 'recipe-list.txt') or not os.path.isfile(curr_recipe_path + 'recipe-list.txt'):
-        print("recipe files do not exists. please verify that the file exists.")
-        exit(1)
-
-    import csv
-
-    prev = []
-    new = []
-
-    with open(prev_recipe_path + 'recipe-list.txt') as f:
-        prev = f.readlines()
-
-    with open(curr_recipe_path + 'recipe-list.txt') as f:
-        new = f.readlines()
-
-    updates = []
-    for pn in new:
-        if not pn in prev:
-            updates.append(pn.rstrip())
-
-    allrecipe = []
-    with open(recipe1 + '_' + recipe2 + '_new_recipe_list.txt','w') as dr:
-        with open(curr_recipe_path + 'all-recipe-list.txt') as f:
-            reader = csv.reader(f, delimiter=',')
-            for row in reader:
-                if row[0] in updates:
-                    dr.write("%s,%s,%s" % (row[0], row[3], row[5]))
-                    if len(row[9:]) > 0:
-                        dr.write(",%s" % ','.join(row[9:]))
-                    dr.write("\n")
-
-def main(argv):
-    if argv[0] == "generate_recipe_list":
-        generate_recipe_list()
-    elif argv[0] == "compare_recipe":
-        diff_for_new_recipes(argv[1], argv[2])
-    else:
-        print("no such option. choose either 'generate_recipe_list' or 'compare_recipe'")
-
-    exit(0)
-
-if __name__ == "__main__":
-    try:
-        sys.exit(main(sys.argv[1:]))
-    except Exception as e:
-        print("Exception :", e)
-        sys.exit(1)
-
diff --git a/poky/scripts/distro/distrocompare.sh b/poky/scripts/distro/distrocompare.sh
deleted file mode 100755
index 908760c..0000000
--- a/poky/scripts/distro/distrocompare.sh
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright (c) 2017, Intel Corporation.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# distrocompare.sh : provides capability to get a list of new packages
-#                    based on two distinct branches. This script takes
-#                    2 parameters; either a commit-ish or a branch name
-#
-#                    To run : distrocompare.sh <older hash> <newer hash>
-#                    E.g. distrocompare.sh morty 92aa0e7
-#                    E.g. distrocompare.sh morty pyro
-#
-
-# get input as version
-previous_version=$1
-current_version=$2
-
-# set previous and current version
-if [ -z "$2" ]; then
-    previous_version=$1
-    current_version="current"
-fi
-
-# get script location. That's where the source supposedly located as well.
-scriptdir="$( realpath $(dirname "${BASH_SOURCE[0]}" ))"
-sourcedir="$( realpath $scriptdir/../.. )"
-
-# create working directory
-workdir=$(mktemp -d)
-
-# prepare to rollback to the branch if not similar
-branch=`cd $sourcedir; git branch | grep \* | cut -d ' ' -f2`
-
-# set current workdir to store final result
-currentworkdir=`pwd`
-
-# persists the file after local repo change
-cp $scriptdir/build-recipe-list.py $workdir
-
-#==================================================================
-
-function bake_distrodata {
-    # get to source directory of the git
-    cd $sourcedir
-
-    # change the branch / commit. Do not change if input is current
-    if [ "$1" != "current" ]; then
-        output=$(git checkout $1 2>&1)
-
-        # exit if git fails
-        if [[ $output == *"error"* ]]; then
-            echo "git error : $output"
-            echo "exiting ... "
-            rm -rf $workdir
-            exit
-        fi
-    fi
-
-    # make tmp as workdir
-    cd $workdir
-
-    # source oe-init to generate a new build folder
-    source $sourcedir/oe-init-build-env $1
-
-    # if file already exists with distrodata, do not append
-    if ! grep -q "distrodata" "conf/local.conf"; then
-        # add inherit distrodata to local.conf to enable distrodata feature
-        echo 'INHERIT += "distrodata"' >> conf/local.conf
-    fi
-
-    # use from tmp
-    $workdir/build-recipe-list.py generate_recipe_list
-}
-
-bake_distrodata $previous_version
-bake_distrodata $current_version
-
-#==================================================================
-
-cd $workdir
-
-# compare the 2 generated recipe-list.txt
-$workdir/build-recipe-list.py compare_recipe $previous_version $current_version
-
-# copy final result to current working directory
-cp $workdir/*_new_recipe_list.txt $currentworkdir
-
-if [ $? -ne 0 ]; then
-    rm -rf $workdir/$previous_version
-    rm -rf $workdir/$current_version
-    rm $workdir/build-recipe-list.py
-    # preserve the result in /tmp/distrodata if fail to copy the result over
-    exit
-fi
-
-# cleanup
-rm -rf $workdir
-
-# perform rollback branch
-cd $sourcedir
-currentbranch=`git branch | grep \* | cut -d ' ' -f2`
-if [ "$currentbranch" != "$branch" ]; then
-    git checkout $branch
-fi
-
-cd $currentworkdir
-
-#==================================================================
diff --git a/poky/scripts/lib/devtool/__init__.py b/poky/scripts/lib/devtool/__init__.py
index 89f098a..8fc7fff 100644
--- a/poky/scripts/lib/devtool/__init__.py
+++ b/poky/scripts/lib/devtool/__init__.py
@@ -205,6 +205,7 @@
     import oe.patch
     if not os.path.exists(os.path.join(repodir, '.git')):
         bb.process.run('git init', cwd=repodir)
+        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
         bb.process.run('git add .', cwd=repodir)
         commit_cmd = ['git']
         oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
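
The gc.autodetach tweak above is easy to miss: it stops git from forking a background "git gc --auto" inside the temporary workspace repo, presumably so nothing keeps running (or holds locks) after devtool's own git commands return. A minimal sketch of the same setup using plain subprocess instead of bb.process (init_repo is a hypothetical helper, not devtool code):

    import subprocess

    def init_repo(repodir):
        subprocess.run(['git', 'init'], cwd=repodir, check=True)
        # Keep "git gc --auto" in the foreground so it cannot outlive this call.
        subprocess.run(['git', 'config', '--local', 'gc.autodetach', '0'],
                       cwd=repodir, check=True)
        subprocess.run(['git', 'add', '.'], cwd=repodir, check=True)
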
diff --git a/poky/scripts/lib/devtool/deploy.py b/poky/scripts/lib/devtool/deploy.py
index 886004b..f345f31 100644
--- a/poky/scripts/lib/devtool/deploy.py
+++ b/poky/scripts/lib/devtool/deploy.py
@@ -211,6 +211,11 @@
         if not args.show_status:
             extraoptions += ' -q'
 
+        scp_sshexec = ''
+        ssh_sshexec = 'ssh'
+        if args.ssh_exec:
+            scp_sshexec = "-S %s" % args.ssh_exec
+            ssh_sshexec = args.ssh_exec
         scp_port = ''
         ssh_port = ''
         if args.port:
@@ -238,7 +243,7 @@
                 for fpath, fsize in filelist:
                     f.write('%s %d\n' % (fpath, fsize))
             # Copy them to the target
-            ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+            ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
             if ret != 0:
                 raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                                 'get a complete error message' % args.target)
@@ -246,7 +251,7 @@
             shutil.rmtree(tmpdir)
 
         # Now run the script
-        ret = exec_fakeroot(rd, 'tar cf - . | ssh  %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+        ret = exec_fakeroot(rd, 'tar cf - . | %s  %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
         if ret != 0:
             raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                             'error message')
@@ -276,6 +281,11 @@
     if not args.show_status:
         extraoptions += ' -q'
 
+    scp_sshexec = ''
+    ssh_sshexec = 'ssh'
+    if args.ssh_exec:
+        scp_sshexec = "-S %s" % args.ssh_exec
+        ssh_sshexec = args.ssh_exec
     scp_port = ''
     ssh_port = ''
     if args.port:
@@ -292,7 +302,7 @@
         with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
             f.write(shellscript)
         # Copy it to the target
-        ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
         if ret != 0:
             raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                                 'get a complete error message' % args.target)
@@ -300,7 +310,7 @@
         shutil.rmtree(tmpdir)
 
     # Now run the script
-    ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
+    ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
     if ret != 0:
         raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                            'error message')
@@ -324,6 +334,7 @@
     parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
     parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
     parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
+    parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
     parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
 
     strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
@@ -346,5 +357,6 @@
     parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
     parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
     parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
+    parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
     parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
     parser_undeploy.set_defaults(func=undeploy)
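
The deploy.py hunks thread an optional --ssh-exec executable through both the scp and ssh command lines. A reduced sketch of that string assembly (build_commands is a hypothetical helper, extraoptions handling is omitted, and the wrapper path is invented):

    def build_commands(target, tmpdir, script, ssh_exec=None, port=None):
        scp_sshexec = "-S %s" % ssh_exec if ssh_exec else ""
        ssh_sshexec = ssh_exec if ssh_exec else "ssh"
        scp_port = "-P %s" % port if port else ""
        ssh_port = "-p %s" % port if port else ""
        scp_cmd = "scp %s %s %s/* %s:%s" % (scp_sshexec, scp_port, tmpdir, target, script)
        ssh_cmd = "%s %s %s 'sh %s'" % (ssh_sshexec, ssh_port, target, script)
        return scp_cmd, ssh_cmd

    print(build_commands("root@192.168.7.2", "/tmp/devtool-deploy",
                         "/tmp/devtool-deploy/deploy.sh",
                         ssh_exec="/opt/tools/ssh-wrapper", port="2222"))
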
diff --git a/poky/scripts/lib/devtool/standard.py b/poky/scripts/lib/devtool/standard.py
index b7d4d47..ea09bbf 100644
--- a/poky/scripts/lib/devtool/standard.py
+++ b/poky/scripts/lib/devtool/standard.py
@@ -849,9 +849,7 @@
             if bb.data.inherits_class('kernel', rd):
                 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
                         'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n')
-                f.write('\ndo_patch() {\n'
-                        '    :\n'
-                        '}\n')
+                f.write('\ndo_patch[noexec] = "1"\n')
                 f.write('\ndo_configure_append() {\n'
                         '    cp ${B}/.config ${S}/.config.baseline\n'
                         '    ln -sfT ${B}/.config ${S}/.config.new\n'
diff --git a/poky/scripts/lib/devtool/upgrade.py b/poky/scripts/lib/devtool/upgrade.py
index 2020077..75e765e 100644
--- a/poky/scripts/lib/devtool/upgrade.py
+++ b/poky/scripts/lib/devtool/upgrade.py
@@ -600,6 +600,20 @@
         tinfoil.shutdown()
     return 0
 
+def check_upgrade_status(args, config, basepath, workspace):
+    if not args.recipe:
+        logger.info("Checking the upstream status for all recipes may take a few minutes")
+    results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
+    for result in results:
+        # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
+        if args.all or result[1] != 'MATCH':
+            logger.info("{:25} {:15} {:15} {} {} {}".format(   result[0],
+                                                               result[2],
+                                                               result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
+                                                               result[4],
+                                                               result[5] if result[5] != 'N/A' else "",
+                                                               "cannot be updated due to: %s" %(result[6]) if result[6] else ""))
+
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
 
@@ -627,3 +641,10 @@
                                                   group='info')
     parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
     parser_latest_version.set_defaults(func=latest_version)
+
+    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
+                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
+                                                        group='info')
+    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
+    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
+    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
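
check_upgrade_status() prints one row per recipe from the tuples returned by oe.recipeutils.get_recipe_upgrade_status(). A slightly simplified, standalone rendering of that formatting (the recipes, versions and maintainer below are made-up sample data, and the "new-commits-available" special case is dropped):

    sample = [
        ("busybox", "UPDATE", "1.29.3", "1.30.1", "Jane Doe <jane@example.com>", "N/A", ""),
        ("zlib", "MATCH", "1.2.11", "1.2.11", "Jane Doe <jane@example.com>", "N/A", ""),
        ("foo", "UNKNOWN", "1.0", "", "Jane Doe <jane@example.com>", "N/A", "no upstream"),
    ]

    show_all = False
    for pn, status, current, latest, maintainer, commit, reason in sample:
        if show_all or status != 'MATCH':
            print("{:25} {:15} {:15} {} {} {}".format(
                pn, current,
                latest if status == 'UPDATE' else status,
                maintainer,
                commit if commit != 'N/A' else "",
                "cannot be updated due to: %s" % reason if reason else ""))
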
diff --git a/poky/scripts/lib/resulttool/manualexecution.py b/poky/scripts/lib/resulttool/manualexecution.py
index 6487cd9..c94f981 100755
--- a/poky/scripts/lib/resulttool/manualexecution.py
+++ b/poky/scripts/lib/resulttool/manualexecution.py
@@ -24,30 +24,18 @@
     with open(file, "r") as f:
         return json.load(f)
 
-
 class ManualTestRunner(object):
-    def __init__(self):
-        self.jdata = ''
-        self.test_module = ''
-        self.test_cases_id = ''
-        self.configuration = ''
-        self.starttime = ''
-        self.result_id = ''
-        self.write_dir = ''
 
     def _get_testcases(self, file):
         self.jdata = load_json_file(file)
-        self.test_cases_id = []
         self.test_module = self.jdata[0]['test']['@alias'].split('.', 2)[0]
-        for i in self.jdata:
-            self.test_cases_id.append(i['test']['@alias'])
-    
+
     def _get_input(self, config):
         while True:
             output = input('{} = '.format(config))
-            if re.match('^[a-zA-Z0-9_-]+$', output):
+            if re.match('^[a-z0-9-.]+$', output):
                 break
-            print('Only alphanumeric and underscore/hyphen are allowed. Please try again')
+            print('Only lowercase alphanumeric, hyphen and dot are allowed. Please try again')
         return output
 
     def _create_config(self):
@@ -67,44 +55,42 @@
         extra_config = set(store_map['manual']) - set(self.configuration)
         for config in sorted(extra_config):
             print('---------------------------------------------')
-            print('This is configuration #%s. Please provide configuration value(use "None" if not applicable).'
-                  % config)
+            print('This is configuration #%s. Please provide configuration value(use "None" if not applicable).' % config)
             print('---------------------------------------------')
             value_conf = self._get_input('Configuration Value')
             print('---------------------------------------------\n')
             self.configuration[config] = value_conf
 
     def _create_result_id(self):
-        self.result_id = 'manual_' + self.test_module + '_' + self.starttime
+        self.result_id = 'manual_%s_%s' % (self.test_module, self.starttime)
 
-    def _execute_test_steps(self, test_id):
+    def _execute_test_steps(self, test):
         test_result = {}
-        total_steps = len(self.jdata[test_id]['test']['execution'].keys())
         print('------------------------------------------------------------------------')
-        print('Executing test case:' + '' '' + self.test_cases_id[test_id])
+        print('Executing test case: %s' % test['test']['@alias'])
         print('------------------------------------------------------------------------')
-        print('You have total ' + str(total_steps) + ' test steps to be executed.')
+        print('You have total %s test steps to be executed.' % len(test['test']['execution']))
         print('------------------------------------------------------------------------\n')
-        for step in sorted((self.jdata[test_id]['test']['execution']).keys()):
-            print('Step %s: ' % step + self.jdata[test_id]['test']['execution']['%s' % step]['action'])
-            print('Expected output: ' + self.jdata[test_id]['test']['execution']['%s' % step]['expected_results'])
-            done = input('\nPlease press ENTER when you are done to proceed to next step.\n')
+        for step, _ in sorted(test['test']['execution'].items(), key=lambda x: int(x[0])):
+            print('Step %s: %s' % (step, test['test']['execution'][step]['action']))
+            expected_output = test['test']['execution'][step]['expected_results']
+            if expected_output:
+                print('Expected output: %s' % expected_output)
         while True:
-            done = input('\nPlease provide test results: (P)assed/(F)ailed/(B)locked/(S)kipped? \n')
-            done = done.lower()
+            done = input('\nPlease provide test results: (P)assed/(F)ailed/(B)locked/(S)kipped? \n').lower()
             result_types = {'p':'PASSED',
-                                'f':'FAILED',
-                                'b':'BLOCKED',
-                                's':'SKIPPED'}
+                            'f':'FAILED',
+                            'b':'BLOCKED',
+                            's':'SKIPPED'}
             if done in result_types:
                 for r in result_types:
                     if done == r:
                         res = result_types[r]
                         if res == 'FAILED':
                             log_input = input('\nPlease enter the error and the description of the log: (Ex:log:211 Error Bitbake)\n')
-                            test_result.update({self.test_cases_id[test_id]: {'status': '%s' % res, 'log': '%s' % log_input}})
+                            test_result.update({test['test']['@alias']: {'status': '%s' % res, 'log': '%s' % log_input}})
                         else:
-                            test_result.update({self.test_cases_id[test_id]: {'status': '%s' % res}})
+                            test_result.update({test['test']['@alias']: {'status': '%s' % res}})
                 break
             print('Invalid input!')
         return test_result
@@ -119,9 +105,9 @@
         self._create_result_id()
         self._create_write_dir()
         test_results = {}
-        print('\nTotal number of test cases in this test suite: ' + '%s\n' % len(self.jdata))
-        for i in range(0, len(self.jdata)):
-            test_result = self._execute_test_steps(i)
+        print('\nTotal number of test cases in this test suite: %s\n' % len(self.jdata))
+        for t in self.jdata:
+            test_result = self._execute_test_steps(t)
             test_results.update(test_result)
         return self.configuration, self.result_id, self.write_dir, test_results
 
@@ -129,8 +115,7 @@
     testrunner = ManualTestRunner()
     get_configuration, get_result_id, get_write_dir, get_test_results = testrunner.run_test(args.file)
     resultjsonhelper = OETestResultJSONHelper()
-    resultjsonhelper.dump_testresult_file(get_write_dir, get_configuration, get_result_id,
-                                          get_test_results)
+    resultjsonhelper.dump_testresult_file(get_write_dir, get_configuration, get_result_id, get_test_results)
     return 0
 
 def register_commands(subparsers):
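
One detail in the manualexecution.py rewrite worth highlighting: execution steps are keyed by strings, so iterating them with key=lambda x: int(x[0]) avoids the lexicographic trap where step "10" sorts before step "2". A standalone illustration (the step dict is invented):

    execution = {"1": "boot the image", "2": "log in", "10": "shut down"}

    print(sorted(execution))
    # ['1', '10', '2']  -- string order, wrong for test steps
    print([step for step, _ in sorted(execution.items(), key=lambda x: int(x[0]))])
    # ['1', '2', '10']  -- numeric order, as the new code iterates them
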
diff --git a/poky/scripts/lib/resulttool/resultutils.py b/poky/scripts/lib/resulttool/resultutils.py
index 153f2b8..ad40ac8 100644
--- a/poky/scripts/lib/resulttool/resultutils.py
+++ b/poky/scripts/lib/resulttool/resultutils.py
@@ -15,6 +15,7 @@
 import os
 import json
 import scriptpath
+import copy
 scriptpath.add_oe_lib_path()
 
 flatten_map = {
@@ -60,12 +61,6 @@
         testpath = "/".join(data[res]["configuration"].get(i) for i in configmap[testtype])
         if testpath not in results:
             results[testpath] = {}
-        if 'ptestresult.rawlogs' in data[res]['result']:
-            del data[res]['result']['ptestresult.rawlogs']
-        if 'ptestresult.sections' in data[res]['result']:
-            for i in data[res]['result']['ptestresult.sections']:
-                if 'log' in data[res]['result']['ptestresult.sections'][i]:
-                    del data[res]['result']['ptestresult.sections'][i]['log']
         results[testpath][res] = data[res]
 
 #
@@ -93,15 +88,43 @@
                  newresults[r][i] = results[r][i]
     return newresults
 
-def save_resultsdata(results, destdir, fn="testresults.json"):
+def strip_ptestresults(results):
+    newresults = copy.deepcopy(results)
+    #for a in newresults2:
+    #  newresults = newresults2[a]
+    for res in newresults:
+        if 'result' not in newresults[res]:
+            continue
+        if 'ptestresult.rawlogs' in newresults[res]['result']:
+            del newresults[res]['result']['ptestresult.rawlogs']
+        if 'ptestresult.sections' in newresults[res]['result']:
+            for i in newresults[res]['result']['ptestresult.sections']:
+                if 'log' in newresults[res]['result']['ptestresult.sections'][i]:
+                    del newresults[res]['result']['ptestresult.sections'][i]['log']
+    return newresults
+
+def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
     for res in results:
         if res:
             dst = destdir + "/" + res + "/" + fn
         else:
             dst = destdir + "/" + fn
         os.makedirs(os.path.dirname(dst), exist_ok=True)
+        resultsout = results[res]
+        if not ptestjson:
+            resultsout = strip_ptestresults(results[res])
         with open(dst, 'w') as f:
-            f.write(json.dumps(results[res], sort_keys=True, indent=4))
+            f.write(json.dumps(resultsout, sort_keys=True, indent=4))
+        for res2 in results[res]:
+            if ptestlogs and 'result' in results[res][res2]:
+                if 'ptestresult.rawlogs' in results[res][res2]['result']:
+                    with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
+                        f.write(results[res][res2]['result']['ptestresult.rawlogs']['log'])
+                if 'ptestresult.sections' in results[res][res2]['result']:
+                    for i in results[res][res2]['result']['ptestresult.sections']:
+                        if 'log' in results[res][res2]['result']['ptestresult.sections'][i]:
+                            with open(dst.replace(fn, "ptest-%s.log" % i), "w+") as f:
+                                f.write(results[res][res2]['result']['ptestresult.sections'][i]['log'])
 
 def git_get_result(repo, tags):
     git_objs = []
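
The resultutils.py change moves log stripping onto a deep copy: save_resultsdata() writes a slimmed-down testresults.json (plus separate ptest-*.log files) while the in-memory results keep their raw logs. A minimal sketch of that copy-then-strip pattern, reduced to the rawlogs case and using invented sample data:

    import copy

    def strip_rawlogs(results):
        # Strip a deep copy so the caller's data keeps its logs.
        stripped = copy.deepcopy(results)
        for res in stripped.values():
            res.get('result', {}).pop('ptestresult.rawlogs', None)
        return stripped

    data = {'runtime_core-image-sato': {'result': {
                'ptestresult.rawlogs': {'log': 'many megabytes...'},
                'ptestresult.busybox': {'status': 'PASSED'}}}}

    slim = strip_rawlogs(data)
    assert 'ptestresult.rawlogs' in data['runtime_core-image-sato']['result']
    assert 'ptestresult.rawlogs' not in slim['runtime_core-image-sato']['result']
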
diff --git a/poky/scripts/lib/resulttool/store.py b/poky/scripts/lib/resulttool/store.py
index 5e33716..e4a0807 100644
--- a/poky/scripts/lib/resulttool/store.py
+++ b/poky/scripts/lib/resulttool/store.py
@@ -29,15 +29,18 @@
     try:
         results = {}
         logger.info('Reading files from %s' % args.source)
-        for root, dirs,  files in os.walk(args.source):
-            for name in files:
-                f = os.path.join(root, name)
-                if name == "testresults.json":
-                    resultutils.append_resultsdata(results, f)
-                elif args.all:
-                    dst = f.replace(args.source, tempdir + "/")
-                    os.makedirs(os.path.dirname(dst), exist_ok=True)
-                    shutil.copyfile(f, dst)
+        if os.path.isfile(args.source):
+            resultutils.append_resultsdata(results, args.source)
+        else:
+            for root, dirs,  files in os.walk(args.source):
+                for name in files:
+                    f = os.path.join(root, name)
+                    if name == "testresults.json":
+                        resultutils.append_resultsdata(results, f)
+                    elif args.all:
+                        dst = f.replace(args.source, tempdir + "/")
+                        os.makedirs(os.path.dirname(dst), exist_ok=True)
+                        shutil.copyfile(f, dst)
 
         revisions = {}
 
@@ -65,7 +68,7 @@
             results = revisions[r]
             keywords = {'commit': r[0], 'branch': r[1], "commit_count": r[2]}
             subprocess.check_call(["find", tempdir, "!", "-path", "./.git/*", "-delete"])
-            resultutils.save_resultsdata(results, tempdir)
+            resultutils.save_resultsdata(results, tempdir, ptestlogs=True)
 
             logger.info('Storing test result into git repository %s' % args.git_dir)
 
diff --git a/poky/scripts/lib/scriptutils.py b/poky/scripts/lib/scriptutils.py
index 31e48ea..0633c70 100644
--- a/poky/scripts/lib/scriptutils.py
+++ b/poky/scripts/lib/scriptutils.py
@@ -26,6 +26,8 @@
 import subprocess
 import sys
 import tempfile
+import importlib
+from importlib import machinery
 
 def logger_create(name, stream=None):
     logger = logging.getLogger(name)
@@ -37,12 +39,12 @@
 
 def logger_setup_color(logger, color='auto'):
     from bb.msg import BBLogFormatter
-    console = logging.StreamHandler(sys.stdout)
-    formatter = BBLogFormatter("%(levelname)s: %(message)s")
-    console.setFormatter(formatter)
-    logger.handlers = [console]
-    if color == 'always' or (color=='auto' and console.stream.isatty()):
-        formatter.enable_color()
+
+    for handler in logger.handlers:
+        if (isinstance(handler, logging.StreamHandler) and
+            isinstance(handler.formatter, BBLogFormatter)):
+            if color == 'always' or (color == 'auto' and handler.stream.isatty()):
+                handler.formatter.enable_color()
 
 
 def load_plugins(logger, plugins, pluginpath):
@@ -50,12 +52,9 @@
 
     def load_plugin(name):
         logger.debug('Loading plugin %s' % name)
-        fp, pathname, description = imp.find_module(name, [pluginpath])
-        try:
-            return imp.load_module(name, fp, pathname, description)
-        finally:
-            if fp:
-                fp.close()
+        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+        if spec:
+            return spec.loader.load_module()
 
     def plugin_name(filename):
         return os.path.splitext(os.path.basename(filename))[0]
@@ -70,6 +69,7 @@
                 plugin.plugin_init(plugins)
             plugins.append(plugin)
 
+
 def git_convert_standalone_clone(repodir):
     """If specified directory is a git repository, ensure it's a standalone clone"""
     import bb.process
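
The scriptutils.py plugin loader drops the long-deprecated imp module for importlib.machinery.PathFinder. A self-contained sketch of loading a module from an explicit directory this way; it uses the spec-based exec_module() form that the importlib documentation recommends over the load_module() call kept in the diff (the throwaway plugin file is invented for the example):

    import importlib.machinery
    import importlib.util
    import os
    import tempfile

    plugindir = tempfile.mkdtemp()
    with open(os.path.join(plugindir, "myplugin.py"), "w") as f:
        f.write("def plugin_init(plugins):\n    print('init with', len(plugins), 'plugins')\n")

    spec = importlib.machinery.PathFinder.find_spec("myplugin", path=[plugindir])
    if spec:
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        module.plugin_init([])
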
diff --git a/poky/scripts/lib/wic/engine.py b/poky/scripts/lib/wic/engine.py
index ea600d2..ab33fa6 100644
--- a/poky/scripts/lib/wic/engine.py
+++ b/poky/scripts/lib/wic/engine.py
@@ -338,7 +338,7 @@
     def copy(self, src, pnum, path):
         """Copy partition image into wic image."""
         if self.partitions[pnum].fstype.startswith('ext'):
-            cmd = "printf 'cd {}\nwrite {} {}' | {} -w {}".\
+            cmd = "printf 'cd {}\nwrite {} {}\n' | {} -w {}".\
                       format(path, src, os.path.basename(src),
                              self.debugfs, self._get_part_image(pnum))
         else: # fat
diff --git a/poky/scripts/lib/wic/ksparser.py b/poky/scripts/lib/wic/ksparser.py
index 7e5a9c5..08baf76 100644
--- a/poky/scripts/lib/wic/ksparser.py
+++ b/poky/scripts/lib/wic/ksparser.py
@@ -28,14 +28,30 @@
 import os
 import shlex
 import logging
+import re
 
 from argparse import ArgumentParser, ArgumentError, ArgumentTypeError
 
 from wic.engine import find_canned
 from wic.partition import Partition
+from wic.misc import get_bitbake_var
 
 logger = logging.getLogger('wic')
 
+__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
+
+def expand_line(line):
+    while True:
+        m = __expand_var_regexp__.search(line)
+        if not m:
+            return line
+        key = m.group()[2:-1]
+        val = get_bitbake_var(key)
+        if val is None:
+            logger.warning("cannot expand variable %s" % key)
+            return line
+        line = line[:m.start()] + val + line[m.end():]
+
 class KickStartError(Exception):
     """Custom exception."""
     pass
@@ -190,6 +206,7 @@
                 line = line.strip()
                 lineno += 1
                 if line and line[0] != '#':
+                    line = expand_line(line)
                     try:
                         line_args = shlex.split(line)
                         parsed = parser.parse_args(line_args)
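
The ksparser.py addition expands ${VAR} references in .wks lines from BitBake variables before each line is parsed. The same loop, made standalone with a plain dict standing in for get_bitbake_var():

    import re

    _var_re = re.compile(r"\${[^{}@\n\t :]+}")
    fake_vars = {"MACHINE": "qemux86-64"}   # stand-in for get_bitbake_var()

    def expand_line(line):
        while True:
            m = _var_re.search(line)
            if not m:
                return line
            key = m.group()[2:-1]
            val = fake_vars.get(key)
            if val is None:
                print("cannot expand variable %s" % key)
                return line
            line = line[:m.start()] + val + line[m.end():]

    print(expand_line("part / --source rootfs --label root-${MACHINE} --fstype=ext4"))
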
diff --git a/poky/scripts/lib/wic/partition.py b/poky/scripts/lib/wic/partition.py
index 3da7e23..ca206ec 100644
--- a/poky/scripts/lib/wic/partition.py
+++ b/poky/scripts/lib/wic/partition.py
@@ -173,7 +173,7 @@
             # Split sourceparams string of the form key1=val1[,key2=val2,...]
             # into a dict.  Also accepts valueless keys i.e. without =
             splitted = self.sourceparams.split(',')
-            srcparams_dict = dict(par.split('=') for par in splitted if par)
+            srcparams_dict = dict(par.split('=', 1) for par in splitted if par)
 
         plugin = PluginMgr.get_plugins('source')[self.source]
         plugin.do_configure_partition(self, srcparams_dict, creator,
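
The one-character partition.py fix matters whenever a sourceparams value itself contains an '=': maxsplit=1 keeps everything after the first '=' as the value. For example (the parameter string is invented):

    splitted = "file=grub.cfg,append=console=ttyS0".split(',')

    print(dict(par.split('=', 1) for par in splitted if par))
    # {'file': 'grub.cfg', 'append': 'console=ttyS0'}

    # Without maxsplit, "append=console=ttyS0" splits into three items and
    # dict() raises ValueError.
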
diff --git a/poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
index 0eb86a0..83a7e18 100644
--- a/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -77,12 +77,13 @@
         if not custom_cfg:
             # Create grub configuration using parameters from wks file
             bootloader = creator.ks.bootloader
+            title = source_params.get('title')
 
             grubefi_conf = ""
             grubefi_conf += "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
             grubefi_conf += "default=boot\n"
             grubefi_conf += "timeout=%s\n" % bootloader.timeout
-            grubefi_conf += "menuentry 'boot'{\n"
+            grubefi_conf += "menuentry '%s'{\n" % (title if title else "boot")
 
             kernel = "/bzImage"
 
@@ -152,9 +153,10 @@
         if not custom_cfg:
             # Create systemd-boot configuration using parameters from wks file
             kernel = "/bzImage"
+            title = source_params.get('title')
 
             boot_conf = ""
-            boot_conf += "title boot\n"
+            boot_conf += "title %s\n" % (title if title else "boot")
             boot_conf += "linux %s\n" % kernel
             boot_conf += "options LABEL=Boot root=%s %s\n" % \
                              (creator.rootdev, bootloader.append)
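
Both bootloader configurations above now honour an optional title sourceparam, falling back to "boot". Reduced to a single line for the grub case (menuentry_line is a hypothetical helper):

    def menuentry_line(source_params):
        title = source_params.get('title')
        return "menuentry '%s'{" % (title if title else "boot")

    print(menuentry_line({}))                      # menuentry 'boot'{
    print(menuentry_line({'title': 'My Distro'}))  # menuentry 'My Distro'{
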
diff --git a/poky/scripts/oe-pylint b/poky/scripts/oe-pylint
new file mode 100755
index 0000000..74ff4b8
--- /dev/null
+++ b/poky/scripts/oe-pylint
@@ -0,0 +1,11 @@
+#!/bin/bash
+#
+# Run the pylint3 against our common python module spaces and print a report of potential issues
+#
+this_dir=$(dirname $(readlink -f $0))
+ERRORS="-E"
+IGNORELIST="$ERRORS -d logging-too-many-args -d missing-docstring -d line-too-long -d invalid-name"
+PYTHONPATH=$this_dir/../bitbake/lib/ pylint3 $IGNORELIST bb
+PYTHONPATH=$this_dir/../bitbake/lib/:$this_dir/../meta/lib pylint3 $IGNORELIST -d undefined-variable oe
+PYTHONPATH=$this_dir/../bitbake/lib/:$this_dir/../meta/lib pylint3  $IGNORELIST oeqa
+PYTHONPATH=$this_dir/../bitbake/lib/:$this_dir/../meta/lib:$this_dir/lib pylint3 $IGNORELIST -d undefined-variable argparse_oe buildstats  devtool recipetool scriptpath testcasemgmt build_perf checklayer resulttool  scriptutils wic
\ No newline at end of file
diff --git a/poky/scripts/oepydevshell-internal.py b/poky/scripts/oepydevshell-internal.py
index 04621ae..2f7d5d4 100755
--- a/poky/scripts/oepydevshell-internal.py
+++ b/poky/scripts/oepydevshell-internal.py
@@ -63,7 +63,9 @@
             (ready, _, _) = select.select([pty, sys.stdin], writers , [], 0)
             try:
                 if pty in ready:
-                    i = i + pty.read().decode('utf-8')
+                    readdata = pty.read()
+                    if readdata:
+                        i = i + readdata.decode('utf-8')
                 if i:
                     # Write a page at a time to avoid overflowing output 
                     # d.keys() is a good way to do that
diff --git a/poky/scripts/pythondeps b/poky/scripts/pythondeps
index 590b976..3e13a58 100755
--- a/poky/scripts/pythondeps
+++ b/poky/scripts/pythondeps
@@ -9,7 +9,8 @@
 
 import argparse
 import ast
-import imp
+import importlib
+from importlib import machinery
 import logging
 import os.path
 import sys
@@ -17,10 +18,7 @@
 
 logger = logging.getLogger('pythondeps')
 
-suffixes = []
-for triple in imp.get_suffixes():
-    suffixes.append(triple[0])
-
+suffixes = importlib.machinery.all_suffixes()
 
 class PythonDepError(Exception):
     pass
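
importlib.machinery.all_suffixes() collects the same kind of suffix list that the removed imp.get_suffixes() loop was building, already flattened to plain extension strings:

    import importlib.machinery

    # e.g. ['.py', '.pyc', '.so', ...] on CPython/Linux
    print(importlib.machinery.all_suffixes())
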
diff --git a/poky/scripts/resulttool b/poky/scripts/resulttool
index 5a89e1c..18ac101 100755
--- a/poky/scripts/resulttool
+++ b/poky/scripts/resulttool
@@ -51,13 +51,6 @@
 import resulttool.manualexecution
 logger = scriptutils.logger_create('resulttool')
 
-def _validate_user_input_arguments(args):
-    if hasattr(args, "source_dir"):
-        if not os.path.isdir(args.source_dir):
-            logger.error('source_dir argument need to be a directory : %s' % args.source_dir)
-            return False
-    return True
-
 def main():
     parser = argparse_oe.ArgumentParser(description="OEQA test result manipulation tool.",
                                         epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
@@ -80,9 +73,6 @@
     elif args.quiet:
         logger.setLevel(logging.ERROR)
 
-    if not _validate_user_input_arguments(args):
-        return -1
-
     try:
         ret = args.func(args, logger)
     except argparse_oe.ArgumentUsageError as ae:
diff --git a/poky/scripts/runqemu b/poky/scripts/runqemu
index 1c96b29..c0e569c 100755
--- a/poky/scripts/runqemu
+++ b/poky/scripts/runqemu
@@ -74,7 +74,12 @@
   MACHINE - the machine name (optional, autodetected from KERNEL filename if unspecified)
   Simplified QEMU command-line options can be passed with:
     nographic - disable video console
+    sdl - choose the SDL frontend instead of the Gtk+ default
+    gtk-gl - enable virgl-based GL acceleration using Gtk+ frontend
+    gtk-gl-es - enable virgl-based GL acceleration, using OpenGL ES and Gtk+ frontend
+    egl-headless - enable headless EGL output; use vnc or spice to see it
     serial - enable a serial console on /dev/ttyS0
+    serialstdio - enable a serial console on the console (regardless of graphics mode)
     slirp - enable user networking, no root privileges is required
     kvm - enable KVM when running x86/x86_64 (VT-capable CPU required)
     kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required)
@@ -114,39 +119,6 @@
     if not os.access(dev_tun, os.W_OK):
         raise RunQemuError("TUN control device %s is not writable, please fix (e.g. sudo chmod 666 %s)" % (dev_tun, dev_tun))
 
-def check_libgl(qemu_bin):
-    cmd = ('ldd', qemu_bin)
-    logger.debug('Running %s...' % str(cmd))
-    need_gl = subprocess.check_output(cmd).decode('utf-8')
-    if re.search('libGLU', need_gl):
-        # We can't run without a libGL.so
-        libgl = False
-        check_files = (('/usr/lib/libGL.so', '/usr/lib/libGLU.so'), \
-            ('/usr/lib64/libGL.so', '/usr/lib64/libGLU.so'), \
-            ('/usr/lib/*-linux-gnu/libGL.so', '/usr/lib/*-linux-gnu/libGLU.so'))
-
-        for (f1, f2) in check_files:
-            if re.search('\*', f1):
-                for g1 in glob.glob(f1):
-                    if libgl:
-                        break
-                    if os.path.exists(g1):
-                        for g2 in glob.glob(f2):
-                            if os.path.exists(g2):
-                                libgl = True
-                                break
-                if libgl:
-                    break
-            else:
-                if os.path.exists(f1) and os.path.exists(f2):
-                    libgl = True
-                    break
-        if not libgl:
-            logger.error("You need libGL.so and libGLU.so to exist in your library path to run the QEMU emulator.")
-            logger.error("Ubuntu package names are: libgl1-mesa-dev and libglu1-mesa-dev.")
-            logger.error("Fedora package names are: mesa-libGL-devel mesa-libGLU-devel.")
-            raise RunQemuError('%s requires libGLU, but not found' % qemu_bin)
-
 def get_first_file(cmds):
     """Return first file found in wildcard cmds"""
     for cmd in cmds:
@@ -212,6 +184,7 @@
         self.slirp_enabled = False
         self.nfs_instance = 0
         self.nfs_running = False
+        self.serialconsole = False
         self.serialstdio = False
         self.cleantap = False
         self.saved_stty = ''
@@ -432,8 +405,33 @@
             elif arg == 'nographic':
                 self.qemu_opt_script += ' -nographic'
                 self.kernel_cmdline_script += ' console=ttyS0'
+            elif arg == 'sdl':
+                self.qemu_opt_script += ' -display sdl'
+            elif arg == 'gtk-gl':
+                self.qemu_opt_script += ' -vga virtio -display gtk,gl=on'
+            elif arg == 'gtk-gl-es':
+                self.qemu_opt_script += ' -vga virtio -display gtk,gl=es'
+            elif arg == 'egl-headless':
+                self.qemu_opt_script += ' -vga virtio -display egl-headless'
+                # As runqemu can be run within bitbake (when using testimage, for example),
+                # we need to ensure that we run host pkg-config, and that it does not
+                # get mis-directed to native build paths set by bitbake.
+                try:
+                    del os.environ['PKG_CONFIG_PATH']
+                    del os.environ['PKG_CONFIG_DIR']
+                    del os.environ['PKG_CONFIG_LIBDIR']
+                except KeyError:
+                    pass
+                try:
+                    dripath = subprocess.check_output("PATH=/bin:/usr/bin:$PATH pkg-config --variable=dridriverdir dri", shell=True)
+                except subprocess.CalledProcessError as e:
+                    raise RunQemuError("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
+                os.environ['LIBGL_DRIVERS_PATH'] = dripath.decode('utf-8').strip()
             elif arg == 'serial':
                 self.kernel_cmdline_script += ' console=ttyS0'
+                self.serialconsole = True
+            elif arg == "serialstdio":
+                self.kernel_cmdline_script += ' console=ttyS0'
                 self.serialstdio = True
             elif arg == 'audio':
                 logger.info("Enabling audio in qemu")
@@ -1166,8 +1164,6 @@
         if not os.access(qemu_bin, os.X_OK):
             raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin)
 
-        check_libgl(qemu_bin)
-
         self.qemu_opt = "%s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND'))
 
         for ovmf in self.ovmf_bios:
@@ -1187,7 +1183,7 @@
         if self.snapshot:
             self.qemu_opt += " -snapshot"
 
-        if self.serialstdio:
+        if self.serialconsole:
             if sys.stdin.isatty():
                 subprocess.check_call(("stty", "intr", "^]"))
                 logger.info("Interrupt character is '^]'")
@@ -1214,7 +1210,7 @@
         #     INIT: Id "S1" respawning too fast: disabled for 5 minutes
         serial_num = len(re.findall("-serial", self.qemu_opt))
         if serial_num == 0:
-            if re.search("-nographic", self.qemu_opt):
+            if re.search("-nographic", self.qemu_opt) or self.serialstdio:
                 self.qemu_opt += " -serial mon:stdio -serial null"
             else:
                 self.qemu_opt += " -serial mon:vc -serial null"