Yocto 2.4

Move OpenBMC to Yocto 2.4 (rocko)

Tested: Built and verified Witherspoon and Palmetto images
Change-Id: I12057b18610d6fb0e6903c60213690301e9b0c67
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
index d646b0c..94e3d7d 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py
@@ -115,8 +115,8 @@
         import bb.tinfoil
         tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
         try:
-            tinfoil.prepare(config_only)
             tinfoil.logger.setLevel(logger.getEffectiveLevel())
+            tinfoil.prepare(config_only)
         except bb.tinfoil.TinfoilUIException:
             tinfoil.shutdown()
             raise DevtoolError('Failed to start bitbake environment')
@@ -191,7 +191,7 @@
         logger.info('Using source tree as build directory since --same-dir specified')
     elif bb.data.inherits_class('autotools-brokensep', d):
         logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
-    elif d.getVar('B') == os.path.abspath(d.getVar('S')):
+    elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
         logger.info('Using source tree as build directory since that would be the default for this recipe')
     else:
         b_is_s = False
@@ -261,34 +261,79 @@
                 targets.append('%s-%s' % (pn, variant))
     return targets
 
-def ensure_npm(config, basepath, fixed_setup=False, check_exists=True):
-    """
-    Ensure that npm is available and either build it or show a
-    reasonable error message
-    """
-    if check_exists:
-        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
-        try:
-            rd = tinfoil.parse_recipe('nodejs-native')
-            nativepath = rd.getVar('STAGING_BINDIR_NATIVE')
-        finally:
-            tinfoil.shutdown()
-        npmpath = os.path.join(nativepath, 'npm')
-        build_npm = not os.path.exists(npmpath)
-    else:
-        build_npm = True
+def replace_from_file(path, old, new):
+    """Replace strings on a file"""
 
-    if build_npm:
-        logger.info('Building nodejs-native')
+    def read_file(path):
+        data = None
+        with open(path) as f:
+            data = f.read()
+        return data
+
+    def write_file(path, data):
+        if data is None:
+            return
+        wdata = data.rstrip() + "\n"
+        with open(path, "w") as f:
+            f.write(wdata)
+
+    # In case old is None, return immediately
+    if old is None:
+        return
+    try:
+        rdata = read_file(path)
+    except IOError as e:
+        # if the file does not exist, just quit; otherwise raise the exception
+        if e.errno == errno.ENOENT:
+            return
+        else:
+            raise
+
+    old_contents = rdata.splitlines()
+    new_contents = []
+    for old_content in old_contents:
         try:
-            exec_build_env_command(config.init_path, basepath,
-                                'bitbake -q nodejs-native -c addto_recipe_sysroot', watch=True)
-        except bb.process.ExecutionError as e:
-            if "Nothing PROVIDES 'nodejs-native'" in e.stdout:
-                if fixed_setup:
-                    msg = 'nodejs-native is required for npm but is not available within this SDK'
-                else:
-                    msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
-                raise DevtoolError(msg)
-            else:
-                raise
+            new_contents.append(old_content.replace(old, new))
+        except ValueError:
+            pass
+    write_file(path, "\n".join(new_contents))
+
+
+def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
+    """ This function will make unlocked-sigs.inc match the recipes in the
+    workspace plus any extras we want unlocked. """
+
+    if not fixed_setup:
+        # Only need to write this out within the eSDK
+        return
+
+    if not extra:
+        extra = []
+
+    confdir = os.path.join(basepath, 'conf')
+    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')
+
+    # Get current unlocked list if any
+    values = {}
+    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
+        values[varname] = origvalue
+        return origvalue, None, 0, True
+    if os.path.exists(unlockedsigs):
+        with open(unlockedsigs, 'r') as f:
+            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
+    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))
+
+    # If the new list is different to the current list, write it out
+    newunlocked = sorted(list(workspace.keys()) + extra)
+    if unlocked != newunlocked:
+        bb.utils.mkdirhier(confdir)
+        with open(unlockedsigs, 'w') as f:
+            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
+                    "# This layer was created by the OpenEmbedded devtool" +
+                    " utility in order to\n" +
+                    "# contain recipes that are unlocked.\n")
+
+            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
+            for pn in newunlocked:
+                f.write('    ' + pn)
+            f.write('"')
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
index b3730ae..9cc4927 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py
@@ -16,12 +16,16 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 """Devtool plugin containing the deploy subcommands"""
 
-import os
-import subprocess
 import logging
-import tempfile
+import os
 import shutil
+import subprocess
+import tempfile
+
+import bb.utils
 import argparse_oe
+import oe.types
+
 from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
@@ -64,7 +68,7 @@
         lines.append('                rmdir $file > /dev/null 2>&1 || true')
         lines.append('            fi')
         lines.append('        else')
-        lines.append('            rm $file')
+        lines.append('            rm -f $file')
         lines.append('        fi')
     lines.append('    done')
     if not dryrun:
@@ -119,7 +123,11 @@
         # Put any preserved files back
         lines.append('if [ -d $preservedir ] ; then')
         lines.append('    cd $preservedir')
-        lines.append('    find . -type f -exec mv {} /{} \;')
+        # find from busybox might not have -exec, so we don't use that
+        lines.append('    find . -type f | while read file')
+        lines.append('    do')
+        lines.append('        mv $file /$file')
+        lines.append('    done')
         lines.append('    cd /')
         lines.append('    rm -rf $preservedir')
         lines.append('fi')
@@ -136,11 +144,12 @@
     return '\n'.join(lines)
 
 
+
 def deploy(args, config, basepath, workspace):
     """Entry point for the devtool 'deploy' subcommand"""
-    import re
     import math
     import oe.recipeutils
+    import oe.package
 
     check_workspace_recipe(workspace, args.recipename, checksrc=False)
 
@@ -166,6 +175,17 @@
                             'recipe? If so, the install step has not installed '
                             'any files.' % args.recipename)
 
+        if args.strip and not args.dry_run:
+            # Fakeroot copy to new destination
+            srcdir = recipe_outdir
+            recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
+            if os.path.isdir(recipe_outdir):
+                bb.utils.remove(recipe_outdir, True)
+            exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+            os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
+            oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
+                        rd.getVar('base_libdir'))
+
         filelist = []
         ftotalsize = 0
         for root, _, files in os.walk(recipe_outdir):
@@ -185,7 +205,6 @@
                 print('  %s' % item)
             return 0
 
-
         extraoptions = ''
         if args.no_host_check:
             extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
@@ -297,6 +316,7 @@
 
 def register_commands(subparsers, context):
     """Register devtool subcommands from the deploy plugin"""
+
     parser_deploy = subparsers.add_parser('deploy-target',
                                           help='Deploy recipe output files to live target machine',
                                           description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
@@ -309,6 +329,15 @@
     parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
     parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
     parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
+
+    strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
+    strip_opts.add_argument('-S', '--strip',
+                               help='Strip executables prior to deploying (default: %(default)s). '
+                                    'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
+                               default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
+                               action='store_true')
+    strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
+
     parser_deploy.set_defaults(func=deploy)
 
     parser_undeploy = subparsers.add_parser('undeploy-target',
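
The new -S/--strip and --no-strip options for deploy-target take their default from the 'strip' entry in the [Deploy] section of the devtool configuration. A self-contained sketch of that argparse wiring follows; parse_boolean is a simplified stand-in for oe.types.boolean, and the '0' literal stands in for the value read from the config file.

    # Sketch of the --strip/--no-strip default resolution; not the actual
    # devtool code, and parse_boolean only approximates oe.types.boolean.
    import argparse

    def parse_boolean(value):
        if value.lower() in ('1', 'y', 'yes', 't', 'true'):
            return True
        if value.lower() in ('0', 'n', 'no', 'f', 'false'):
            return False
        raise ValueError('Invalid boolean value %r' % value)

    parser = argparse.ArgumentParser(prog='devtool deploy-target')
    strip_opts = parser.add_mutually_exclusive_group()
    strip_opts.add_argument('-S', '--strip', action='store_true',
                            default=parse_boolean('0'))        # value from [Deploy] strip
    strip_opts.add_argument('--no-strip', dest='strip', action='store_false')

    print(parser.parse_args([]).strip)               # False: config default wins
    print(parser.parse_args(['--strip']).strip)      # True
    print(parser.parse_args(['--no-strip']).strip)   # False
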
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/export.py b/import-layers/yocto-poky/scripts/lib/devtool/export.py
new file mode 100644
index 0000000..13ee258
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/devtool/export.py
@@ -0,0 +1,119 @@
+# Development tool - export command plugin
+#
+# Copyright (C) 2014-2017 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool export plugin"""
+
+import os
+import argparse
+import tarfile
+import logging
+import datetime
+import json
+
+logger = logging.getLogger('devtool')
+
+# output files
+default_arcname_prefix = "workspace-export"
+metadata = '.export_metadata'
+
+def export(args, config, basepath, workspace):
+    """Entry point for the devtool 'export' subcommand"""
+
+    def add_metadata(tar):
+        """Archive the workspace object"""
+        # finally store the workspace metadata
+        with open(metadata, 'w') as fd:
+            fd.write(json.dumps((config.workspace_path, workspace)))
+        tar.add(metadata)
+        os.unlink(metadata)
+
+    def add_recipe(tar, recipe, data):
+        """Archive recipe with proper arcname"""
+        # Create a map of name/arcnames
+        arcnames = []
+        for key, name in data.items():
+            if name:
+                if key == 'srctree':
+                    # all sources, no matter where they are located, go into the sources directory
+                    arcname = 'sources/%s' % recipe
+                else:
+                    arcname = name.replace(config.workspace_path, '')
+                arcnames.append((name, arcname))
+
+        for name, arcname in arcnames:
+            tar.add(name, arcname=arcname)
+
+
+    # Make sure the workspace is non-empty and that any recipes listed with include/exclude are in the workspace
+    if not workspace:
+        logger.info('Workspace contains no recipes, nothing to export')
+        return 0
+    else:
+        for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
+            for recipe in recipes:
+                if recipe not in workspace:
+                    logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
+                    return 1
+
+    name = args.file
+
+    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+    if not name:
+        name = default_name
+    else:
+        # if name is a directory, append the default name
+        if os.path.isdir(name):
+            name = os.path.join(name, default_name)
+
+    if os.path.exists(name) and not args.overwrite:
+        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
+        return 1
+
+    # if all workspace is excluded, quit
+    if not len(set(workspace.keys()).difference(set(args.exclude))):
+        logger.warn('All recipes in workspace excluded, nothing to export')
+        return 0
+
+    exported = []
+    with tarfile.open(name, 'w:gz') as tar:
+        if args.include:
+            for recipe in args.include:
+                add_recipe(tar, recipe, workspace[recipe])
+                exported.append(recipe)
+        else:
+            for recipe, data in workspace.items():
+                if recipe not in args.exclude:
+                    add_recipe(tar, recipe, data)
+                    exported.append(recipe)
+
+        add_metadata(tar)
+
+    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
+    return 0
+
+def register_commands(subparsers, context):
+    """Register devtool export subcommands"""
+    parser = subparsers.add_parser('export',
+                                   help='Export workspace into a tar archive',
+                                   description='Export one or more recipes from current workspace into a tar archive',
+                                   group='advanced')
+
+    parser.add_argument('--file', '-f', help='Output archive file name')
+    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('--include', '-i', nargs='+', default=[], help='Include only the specified recipes in the tar archive')
+    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude the specified recipes from the tar archive')
+    parser.set_defaults(func=export)
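
devtool export stores the workspace state in a .export_metadata archive member as a JSON-encoded (workspace_path, workspace) pair, which devtool import reads back before extracting anything. A minimal sketch of reading that member directly from an exported archive (the archive filename is only an example):

    # Read the metadata member back out of an archive produced by
    # 'devtool export'; stdlib only, for illustration.
    import json
    import tarfile

    def read_export_metadata(archive):
        with tarfile.open(archive) as tar:
            with tar.extractfile(tar.getmember('.export_metadata')) as f:
                workspace_path, workspace = json.load(f)
        return workspace_path, workspace

    # Example (the filename is illustrative):
    # path, recipes = read_export_metadata('workspace-export-20171001120000.tar.gz')
    # print(path, sorted(recipes))
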
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/import.py b/import-layers/yocto-poky/scripts/lib/devtool/import.py
new file mode 100644
index 0000000..c13a180
--- /dev/null
+++ b/import-layers/yocto-poky/scripts/lib/devtool/import.py
@@ -0,0 +1,144 @@
+# Development tool - import command plugin
+#
+# Copyright (C) 2014-2017 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool import plugin"""
+
+import os
+import tarfile
+import logging
+import collections
+import json
+import fnmatch
+
+from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
+from devtool import export
+
+logger = logging.getLogger('devtool')
+
+def devimport(args, config, basepath, workspace):
+    """Entry point for the devtool 'import' subcommand"""
+
+    def get_pn(name):
+        """ Returns the filename of a workspace recipe/append"""
+        metadata = name.split('/')[-1]
+        fn, _ = os.path.splitext(metadata)
+        return fn
+
+    if not os.path.exists(args.file):
+        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)
+
+    with tarfile.open(args.file) as tar:
+        # Get exported metadata
+        export_workspace_path = export_workspace = None
+        try:
+            metadata = tar.getmember(export.metadata)
+        except KeyError as ke:
+            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')
+
+        tar.extract(metadata)
+        with open(metadata.name) as fdm:
+            export_workspace_path, export_workspace = json.load(fdm)
+        os.unlink(metadata.name)
+
+        members = tar.getmembers()
+
+        # Get appends and recipes from the exported archive, these
+        # will be needed to find out those appends without corresponding
+        # recipe pair
+        append_fns, recipe_fns = set(), set()
+        for member in members:
+            if member.name.startswith('appends'):
+                append_fns.add(get_pn(member.name))
+            elif member.name.startswith('recipes'):
+                recipe_fns.add(get_pn(member.name))
+
+        # Setup tinfoil, get required data and shutdown
+        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+        try:
+            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
+        finally:
+            tinfoil.shutdown()
+
+        # Find those appends that do not have recipes in current metadata
+        non_importables = []
+        for fn in append_fns - recipe_fns:
+            # Check on current metadata (covering those layers indicated in bblayers.conf)
+            for current_fn in current_fns:
+                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
+                    break
+            else:
+                non_importables.append(fn)
+                logger.warn('No recipe to append %s.bbappend, skipping' % fn)
+
+        # Extract
+        imported = []
+        for member in members:
+            if member.name == export.metadata:
+                continue
+
+            for nonimp in non_importables:
+                pn = nonimp.split('_')[0]
+                # do not extract data from non-importable recipes or metadata
+                if member.name.startswith('appends/%s' % nonimp) or \
+                        member.name.startswith('recipes/%s' % nonimp) or \
+                        member.name.startswith('sources/%s' % pn):
+                    break
+            else:
+                path = os.path.join(config.workspace_path, member.name)
+                if os.path.exists(path):
+                    # by default, no file overwrite is done unless -o is given by the user
+                    if args.overwrite:
+                        try:
+                            tar.extract(member, path=config.workspace_path)
+                        except PermissionError as pe:
+                            logger.warn(pe)
+                    else:
+                        logger.warn('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
+                        continue
+                else:
+                    tar.extract(member, path=config.workspace_path)
+
+                # Update EXTERNALSRC and the devtool md5 file
+                if member.name.startswith('appends'):
+                    if export_workspace_path:
+                        # appends created by 'devtool modify' just need to update the workspace
+                        replace_from_file(path, export_workspace_path, config.workspace_path)
+
+                        # appends created by 'devtool add' need replacement of exported source tree
+                        pn = get_pn(member.name).split('_')[0]
+                        exported_srctree = export_workspace[pn]['srctree']
+                        if exported_srctree:
+                            replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))
+
+                    standard._add_md5(config, pn, path)
+                    imported.append(pn)
+
+    if imported:
+        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
+    else:
+        logger.warn('No recipes imported into the workspace')
+
+    return 0
+
+def register_commands(subparsers, context):
+    """Register devtool import subcommands"""
+    parser = subparsers.add_parser('import',
+                                   help='Import exported tar archive into workspace',
+                                   description='Import tar archive previously created by "devtool export" into workspace',
+                                   group='advanced')
+    parser.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
+    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
+    parser.set_defaults(func=devimport)
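
devimport() skips any exported bbappend whose base recipe cannot be found in the current metadata; the check is a loose fnmatch of the append name (with '%' treated as a wildcard) against the recipe filenames known to bitbake. A standalone sketch of that filter, with invented sample filenames:

    # Standalone illustration of the "append without a recipe" filter.
    import fnmatch

    def find_non_importable(append_fns, recipe_fns, current_fns):
        non_importables = []
        for fn in append_fns - recipe_fns:
            # e.g. busybox_% matches busybox_1.24.1.bb from the current layers
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
        return non_importables

    print(find_non_importable({'busybox_%', 'fancyapp_1.0'}, set(),
                              ['busybox_1.24.1.bb', 'zlib_1.2.11.bb']))
    # -> ['fancyapp_1.0']
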
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
index e8bf0ad..f46577c 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py
@@ -155,7 +155,7 @@
         if os.path.exists(os.path.join(basepath, 'layers/.git')):
             out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
             if not out:
-                ret = subprocess.call("git fetch --all; git reset --hard", shell=True, cwd=layers_dir)
+                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
             else:
                 logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
                 logger.error("Changed files:\n%s" % out);
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/standard.py b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
index 5ff1e23..beea0d4 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/standard.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/standard.py
@@ -1,6 +1,6 @@
 # Development tool - standard commands plugin
 #
-# Copyright (C) 2014-2016 Intel Corporation
+# Copyright (C) 2014-2017 Intel Corporation
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
@@ -30,7 +30,7 @@
 import glob
 import filecmp
 from collections import OrderedDict
-from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, ensure_npm, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, DevtoolError
 from devtool import parse_recipe
 
 logger = logging.getLogger('devtool')
@@ -66,6 +66,12 @@
         elif os.path.isdir(args.recipename):
             logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
 
+    if not args.fetchuri:
+        if args.srcrev:
+            raise DevtoolError('The -S/--srcrev option is only valid when fetching from an SCM repository')
+        if args.srcbranch:
+            raise DevtoolError('The -B/--srcbranch option is only valid when fetching from an SCM repository')
+
     if args.srctree and os.path.isfile(args.srctree):
         args.fetchuri = 'file://' + os.path.abspath(args.srctree)
         args.srctree = ''
@@ -128,9 +134,6 @@
         color = args.color
     extracmdopts = ''
     if args.fetchuri:
-        if args.fetchuri.startswith('npm://'):
-            ensure_npm(config, basepath, args.fixed_setup)
-
         source = args.fetchuri
         if srctree:
             extracmdopts += ' -x %s' % srctree
@@ -152,31 +155,24 @@
         extracmdopts += ' -a'
     if args.fetch_dev:
         extracmdopts += ' --fetch-dev'
+    if args.mirrors:
+        extracmdopts += ' --mirrors'
+    if args.srcrev:
+        extracmdopts += ' --srcrev %s' % args.srcrev
+    if args.srcbranch:
+        extracmdopts += ' --srcbranch %s' % args.srcbranch
+    if args.provides:
+        extracmdopts += ' --provides %s' % args.provides
 
     tempdir = tempfile.mkdtemp(prefix='devtool')
     try:
-        builtnpm = False
-        while True:
-            try:
-                stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
-            except bb.process.ExecutionError as e:
-                if e.exitcode == 14:
-                    if builtnpm:
-                        raise DevtoolError('Re-running recipetool still failed to find npm')
-                    # FIXME this is a horrible hack that is unfortunately
-                    # necessary due to the fact that we can't run bitbake from
-                    # inside recipetool since recipetool keeps tinfoil active
-                    # with references to it throughout the code, so we have
-                    # to exit out and come back here to do it.
-                    ensure_npm(config, basepath, args.fixed_setup, check_exists=False)
-                    logger.info('Re-running recipe creation process after building nodejs')
-                    builtnpm = True
-                    continue
-                elif e.exitcode == 15:
-                    raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
-                else:
-                    raise DevtoolError('Command \'%s\' failed' % e.command)
-            break
+        try:
+            stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True)
+        except bb.process.ExecutionError as e:
+            if e.exitcode == 15:
+                raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line')
+            else:
+                raise DevtoolError('Command \'%s\' failed' % e.command)
 
         recipes = glob.glob(os.path.join(tempdir, '*.bb'))
         if recipes:
@@ -282,6 +278,24 @@
                 f.write('    done\n')
                 f.write('}\n')
 
+        # Check if the new layer provides recipes whose priorities have been
+        # overridden by PREFERRED_PROVIDER.
+        recipe_name = rd.getVar('PN')
+        provides = rd.getVar('PROVIDES')
+        # Search every item defined in PROVIDES
+        for recipe_provided in provides.split():
+            preferred_provider = 'PREFERRED_PROVIDER_' + recipe_provided
+            current_pprovider = rd.getVar(preferred_provider)
+            if current_pprovider and current_pprovider != recipe_name:
+                if args.fixed_setup:
+                    # if we are inside the eSDK, add the new PREFERRED_PROVIDER to the workspace layer.conf
+                    layerconf_file = os.path.join(config.workspace_path, "conf", "layer.conf")
+                    with open(layerconf_file, 'a') as f:
+                        f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
+                else:
+                    logger.warn('Set \'%s\' in order to use the recipe' % preferred_provider)
+                break
+
         _add_md5(config, recipename, appendfile)
 
         logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
@@ -383,7 +397,7 @@
     """Entry point for the devtool 'extract' subcommand"""
     import bb
 
-    tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+    tinfoil = setup_tinfoil(basepath=basepath)
     if not tinfoil:
         # Error already shown
         return 1
@@ -393,7 +407,7 @@
             return 1
 
         srctree = os.path.abspath(args.srctree)
-        initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
+        initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
         logger.info('Source tree extracted to %s' % srctree)
 
         if initial_rev:
@@ -407,7 +421,7 @@
     """Entry point for the devtool 'sync' subcommand"""
     import bb
 
-    tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+    tinfoil = setup_tinfoil(basepath=basepath)
     if not tinfoil:
         # Error already shown
         return 1
@@ -417,7 +431,7 @@
             return 1
 
         srctree = os.path.abspath(args.srctree)
-        initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd, tinfoil)
+        initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
         logger.info('Source tree %s synchronized' % srctree)
 
         if initial_rev:
@@ -428,31 +442,10 @@
         tinfoil.shutdown()
 
 
-def _prep_extract_operation(config, basepath, recipename, tinfoil=None):
-    """HACK: Ugly workaround for making sure that requirements are met when
-       trying to extract a package. Returns the tinfoil instance to be used."""
-    if not tinfoil:
-        tinfoil = setup_tinfoil(basepath=basepath)
-
-    rd = parse_recipe(config, tinfoil, recipename, True)
-    if not rd:
-        return None
-
-    if bb.data.inherits_class('kernel-yocto', rd):
-        tinfoil.shutdown()
-        try:
-            stdout, _ = exec_build_env_command(config.init_path, basepath,
-                                               'bitbake kern-tools-native')
-            tinfoil = setup_tinfoil(basepath=basepath)
-        except bb.process.ExecutionError as err:
-            raise DevtoolError("Failed to build kern-tools-native:\n%s" %
-                               err.stdout)
-    return tinfoil
-
-
-def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
+def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil):
     """Extract sources of a recipe"""
     import oe.recipeutils
+    import oe.patch
 
     pn = d.getVar('PN')
 
@@ -480,6 +473,12 @@
         os.rmdir(srctree)
 
     initial_rev = None
+
+    appendexisted = False
+    recipefile = d.getVar('FILE')
+    appendfile = recipe_to_append(recipefile, config)
+    is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
+
     # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary
     # directory so that:
     # (a) we pick up all files that get unpacked to the WORKDIR, and
@@ -498,137 +497,56 @@
     try:
         tinfoil.logger.setLevel(logging.WARNING)
 
-        crd = d.createCopy()
-        # Make a subdir so we guard against WORKDIR==S
-        workdir = os.path.join(tempdir, 'workdir')
-        crd.setVar('WORKDIR', workdir)
-        if not crd.getVar('S').startswith(workdir):
-            # Usually a shared workdir recipe (kernel, gcc)
-            # Try to set a reasonable default
-            if bb.data.inherits_class('kernel', d):
-                crd.setVar('S', '${WORKDIR}/source')
+        # FIXME this results in a cache reload under control of tinfoil, which is fine
+        # except we don't get the knotty progress bar
+
+        if os.path.exists(appendfile):
+            appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
+            shutil.copyfile(appendfile, appendbackup)
+        else:
+            appendbackup = None
+            bb.utils.mkdirhier(os.path.dirname(appendfile))
+        logger.debug('writing append file %s' % appendfile)
+        with open(appendfile, 'a') as f:
+            f.write('###--- _extract_source\n')
+            f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
+            f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
+            if not is_kernel_yocto:
+                f.write('PATCHTOOL = "git"\n')
+                f.write('PATCH_COMMIT_FUNCTIONS = "1"\n')
+            f.write('inherit devtool-source\n')
+            f.write('###--- _extract_source\n')
+
+        update_unlockedsigs(basepath, workspace, fixed_setup, [pn])
+
+        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
+        bb.utils.mkdirhier(sstate_manifests)
+        preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
+        with open(preservestampfile, 'w') as f:
+            f.write(d.getVar('STAMP'))
+        try:
+            if bb.data.inherits_class('kernel-yocto', d):
+                # We need to generate the kernel config
+                task = 'do_configure'
             else:
-                crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S')))
-        if bb.data.inherits_class('kernel', d):
-            # We don't want to move the source to STAGING_KERNEL_DIR here
-            crd.setVar('STAGING_KERNEL_DIR', '${S}')
+                task = 'do_patch'
 
-        is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
-        if not is_kernel_yocto:
-            crd.setVar('PATCHTOOL', 'git')
-            crd.setVar('PATCH_COMMIT_FUNCTIONS', '1')
+            # Run the fetch + unpack tasks
+            res = tinfoil.build_targets(pn,
+                                        task,
+                                        handle_events=True)
+        finally:
+            if os.path.exists(preservestampfile):
+                os.remove(preservestampfile)
 
-        # Apply our changes to the datastore to the server's datastore
-        for key in crd.localkeys():
-            tinfoil.config_data.setVar('%s_pn-%s' % (key, pn), crd.getVar(key, False))
+        if not res:
+            raise DevtoolError('Extracting source for %s failed' % pn)
 
-        tinfoil.config_data.setVar('STAMPS_DIR', os.path.join(tempdir, 'stamps'))
-        tinfoil.config_data.setVar('T', os.path.join(tempdir, 'temp'))
-        tinfoil.config_data.setVar('BUILDCFG_FUNCS', '')
-        tinfoil.config_data.setVar('BUILDCFG_HEADER', '')
-        tinfoil.config_data.setVar('BB_HASH_IGNORE_MISMATCH', '1')
+        with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
+            initial_rev = f.read()
 
-        tinfoil.set_event_mask(['bb.event.BuildStarted',
-                                'bb.event.BuildCompleted',
-                                'logging.LogRecord',
-                                'bb.command.CommandCompleted',
-                                'bb.command.CommandFailed',
-                                'bb.build.TaskStarted',
-                                'bb.build.TaskSucceeded',
-                                'bb.build.TaskFailed',
-                                'bb.build.TaskFailedSilent'])
-
-        def runtask(target, task):
-            if tinfoil.build_file(target, task):
-                while True:
-                    event = tinfoil.wait_event(0.25)
-                    if event:
-                        if isinstance(event, bb.command.CommandCompleted):
-                            break
-                        elif isinstance(event, bb.command.CommandFailed):
-                            raise DevtoolError('Task do_%s failed: %s' % (task, event.error))
-                        elif isinstance(event, bb.build.TaskFailed):
-                            raise DevtoolError('Task do_%s failed' % task)
-                        elif isinstance(event, bb.build.TaskStarted):
-                            logger.info('Executing %s...' % event._task)
-                        elif isinstance(event, logging.LogRecord):
-                            if event.levelno <= logging.INFO:
-                                continue
-                            logger.handle(event)
-
-        # we need virtual:native:/path/to/recipe if it's a BBCLASSEXTEND
-        fn = tinfoil.get_recipe_file(pn)
-        runtask(fn, 'unpack')
-
-        if bb.data.inherits_class('kernel-yocto', d):
-            # Extra step for kernel to populate the source directory
-            runtask(fn, 'kernel_checkout')
-
-        srcsubdir = crd.getVar('S')
-
-        # Move local source files into separate subdir
-        recipe_patches = [os.path.basename(patch) for patch in
-                          oe.recipeutils.get_recipe_patches(crd)]
-        local_files = oe.recipeutils.get_recipe_local_files(crd)
-
-        # Ignore local files with subdir={BP}
-        srcabspath = os.path.abspath(srcsubdir)
-        local_files = [fname for fname in local_files if
-                       os.path.exists(os.path.join(workdir, fname)) and
-                       (srcabspath == workdir or not
-                       os.path.join(workdir, fname).startswith(srcabspath +
-                           os.sep))]
-        if local_files:
-            for fname in local_files:
-                _move_file(os.path.join(workdir, fname),
-                           os.path.join(tempdir, 'oe-local-files', fname))
-            with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
-                      'w') as f:
-                f.write('# Ignore local files, by default. Remove this file '
-                        'if you want to commit the directory to Git\n*\n')
-
-        if srcsubdir == workdir:
-            # Find non-patch non-local sources that were "unpacked" to srctree
-            # directory
-            src_files = [fname for fname in _ls_tree(workdir) if
-                         os.path.basename(fname) not in recipe_patches]
-            # Force separate S so that patch files can be left out from srctree
-            srcsubdir = tempfile.mkdtemp(dir=workdir)
-            tinfoil.config_data.setVar('S_task-patch', srcsubdir)
-            # Move source files to S
-            for path in src_files:
-                _move_file(os.path.join(workdir, path),
-                           os.path.join(srcsubdir, path))
-        elif os.path.dirname(srcsubdir) != workdir:
-            # Handle if S is set to a subdirectory of the source
-            srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
-
-        scriptutils.git_convert_standalone_clone(srcsubdir)
-
-        # Make sure that srcsubdir exists
-        bb.utils.mkdirhier(srcsubdir)
-        if not os.path.exists(srcsubdir) or not os.listdir(srcsubdir):
-            logger.warning("no source unpacked to S, either the %s recipe "
-                           "doesn't use any source or the correct source "
-                           "directory could not be determined" % pn)
-
-        setup_git_repo(srcsubdir, crd.getVar('PV'), devbranch, d=d)
-
-        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
-        initial_rev = stdout.rstrip()
-
-        logger.info('Patching...')
-        runtask(fn, 'patch')
-
-        bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
-
-        kconfig = None
-        if bb.data.inherits_class('kernel-yocto', d):
-            # Store generate and store kernel config
-            logger.info('Generating kernel config')
-            runtask(fn, 'configure')
-            kconfig = os.path.join(crd.getVar('B'), '.config')
-
+        with open(os.path.join(tempdir, 'srcsubdir'), 'r') as f:
+            srcsubdir = f.read()
 
         tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
         srctree_localdir = os.path.join(srctree, 'oe-local-files')
@@ -682,11 +600,15 @@
                 oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
                 bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
 
-        if kconfig:
+        if is_kernel_yocto:
             logger.info('Copying kernel config to srctree')
-            shutil.copy2(kconfig, srctree)
+            shutil.copy2(os.path.join(tempdir, '.config'), srctree)
 
     finally:
+        if appendbackup:
+            shutil.copyfile(appendbackup, appendfile)
+        elif os.path.exists(appendfile):
+            os.remove(appendfile)
         if keep_temp:
             logger.info('Preserving temporary directory %s' % tempdir)
         else:
@@ -699,8 +621,11 @@
 
     def addfile(fn):
         md5 = bb.utils.md5_file(fn)
-        with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a') as f:
-            f.write('%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5))
+        with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
+            md5_str = '%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5)
+            f.seek(0, os.SEEK_SET)
+            if not md5_str in f.read():
+                f.write(md5_str)
 
     if os.path.isdir(filename):
         for root, _, files in os.walk(filename):
@@ -772,13 +697,6 @@
             raise DevtoolError("--no-extract specified and source path %s does "
                             "not exist or is not a directory" %
                             srctree)
-        if not args.no_extract:
-            tinfoil = _prep_extract_operation(config, basepath, pn, tinfoil)
-            if not tinfoil:
-                # Error already shown
-                return 1
-            # We need to re-parse because tinfoil may have been re-initialised
-            rd = parse_recipe(config, tinfoil, args.recipename, True)
 
         recipefile = rd.getVar('FILE')
         appendfile = recipe_to_append(recipefile, config, args.wildcard)
@@ -793,7 +711,7 @@
         initial_rev = None
         commits = []
         if not args.no_extract:
-            initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil)
+            initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
             if not initial_rev:
                 return 1
             logger.info('Source tree extracted to %s' % srctree)
@@ -842,7 +760,10 @@
 
             if bb.data.inherits_class('kernel', rd):
                 f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
-                        'do_fetch do_unpack do_patch do_kernel_configme do_kernel_configcheck"\n')
+                        'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n')
+                f.write('\ndo_patch() {\n'
+                        '    :\n'
+                        '}\n')
                 f.write('\ndo_configure_append() {\n'
                         '    cp ${B}/.config ${S}/.config.baseline\n'
                         '    ln -sfT ${B}/.config ${S}/.config.new\n'
@@ -852,6 +773,8 @@
                 for commit in commits:
                     f.write('# commit: %s\n' % commit)
 
+        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
+
         _add_md5(config, pn, appendfile)
 
         logger.info('Recipe %s now set up to build from %s' % (pn, srctree))
@@ -1375,12 +1298,13 @@
         if not no_remove:
             # Find list of existing patches in recipe file
             patches_dir = tempfile.mkdtemp(dir=tempdir)
-            old_srcrev = (rd.getVar('SRCREV', False) or '')
+            old_srcrev = rd.getVar('SRCREV') or ''
             upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
                                                   patches_dir)
+            logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
 
             # Remove deleted local files and "overlapping" patches
-            remove_files = list(del_f.values()) + list(upd_p.values())
+            remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
             if remove_files:
                 removedentries = _remove_file_entries(srcuri, remove_files)[0]
                 update_srcuri = True
@@ -1612,7 +1536,7 @@
 def status(args, config, basepath, workspace):
     """Entry point for the devtool 'status' subcommand"""
     if workspace:
-        for recipe, value in workspace.items():
+        for recipe, value in sorted(workspace.items()):
             recipefile = value['recipefile']
             if recipefile:
                 recipestr = ' (%s)' % recipefile
@@ -1627,6 +1551,26 @@
 def _reset(recipes, no_clean, config, basepath, workspace):
     """Reset one or more recipes"""
 
+    def clean_preferred_provider(pn, layerconf_path):
+        """Remove PREFERRED_PROVIDER from layer.conf'"""
+        import re
+        layerconf_file = os.path.join(layerconf_path, 'conf', 'layer.conf')
+        new_layerconf_file = os.path.join(layerconf_path, 'conf', '.layer.conf')
+        pprovider_found = False
+        with open(layerconf_file, 'r') as f:
+            lines = f.readlines()
+            with open(new_layerconf_file, 'a') as nf:
+                for line in lines:
+                    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$'
+                    if not re.match(pprovider_exp, line):
+                        nf.write(line)
+                    else:
+                        pprovider_found = True
+        if pprovider_found:
+            shutil.move(new_layerconf_file, layerconf_file)
+        else:
+            os.remove(new_layerconf_file)
+
     if recipes and not no_clean:
         if len(recipes) == 1:
             logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
@@ -1679,6 +1623,7 @@
                 # This is unlikely, but if it's empty we can just remove it
                 os.rmdir(srctree)
 
+        clean_preferred_provider(pn, config.workspace_path)
 
 def reset(args, config, basepath, workspace):
     """Entry point for the devtool 'reset' subcommand"""
@@ -1834,10 +1779,15 @@
     parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
     parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
     parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
-    parser_add.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+    group = parser_add.add_mutually_exclusive_group()
+    group.add_argument('--srcrev', '-S', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
+    group.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
+    parser_add.add_argument('--srcbranch', '-B', help='Branch in source repository if fetching from an SCM such as git (default master)')
     parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure). Useful with binary packages e.g. RPMs.', action='store_true')
     parser_add.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
     parser_add.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
+    parser_add.add_argument('--mirrors', help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).', action="store_true")
+    parser_add.add_argument('--provides', '-p', help='Specify an alias for the item provided by the recipe. E.g. virtual/libgl')
     parser_add.set_defaults(func=add, fixed_setup=context.fixed_setup)
 
     parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
@@ -1854,7 +1804,7 @@
     group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")')
     parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true")
-    parser_modify.set_defaults(func=modify)
+    parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup)
 
     parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
                                        description='Extracts the source for an existing recipe',
@@ -1863,7 +1813,7 @@
     parser_extract.add_argument('srctree', help='Path to where to extract the source tree')
     parser_extract.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (default "%(default)s")')
     parser_extract.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
-    parser_extract.set_defaults(func=extract, no_workspace=True)
+    parser_extract.set_defaults(func=extract, fixed_setup=context.fixed_setup)
 
     parser_sync = subparsers.add_parser('sync', help='Synchronize the source tree for an existing recipe',
                                        description='Synchronize the previously extracted source tree for an existing recipe',
@@ -1873,7 +1823,7 @@
     parser_sync.add_argument('srctree', help='Path to the source tree')
     parser_sync.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
     parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
-    parser_sync.set_defaults(func=sync)
+    parser_sync.set_defaults(func=sync, fixed_setup=context.fixed_setup)
 
     parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace',
                                        description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.',
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
index 05fb9e5..f1b3ff0 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py
@@ -1,6 +1,6 @@
 # Development tool - upgrade command plugin
 #
-# Copyright (C) 2014-2015 Intel Corporation
+# Copyright (C) 2014-2017 Intel Corporation
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
@@ -33,7 +33,7 @@
 
 import oe.recipeutils
 from devtool import standard
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs
 
 logger = logging.getLogger('devtool')
 
@@ -180,7 +180,7 @@
             srcuri = rev_re.sub('', srcuri)
     return srcuri, srcrev
 
-def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tinfoil, rd):
+def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
     """Extract sources of a recipe with a new version"""
 
     def __run(cmd):
@@ -202,15 +202,38 @@
         __run('git tag -f devtool-base-new')
         md5 = None
         sha256 = None
+        if not srcbranch:
+            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
+            get_branch = [x.strip() for x in check_branch.splitlines()]
+            # Remove HEAD reference point and drop remote prefix
+            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
+            if 'master' in get_branch:
+                # If it is master, we do not need to append 'branch=master' as this is default.
+                # Even with the case where get_branch has multiple objects, if 'master' is one
+                # of them, we should default take from 'master'
+                srcbranch = ''
+            elif len(get_branch) == 1:
+                # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
+                srcbranch = get_branch[0]
+            else:
+                # If get_branch contains more than one objects, then display error and exit.
+                mbrch = '\n  ' + '\n  '.join(get_branch)
+                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
     else:
         __run('git checkout devtool-base -b devtool-%s' % newpv)
 
         tmpdir = tempfile.mkdtemp(prefix='devtool')
         try:
-            md5, sha256 = scriptutils.fetch_uri(tinfoil.config_data, uri, tmpdir, rev)
-        except bb.fetch2.FetchError as e:
+            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
+        except scriptutils.FetchUrlFailure as e:
             raise DevtoolError(e)
 
+        if ftmpdir and keep_temp:
+            logger.info('Fetch temp directory is %s' % ftmpdir)
+
+        md5 = checksums['md5sum']
+        sha256 = checksums['sha256sum']
+
         tmpsrctree = _get_srctree(tmpdir)
         srctree = os.path.abspath(srctree)
 
@@ -269,7 +292,7 @@
         else:
             shutil.rmtree(tmpsrctree)
 
-    return (rev, md5, sha256)
+    return (rev, md5, sha256, srcbranch)
 
 def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
     """Creates the new recipe under workspace"""
@@ -277,7 +300,10 @@
     bpn = rd.getVar('BPN')
     path = os.path.join(workspace, 'recipes', bpn)
     bb.utils.mkdirhier(path)
-    copied, _ = oe.recipeutils.copy_recipe_files(rd, path)
+    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
+    if not copied:
+        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
+    logger.debug('Copied %s to %s' % (copied, path))
 
     oldpv = rd.getVar('PV')
     if not newpv:
@@ -329,6 +355,29 @@
 
     return fullpath, copied
 
+
+def _check_git_config():
+    def getconfig(name):
+        try:
+            value = bb.process.run('git config --global %s' % name)[0].strip()
+        except bb.process.ExecutionError as e:
+            if e.exitcode == 1:
+                value = None
+            else:
+                raise
+        return value
+
+    username = getconfig('user.name')
+    useremail = getconfig('user.email')
+    configerr = []
+    if not username:
+        configerr.append('Please set your name using:\n  git config --global user.name')
+    if not useremail:
+        configerr.append('Please set your email using:\n  git config --global user.email')
+    if configerr:
+        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
+
+
 def upgrade(args, config, basepath, workspace):
     """Entry point for the devtool 'upgrade' subcommand"""
 
@@ -339,6 +388,8 @@
     if args.srcbranch and not args.srcrev:
         raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision" % args.recipename)
 
+    _check_git_config()
+
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     try:
         rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -367,11 +418,11 @@
 
         rf = None
         try:
-            rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd, tinfoil)
-            rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch,
-                                                    args.srcrev, args.branch, args.keep_temp,
+            rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil)
+            rev2, md5, sha256, srcbranch = _extract_new_source(args.version, srctree, args.no_patch,
+                                                    args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                     tinfoil, rd)
-            rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, args.srcbranch, config.workspace_path, tinfoil, rd)
+            rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, config.workspace_path, tinfoil, rd)
         except bb.process.CmdError as e:
             _upgrade_error(e, rf, srctree)
         except DevtoolError as e:
@@ -381,6 +432,9 @@
         af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
                         copied, config.workspace_path, rd)
         standard._add_md5(config, pn, af)
+
+        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
+
         logger.info('Upgraded source extracted to %s' % srctree)
         logger.info('New recipe is %s' % rf)
     finally:
@@ -406,4 +460,4 @@
     group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
     group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
-    parser_upgrade.set_defaults(func=upgrade)
+    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
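
upgrade's _extract_new_source() now tries to determine the SRC_URI branch automatically from 'git branch -r --contains <srcrev>' when -B/--srcbranch is not given: master needs no branch= parameter, a single other branch is used as-is, and multiple candidates are an error. A standalone re-implementation of that parsing, operating on captured command output (the sample output is invented):

    # Illustrative branch auto-detection from 'git branch -r --contains' output.
    def detect_srcbranch(branch_output, srcrev):
        branches = [x.strip() for x in branch_output.splitlines()]
        # Drop the HEAD pointer entry and the remote ("origin/") prefix
        branches = [x.split('/', 1)[1] for x in branches if not x.startswith('origin/HEAD')]
        if 'master' in branches:
            return ''    # default branch; no branch= needed in SRC_URI
        elif len(branches) == 1:
            return branches[0]
        raise ValueError('Revision %s was found on multiple branches: %s'
                         % (srcrev, ', '.join(branches)))

    sample = "  origin/HEAD -> origin/master\n  origin/master\n"
    print(repr(detect_srcbranch(sample, 'abc123')))   # -> ''
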
diff --git a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
index 0437e64..b745116 100644
--- a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
+++ b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py
@@ -30,15 +30,13 @@
 
 logger = logging.getLogger('devtool')
 
-
-def edit_recipe(args, config, basepath, workspace):
-    """Entry point for the devtool 'edit-recipe' subcommand"""
+def _find_recipe_path(args, config, basepath, workspace):
     if args.any_recipe:
         tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
         try:
             rd = parse_recipe(config, tinfoil, args.recipename, True)
             if not rd:
-                return 1
+                raise DevtoolError("Failed to find specified recipe")
             recipefile = rd.getVar('FILE')
         finally:
             tinfoil.shutdown()
@@ -48,8 +46,19 @@
         if not recipefile:
             raise DevtoolError("Recipe file for %s is not under the workspace" %
                                args.recipename)
+    return recipefile
 
-    return scriptutils.run_editor(recipefile)
+
+def find_recipe(args, config, basepath, workspace):
+    """Entry point for the devtool 'find-recipe' subcommand"""
+    recipefile = _find_recipe_path(args, config, basepath, workspace)
+    print(recipefile)
+    return 0
+
+
+def edit_recipe(args, config, basepath, workspace):
+    """Entry point for the devtool 'edit-recipe' subcommand"""
+    return scriptutils.run_editor(_find_recipe_path(args, config, basepath, workspace), logger)
 
 
 def configure_help(args, config, basepath, workspace):
@@ -220,6 +229,14 @@
     parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Edit any recipe, not just where the recipe file itself is in the workspace')
     parser_edit_recipe.set_defaults(func=edit_recipe)
 
+    # Find-recipe
+    parser_find_recipe = subparsers.add_parser('find-recipe', help='Find a recipe file in your workspace',
+                                         description='By default, this will find a recipe file in your workspace; you can override this with the -a/--any-recipe option.',
+                                         group='working')
+    parser_find_recipe.add_argument('recipename', help='Recipe to find')
+    parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Find any recipe, not just where the recipe file itself is in the workspace')
+    parser_find_recipe.set_defaults(func=find_recipe)
+
     # NOTE: Needed to override the usage string here since the default
     # gets the order wrong - recipename must come before --arg
     parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
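
edit-recipe and the new find-recipe subcommand now share _find_recipe_path(): edit-recipe hands the resolved path to scriptutils.run_editor() (which now also receives the logger), while find-recipe simply prints it. A rough stand-in for the editor launch, with the fallback editor name an assumption rather than what run_editor actually does:

    # Simplified stand-in for scriptutils.run_editor(); not the real helper.
    import os
    import subprocess

    def launch_editor(path):
        editor = os.environ.get('EDITOR', 'vi')      # assumed fallback
        return subprocess.call('%s "%s"' % (editor, path), shell=True)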