Squashed 'yocto-poky/' content from commit ea562de

git-subtree-dir: yocto-poky
git-subtree-split: ea562de57590c966cd5a75fda8defecd397e6436
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
new file mode 100644
index 0000000..404d3e6
--- /dev/null
+++ b/scripts/lib/devtool/__init__.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+
+# Development tool - utility functions for plugins
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool plugins module"""
+
+import os
+import sys
+import subprocess
+import logging
+
+logger = logging.getLogger('devtool')
+
+
+class DevtoolError(Exception):
+    """Base exception raised for all devtool-specific errors (caught by the devtool driver)."""
+    pass
+
+
+def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
+    """Run a program in bitbake build context"""
+    import bb
+    if not 'cwd' in options:
+        options["cwd"] = builddir
+    if init_path:
+        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
+        # and can't determine it when running under dash, we need to set
+        # the executable to bash to correctly set things up
+        if not 'executable' in options:
+            options['executable'] = 'bash'
+        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
+        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
+    else:
+        logger.debug('Executing command "%s"' % cmd)
+        init_prefix = ''
+    if watch:
+        if sys.stdout.isatty():
+            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
+            cmd = 'script -e -q -c "%s" /dev/null' % cmd
+        return exec_watch('%s%s' % (init_prefix, cmd), **options)
+    else:
+        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
+
+def exec_watch(cmd, **options):
+    """Run program with stdout shown on sys.stdout"""
+    import bb
+    if isinstance(cmd, basestring) and not "shell" in options:
+        options["shell"] = True
+
+    process = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
+    )
+
+    buf = ''
+    while True:
+        out = process.stdout.read(1)
+        if out:
+            sys.stdout.write(out)
+            sys.stdout.flush()
+            buf += out
+        elif out == '' and process.poll() != None:
+            break
+
+    if process.returncode != 0:
+        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)
+
+    return buf, None
+
+def exec_fakeroot(d, cmd, **kwargs):
+    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
+    # Grab the command and check it actually exists
+    fakerootcmd = d.getVar('FAKEROOTCMD', True)
+    if not os.path.exists(fakerootcmd):
+        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
+        return 2
+    # Set up the appropriate environment
+    newenv = dict(os.environ)
+    fakerootenv = d.getVar('FAKEROOTENV', True)
+    for varvalue in fakerootenv.split():
+        if '=' in varvalue:
+            splitval = varvalue.split('=', 1)
+            newenv[splitval[0]] = splitval[1]
+    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
+
+def setup_tinfoil(config_only=False):
+    """Initialize and return a bitbake tinfoil instance (callers shut it down via tinfoil.shutdown())."""
+    import scriptpath
+    bitbakepath = scriptpath.add_bitbake_lib_path()
+    if not bitbakepath:
+        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+        sys.exit(1)
+
+    import bb.tinfoil
+    tinfoil = bb.tinfoil.Tinfoil()
+    tinfoil.prepare(config_only)
+    tinfoil.logger.setLevel(logger.getEffectiveLevel())
+    return tinfoil
+
+def get_recipe_file(cooker, pn):
+    """Find the recipe file corresponding to a package name (logs an error if none is found)."""
+    import oe.recipeutils
+    recipefile = oe.recipeutils.pn_to_recipe(cooker, pn)
+    if not recipefile:
+        skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn)
+        if skipreasons:
+            logger.error('\n'.join(skipreasons))
+        else:
+            logger.error("Unable to find any recipe file matching %s" % pn)
+    return recipefile
+
+def parse_recipe(config, tinfoil, pn, appends):
+    """Parse recipe of a package"""
+    import oe.recipeutils
+    recipefile = get_recipe_file(tinfoil.cooker, pn)
+    if not recipefile:
+        # Error already logged
+        return None
+    if appends:
+        append_files = tinfoil.cooker.collection.get_file_appends(recipefile)
+        # Filter out appends from the workspace
+        append_files = [path for path in append_files if
+                        not path.startswith(config.workspace_path)]
+    return oe.recipeutils.parse_recipe(recipefile, append_files,
+                                       tinfoil.config_data)
diff --git a/scripts/lib/devtool/build-image.py b/scripts/lib/devtool/build-image.py
new file mode 100644
index 0000000..2c01428
--- /dev/null
+++ b/scripts/lib/devtool/build-image.py
@@ -0,0 +1,91 @@
+# Development tool - build-image plugin
+#
+# Copyright (C) 2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool plugin containing the build-image subcommand."""
+
+import os
+import logging
+
+from bb.process import ExecutionError
+from devtool import exec_build_env_command, setup_tinfoil, parse_recipe
+
+logger = logging.getLogger('devtool')
+
+def _get_recipes(workspace, config):
+    """Get list of target recipes from the workspace."""
+    result = []
+    tinfoil = setup_tinfoil()
+    for recipe in workspace:
+        data = parse_recipe(config, tinfoil, recipe, True)
+        if 'class-target' in data.getVar('OVERRIDES', True).split(':'):
+            if recipe in data.getVar('PACKAGES', True):
+                result.append(recipe)
+            else:
+                logger.warning("Skipping recipe %s as it doesn't produce "
+                               "package with the same name", recipe)
+    tinfoil.shutdown()
+    return result
+
+def build_image(args, config, basepath, workspace):
+    """Entry point for the devtool 'build-image' subcommand."""
+    image = args.recipe
+    appendfile = os.path.join(config.workspace_path, 'appends',
+                              '%s.bbappend' % image)
+
+    # remove <image>.bbappend to make sure setup_tinfoil doesn't
+    # break because of it
+    if os.path.isfile(appendfile):
+        os.unlink(appendfile)
+
+    recipes = _get_recipes(workspace, config)
+    if recipes:
+        with open(appendfile, 'w') as afile:
+            # include selected recipes into the image
+            afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(recipes))
+
+            # Generate notification callback devtool_warn_image_extended
+            afile.write('do_rootfs[prefuncs] += "devtool_warn_image_extended"\n\n')
+            afile.write("python devtool_warn_image_extended() {\n")
+            afile.write("    bb.plain('NOTE: %%s: building with additional '\n"
+                        "             'packages due to \"devtool build-image\"'"
+                        "              %% d.getVar('PN', True))\n"
+                        "    bb.plain('NOTE: delete %%s to clear this' %% \\\n"
+                        "             '%s')\n" % os.path.relpath(appendfile, basepath))
+            afile.write("}\n")
+
+            logger.info('Building image %s with the following '
+                        'additional packages: %s', image, ' '.join(recipes))
+    else:
+        logger.warning('No recipes in workspace, building image %s unmodified', image)
+
+    # run bitbake to build image
+    try:
+        exec_build_env_command(config.init_path, basepath,
+                               'bitbake %s' % image, watch=True)
+    except ExecutionError as err:
+        return err.exitcode
+
+    logger.info('Successfully built %s', image)
+
+def register_commands(subparsers, context):
+    """Register the 'build-image' subcommand and its arguments with devtool"""
+    parser = subparsers.add_parser('build-image',
+                                   help='Build image including workspace recipe packages',
+                                   description='Builds an image, extending it to include '
+                                   'packages from recipes in the workspace')
+    parser.add_argument('recipe', help='Image recipe to build')
+    parser.set_defaults(func=build_image)
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
new file mode 100644
index 0000000..335aff5
--- /dev/null
+++ b/scripts/lib/devtool/build.py
@@ -0,0 +1,77 @@
+# Development tool - build command plugin
+#
+# Copyright (C) 2014-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool build plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import tempfile
+from devtool import exec_build_env_command, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def plugin_init(pluginlist):
+    """Plugin initialization hook - no setup needed for this plugin"""
+    pass
+
+def _create_conf_file(values, conf_file=None):
+    if not conf_file:
+        fd, conf_file = tempfile.mkstemp(suffix='.conf')
+    elif not os.path.exists(os.path.dirname(conf_file)):
+        logger.debug("Creating folder %s" % os.path.dirname(conf_file))
+        bb.utils.mkdirhier(os.path.dirname(conf_file))
+    with open(conf_file, 'w') as f:
+        for key, value in values.iteritems():
+            f.write('%s = "%s"\n' % (key, value))
+    return conf_file
+
+def build(args, config, basepath, workspace):
+    """Entry point for the devtool 'build' subcommand"""
+    if not args.recipename in workspace:
+        raise DevtoolError("no recipe named %s in your workspace" %
+                           args.recipename)
+
+    build_task = config.get('Build', 'build_task', 'populate_sysroot')
+
+    postfile_param = ""
+    postfile = ""
+    if args.disable_parallel_make:
+        logger.info("Disabling 'make' parallelism")
+        postfile = os.path.join(basepath, 'conf', 'disable_parallelism.conf')
+        _create_conf_file({'PARALLEL_MAKE':''}, postfile)
+        postfile_param = "-R %s" % postfile
+    try:
+        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s %s' % (build_task, postfile_param, args.recipename), watch=True)
+    except bb.process.ExecutionError as e:
+        # We've already seen the output since watch=True, so just ensure we return something to the user
+        return e.exitcode
+    finally:
+        if postfile:
+            logger.debug('Removing postfile')
+            os.remove(postfile)
+
+    return 0
+
+def register_commands(subparsers, context):
+    """Register the 'build' subcommand and its options with devtool"""
+    parser_build = subparsers.add_parser('build', help='Build a recipe',
+                                         description='Builds the specified recipe using bitbake',
+                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser_build.add_argument('recipename', help='Recipe to build')
+    parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
+    parser_build.set_defaults(func=build)
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
new file mode 100644
index 0000000..fa93adf
--- /dev/null
+++ b/scripts/lib/devtool/deploy.py
@@ -0,0 +1,149 @@
+# Development tool - deploy/undeploy command plugin
+#
+# Copyright (C) 2014-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool plugin containing the deploy subcommands"""
+
+import os
+import subprocess
+import logging
+from devtool import exec_fakeroot, setup_tinfoil, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def deploy(args, config, basepath, workspace):
+    """Entry point for the devtool 'deploy' subcommand"""
+    import re
+    import oe.recipeutils
+
+    if not args.recipename in workspace:
+        raise DevtoolError("no recipe named %s in your workspace" %
+                           args.recipename)
+    try:
+        host, destdir = args.target.split(':')
+    except ValueError:
+        destdir = '/'
+    else:
+        args.target = host
+
+    deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
+    deploy_file = os.path.join(deploy_dir, args.recipename + '.list')
+
+    tinfoil = setup_tinfoil()
+    try:
+        rd = oe.recipeutils.parse_recipe_simple(tinfoil.cooker, args.recipename, tinfoil.config_data)
+    except Exception as e:
+        raise DevtoolError('Exception parsing recipe %s: %s' %
+                           (args.recipename, e))
+    recipe_outdir = rd.getVar('D', True)
+    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
+        raise DevtoolError('No files to deploy - have you built the %s '
+                           'recipe? If so, the install step has not installed '
+                           'any files.' % args.recipename)
+
+    if args.dry_run:
+        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
+        for root, _, files in os.walk(recipe_outdir):
+            for fn in files:
+                print('  %s' % os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn))
+        return 0
+
+    if os.path.exists(deploy_file):
+        if undeploy(args, config, basepath, workspace):
+            # Error already shown
+            return 1
+
+    extraoptions = ''
+    if args.no_host_check:
+        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+    if args.show_status:
+        tarextractopts = 'xv'
+    else:
+        tarextractopts = 'x'
+        extraoptions += ' -q'
+    # We cannot use scp here, because it doesn't preserve symlinks
+    ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'tar %s -C %s -f -\'' % (extraoptions, args.target, tarextractopts, destdir), cwd=recipe_outdir, shell=True)
+    if ret != 0:
+        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
+                           'error message')
+
+    logger.info('Successfully deployed %s' % recipe_outdir)
+
+    if not os.path.exists(deploy_dir):
+        os.makedirs(deploy_dir)
+
+    files_list = []
+    for root, _, files in os.walk(recipe_outdir):
+        for filename in files:
+            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+            files_list.append(os.path.join(destdir, filename))
+
+    with open(deploy_file, 'w') as fobj:
+        fobj.write('\n'.join(files_list))
+
+    return 0
+
+def undeploy(args, config, basepath, workspace):
+    """Entry point for the devtool 'undeploy' subcommand (also called from deploy to clean a previous deployment)"""
+    deploy_file = os.path.join(basepath, 'target_deploy', args.target, args.recipename + '.list')
+    if not os.path.exists(deploy_file):
+        raise DevtoolError('%s has not been deployed' % args.recipename)
+
+    if args.dry_run:
+        print('Previously deployed files to be un-deployed for %s on target %s:' % (args.recipename, args.target))
+        with open(deploy_file, 'r') as f:
+            for line in f:
+                print('  %s' % line.rstrip())
+        return 0
+
+    extraoptions = ''
+    if args.no_host_check:
+        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+    if not args.show_status:
+        extraoptions += ' -q'
+
+    ret = subprocess.call("scp %s %s %s:/tmp" % (extraoptions, deploy_file, args.target), shell=True)  # copy the deployed-file list over so the target can delete from it
+    if ret != 0:
+        raise DevtoolError('Failed to copy file list to %s - rerun with -s to '
+                           'get a complete error message' % args.target)
+
+    ret = subprocess.call("ssh %s %s 'xargs -n1 rm -f </tmp/%s'" % (extraoptions, args.target, os.path.basename(deploy_file)), shell=True)  # remove each listed file on the target
+    if ret == 0:
+        logger.info('Successfully undeployed %s' % args.recipename)
+        os.remove(deploy_file)
+    else:
+        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
+                           'error message')
+
+    return ret
+
+
+def register_commands(subparsers, context):
+    """Register the 'deploy-target' and 'undeploy-target' subcommands with devtool"""
+    parser_deploy = subparsers.add_parser('deploy-target', help='Deploy recipe output files to live target machine')
+    parser_deploy.add_argument('recipename', help='Recipe to deploy')
+    parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
+    parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
+    parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
+    parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
+    parser_deploy.set_defaults(func=deploy)
+
+    parser_undeploy = subparsers.add_parser('undeploy-target', help='Undeploy recipe output files in live target machine')
+    parser_undeploy.add_argument('recipename', help='Recipe to undeploy')
+    parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
+    parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
+    parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
+    parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
+    parser_undeploy.set_defaults(func=undeploy)
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
new file mode 100644
index 0000000..3a7a36b
--- /dev/null
+++ b/scripts/lib/devtool/package.py
@@ -0,0 +1,61 @@
+# Development tool - package command plugin
+#
+# Copyright (C) 2014-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool plugin containing the package subcommands"""
+
+import os
+import subprocess
+import logging
+from bb.process import ExecutionError
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def plugin_init(pluginlist):
+    """Plugin initialization hook - no setup needed for this plugin"""
+    pass
+
+def package(args, config, basepath, workspace):
+    """Entry point for the devtool 'package' subcommand"""
+    if not args.recipename in workspace:
+        raise DevtoolError("no recipe named %s in your workspace" %
+                           args.recipename)
+
+    image_pkgtype = config.get('Package', 'image_pkgtype', '')
+    if not image_pkgtype:
+        tinfoil = setup_tinfoil()
+        try:
+            tinfoil.prepare(config_only=True)
+            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
+        finally:
+            tinfoil.shutdown()
+
+    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
+    try:
+        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
+    except bb.process.ExecutionError as e:
+        # We've already seen the output since watch=True, so just ensure we return something to the user
+        return e.exitcode
+    logger.info('Your packages are in %s/tmp/deploy/%s' % (basepath, image_pkgtype))
+
+    return 0
+
+def register_commands(subparsers, context):
+    """Register the 'package' subcommand (only available in a fixed-setup/installed SDK)"""
+    if context.fixed_setup:
+        parser_package = subparsers.add_parser('package', help='Build packages for a recipe', description='Builds packages for a recipe\'s output files')
+        parser_package.add_argument('recipename', help='Recipe to package')
+        parser_package.set_defaults(func=package)
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
new file mode 100644
index 0000000..2f416b3
--- /dev/null
+++ b/scripts/lib/devtool/sdk.py
@@ -0,0 +1,197 @@
+# Development tool - sdk-update command plugin
+
+import os
+import subprocess
+import logging
+import glob
+import shutil
+import errno
+import sys
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def plugin_init(pluginlist):
+    """Plugin initialization hook - no setup needed for this plugin"""
+    pass
+
+def parse_locked_sigs(sigfile_path):
+    """Return <pn:task>:<hash> dictionary"""
+    sig_dict = {}
+    with open(sigfile_path) as f:
+        lines = f.readlines()
+        for line in lines:
+            if ':' in line:
+                taskkey, _, hashval = line.rpartition(':')
+                sig_dict[taskkey.strip()] = hashval.split()[0]
+    return sig_dict
+
+def generate_update_dict(sigfile_new, sigfile_old):
+    """Return a dict containing <pn:task>:<hash> which indicates what need to be updated"""
+    update_dict = {}
+    sigdict_new = parse_locked_sigs(sigfile_new)
+    sigdict_old = parse_locked_sigs(sigfile_old)
+    for k in sigdict_new:
+        if k not in sigdict_old:
+            update_dict[k] = sigdict_new[k]
+            continue
+        if sigdict_new[k] != sigdict_old[k]:
+            update_dict[k] = sigdict_new[k]
+            continue
+    return update_dict
+
+def get_sstate_objects(update_dict, newsdk_path):
+    """Return a list containing sstate objects which are to be installed"""
+    sstate_objects = []
+    # Ensure newsdk_path points to an extensible SDK
+    sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
+    if not os.path.exists(sstate_dir):
+        logger.error("sstate-cache directory not found under %s" % newsdk_path)
+        raise
+    for k in update_dict:
+        files = set()
+        hashval = update_dict[k]
+        p = sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz'
+        files |= set(glob.glob(p))
+        p = sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz'
+        files |= set(glob.glob(p))
+        files = list(files)
+        if len(files) == 1:
+            sstate_objects.extend(files)
+        elif len(files) > 1:
+            logger.error("More than one matching sstate object found for %s" % hashval)
+
+    return sstate_objects
+
+def mkdir(d):
+    try:
+        os.makedirs(d)
+    except OSError as e:
+        if e.errno != errno.EEXIST:
+            raise e
+
+def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
+    """Install sstate objects into destination SDK"""
+    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
+    if not os.path.exists(sstate_dir):
+        logger.error("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
+        raise
+    for sb in sstate_objects:
+        dst = sb.replace(src_sdk, dest_sdk)
+        destdir = os.path.dirname(dst)
+        mkdir(destdir)
+        logger.debug("Copying %s to %s" % (sb, dst))
+        shutil.copy(sb, dst)
+
+def sdk_update(args, config, basepath, workspace):
+    # Fetch locked-sigs.inc file from remote/local destination
+    from ConfigParser import NoSectionError
+    updateserver = args.updateserver
+    if not updateserver:
+        try:
+            updateserver = config.get('SDK', 'updateserver', None)
+        except NoSectionError:
+            pass
+    if not updateserver:
+        raise DevtoolError("Update server not specified in config file, you must specify it on the command line")
+    logger.debug("updateserver: %s" % args.updateserver)
+
+    # Make sure we are using sdk-update from within SDK
+    logger.debug("basepath = %s" % basepath)
+    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
+    if not os.path.exists(old_locked_sig_file_path):
+        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
+        return -1
+    else:
+        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)
+
+    if ':' in args.updateserver:
+        is_remote = True
+    else:
+        is_remote = False
+
+    if not is_remote:
+        # devtool sdk-update /local/path/to/latest/sdk
+        new_locked_sig_file_path = os.path.join(args.updateserver, 'conf/locked-sigs.inc')
+        if not os.path.exists(new_locked_sig_file_path):
+            logger.error("%s doesn't exist or is not an extensible SDK" % args.updateserver)
+            return -1
+        else:
+            logger.debug("Found conf/locked-sigs.inc in %s" % args.updateserver)
+        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
+        logger.debug("update_dict = %s" % update_dict)
+        sstate_objects = get_sstate_objects(update_dict, args.updateserver)
+        logger.debug("sstate_objects = %s" % sstate_objects)
+        if len(sstate_objects) == 0:
+            logger.info("No need to update.")
+            return 0
+        logger.info("Installing sstate objects into %s", basepath)
+        install_sstate_objects(sstate_objects, args.updateserver.rstrip('/'), basepath)
+        logger.info("Updating configuration files")
+        new_conf_dir = os.path.join(args.updateserver, 'conf')
+        old_conf_dir = os.path.join(basepath, 'conf')
+        shutil.rmtree(old_conf_dir)
+        shutil.copytree(new_conf_dir, old_conf_dir)
+        logger.info("Updating layers")
+        new_layers_dir = os.path.join(args.updateserver, 'layers')
+        old_layers_dir = os.path.join(basepath, 'layers')
+        shutil.rmtree(old_layers_dir)
+        shutil.copytree(new_layers_dir, old_layers_dir)
+    else:
+        # devtool sdk-update http://myhost/sdk
+        tmpsdk_dir = '/tmp/sdk-ext'
+        if os.path.exists(tmpsdk_dir):
+            shutil.rmtree(tmpsdk_dir)
+        os.makedirs(tmpsdk_dir)
+        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
+        # Fetch locked-sigs.inc from update server
+        ret = subprocess.call("wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
+        if ret != 0:
+            logger.error("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')))
+            return ret
+        else:
+            logger.info("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')))
+        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf/locked-sigs.inc')
+        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
+        logger.debug("update_dict = %s" % update_dict)
+        if len(update_dict) == 0:
+            logger.info("No need to update.")
+            return 0
+        # Update metadata
+        logger.debug("Updating meta data via git ...")
+        # Try using 'git pull', if failed, use 'git clone'
+        if os.path.exists(os.path.join(basepath, 'layers/.git')):
+            ret = subprocess.call("cd layers && git pull", shell=True)
+        else:
+            ret = -1
+        if ret != 0:
+            ret = subprocess.call("rm -rf layers && git clone %s/layers" % args.updateserver, shell=True)
+        if ret != 0:
+            logger.error("Updating meta data via git failed")
+            return ret
+        logger.debug("Updating conf files ...")
+        conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'work-config.inc', 'locked-sigs.inc']
+        for conf in conf_files:
+            ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (args.updateserver, conf, conf), shell=True)
+            if ret != 0:
+                logger.error("Update %s failed" % conf)
+                return ret
+        with open(os.path.join(basepath, 'conf/local.conf'), 'a') as f:
+            f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % args.updateserver)
+
+    # Run bitbake command for the whole SDK
+    sdk_targets = config.get('SDK', 'sdk_targets')
+    logger.info("Executing 'bitbake %s' ... (This may take some time.)" % sdk_targets)
+    try:
+        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % sdk_targets)
+    except:
+        logger.error('bitbake %s failed' % sdk_targets)
+        return -1
+    return 0
+
+def register_commands(subparsers, context):
+    """Register the 'sdk-update' subcommand (only available in a fixed-setup/installed SDK)"""
+    if context.fixed_setup:
+        parser_sdk = subparsers.add_parser('sdk-update', help='Update SDK components from a nominated location')
+        parser_sdk.add_argument('updateserver', help='The update server to fetch latest SDK components from', nargs='?')
+        parser_sdk.set_defaults(func=sdk_update)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
new file mode 100644
index 0000000..d5900b4
--- /dev/null
+++ b/scripts/lib/devtool/standard.py
@@ -0,0 +1,930 @@
+# Development tool - standard commands plugin
+#
+# Copyright (C) 2014-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Devtool standard plugins"""
+
+import os
+import sys
+import re
+import shutil
+import tempfile
+import logging
+import argparse
+import scriptutils
+import errno
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+from devtool import parse_recipe
+
+logger = logging.getLogger('devtool')
+
+
+def add(args, config, basepath, workspace):
+    """Entry point for the devtool 'add' subcommand
+
+    Creates a new recipe (via recipetool) plus a workspace bbappend that
+    points the recipe at an external source tree, optionally fetching the
+    source first. Returns 0 on success; raises DevtoolError on user error.
+    """
+    import bb
+    import oe.recipeutils
+
+    # Refuse duplicates - one workspace entry per recipe name
+    if args.recipename in workspace:
+        raise DevtoolError("recipe %s is already in your workspace" %
+                            args.recipename)
+
+    # validate_pn() returns a reason string if the name is not a valid PN
+    reason = oe.recipeutils.validate_pn(args.recipename)
+    if reason:
+        raise DevtoolError(reason)
+
+    srctree = os.path.abspath(args.srctree)
+    if os.path.exists(srctree):
+        if args.fetch:
+            # When fetching, the destination must be an empty directory (or absent)
+            if not os.path.isdir(srctree):
+                raise DevtoolError("Cannot fetch into source tree path %s as "
+                                   "it exists and is not a directory" %
+                                   srctree)
+            elif os.listdir(srctree):
+                raise DevtoolError("Cannot fetch into source tree path %s as "
+                                   "it already exists and is non-empty" %
+                                   srctree)
+    elif not args.fetch:
+        # Not fetching, so the source tree must already exist
+        raise DevtoolError("Specified source tree %s could not be found" %
+                           srctree)
+
+    appendpath = os.path.join(config.workspace_path, 'appends')
+    if not os.path.exists(appendpath):
+        os.makedirs(appendpath)
+
+    recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
+    bb.utils.mkdirhier(recipedir)
+    # rfv becomes the version part of the recipe file name (PN_rfv.bb)
+    rfv = None
+    if args.version:
+        # '_' and ' ' are not permitted in PV as they break file naming/parsing
+        if '_' in args.version or ' ' in args.version:
+            raise DevtoolError('Invalid version string "%s"' % args.version)
+        rfv = args.version
+    if args.fetch:
+        # SCM fetches use the SCM name as the version suffix (e.g. foo_git.bb)
+        if args.fetch.startswith('git://'):
+            rfv = 'git'
+        elif args.fetch.startswith('svn://'):
+            rfv = 'svn'
+        elif args.fetch.startswith('hg://'):
+            rfv = 'hg'
+    if rfv:
+        bp = "%s_%s" % (args.recipename, rfv)
+    else:
+        bp = args.recipename
+    recipefile = os.path.join(recipedir, "%s.bb" % bp)
+    # Force coloured recipetool output when attached to a terminal
+    if sys.stdout.isatty():
+        color = 'always'
+    else:
+        color = args.color
+    extracmdopts = ''
+    if args.fetch:
+        source = args.fetch
+        # -x tells recipetool where to extract the fetched source
+        extracmdopts = '-x %s' % srctree
+    else:
+        source = srctree
+    if args.version:
+        extracmdopts += ' -V %s' % args.version
+    try:
+        # Run recipetool in the bitbake build environment to generate the recipe
+        stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, recipefile, source, extracmdopts))
+        logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
+    except bb.process.ExecutionError as e:
+        raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+    # Record checksum so "devtool reset" can detect manual edits later
+    _add_md5(config, args.recipename, recipefile)
+
+    initial_rev = None
+    if os.path.exists(os.path.join(srctree, '.git')):
+        # Remember the starting revision so update-recipe can find new commits
+        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
+        initial_rev = stdout.rstrip()
+
+    appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
+    with open(appendfile, 'w') as f:
+        f.write('inherit externalsrc\n')
+        f.write('EXTERNALSRC = "%s"\n' % srctree)
+        if args.same_dir:
+            f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
+        if initial_rev:
+            # Stored as a comment; parsed back out by _get_patchset_revs()
+            f.write('\n# initial_rev: %s\n' % initial_rev)
+
+    _add_md5(config, args.recipename, appendfile)
+
+    return 0
+
+
+def _check_compatible_recipe(pn, d):
+    """Check if the recipe is supported by devtool
+
+    Raises DevtoolError (with an explanation) for recipe types that devtool
+    cannot meaningfully operate on; returns None when the recipe is OK.
+    pn is the recipe name (PN), d its parsed datastore.
+    """
+    if pn == 'perf':
+        raise DevtoolError("The perf recipe does not actually check out "
+                           "source and thus cannot be supported by this tool")
+
+    if pn in ['kernel-devsrc', 'package-index'] or pn.startswith('gcc-source'):
+        raise DevtoolError("The %s recipe is not supported by this tool" % pn)
+
+    if bb.data.inherits_class('image', d):
+        raise DevtoolError("The %s recipe is an image, and therefore is not "
+                           "supported by this tool" % pn)
+
+    if bb.data.inherits_class('populate_sdk', d):
+        raise DevtoolError("The %s recipe is an SDK, and therefore is not "
+                           "supported by this tool" % pn)
+
+    if bb.data.inherits_class('packagegroup', d):
+        raise DevtoolError("The %s recipe is a packagegroup, and therefore is "
+                           "not supported by this tool" % pn)
+
+    if bb.data.inherits_class('meta', d):
+        raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
+                           "not supported by this tool" % pn)
+
+    # An already-externalsrc recipe would defeat the do_patch-based workflow
+    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True):
+        raise DevtoolError("externalsrc is currently enabled for the %s "
+                           "recipe. This prevents the normal do_patch task "
+                           "from working. You will need to disable this "
+                           "first." % pn)
+
+def _ls_tree(directory):
+    """Recursive listing of files in a directory
+
+    Returns a list of file paths relative to 'directory' (directories
+    themselves are not included, only files).
+    """
+    ret = []
+    for root, dirs, files in os.walk(directory):
+        ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
+                    fname in files])
+    return ret
+
+
+def extract(args, config, basepath, workspace):
+    """Entry point for the devtool 'extract' subcommand
+
+    Extracts a recipe's patched source into args.srctree as a git tree on
+    branch args.branch. Returns 0 on success, 1 on failure.
+    """
+    import bb
+
+    # Ensures prerequisites (e.g. kern-tools-native for kernel-yocto recipes)
+    # are built and returns a ready tinfoil instance
+    tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+
+    rd = parse_recipe(config, tinfoil, args.recipename, True)
+    if not rd:
+        return 1
+
+    srctree = os.path.abspath(args.srctree)
+    # _extract_source returns the initial git revision, or None on failure
+    initial_rev = _extract_source(srctree, args.keep_temp, args.branch, rd)
+    logger.info('Source tree extracted to %s' % srctree)
+
+    if initial_rev:
+        return 0
+    else:
+        return 1
+
+class BbTaskExecutor(object):
+    """Class for executing bitbake tasks for a recipe
+
+    FIXME: This is very awkward. Unfortunately it's not currently easy to
+    properly execute tasks outside of bitbake itself, until then this has to
+    suffice if we are to handle e.g. linux-yocto's extra tasks
+    """
+
+    def __init__(self, rdata):
+        # rdata: the recipe datastore the tasks will run against
+        self.rdata = rdata
+        # Names of functions already run, to avoid re-executing shared deps
+        self.executed = []
+
+    def exec_func(self, func, report):
+        """Run bitbake task function
+
+        Recursively runs the task's declared 'deps' first (depth-first),
+        skipping anything already in self.executed. If 'report' is True an
+        info message is logged before execution.
+        """
+        if not func in self.executed:
+            deps = self.rdata.getVarFlag(func, 'deps')
+            if deps:
+                # Dependencies always report, even if the top-level call didn't
+                for taskdepfunc in deps:
+                    self.exec_func(taskdepfunc, True)
+            if report:
+                logger.info('Executing %s...' % func)
+            fn = self.rdata.getVar('FILE', True)
+            # NOTE: uses bitbake-internal bb.build._task_data to build the
+            # task-local datastore before executing the function
+            localdata = bb.build._task_data(fn, func, self.rdata)
+            bb.build.exec_func(func, localdata)
+            self.executed.append(func)
+
+
+def _prep_extract_operation(config, basepath, recipename):
+    """HACK: Ugly workaround for making sure that requirements are met when
+       trying to extract a package. Returns the tinfoil instance to be used."""
+    tinfoil = setup_tinfoil()
+    rd = parse_recipe(config, tinfoil, recipename, True)
+
+    if bb.data.inherits_class('kernel-yocto', rd):
+        # kernel-yocto checkout needs kern-tools-native; shut tinfoil down
+        # first because a bitbake server cannot run while we invoke bitbake
+        tinfoil.shutdown()
+        try:
+            stdout, _ = exec_build_env_command(config.init_path, basepath,
+                                               'bitbake kern-tools-native')
+            # Re-create tinfoil now that the build has finished
+            tinfoil = setup_tinfoil()
+        except bb.process.ExecutionError as err:
+            raise DevtoolError("Failed to build kern-tools-native:\n%s" %
+                               err.stdout)
+    return tinfoil
+
+
+def _extract_source(srctree, keep_temp, devbranch, d):
+    """Extract sources of a recipe
+
+    Runs the recipe's fetch/unpack/patch tasks in a temporary WORKDIR, turns
+    the unpacked source into a git tree (tag 'devtool-base' = pristine,
+    'devtool-patched' = after do_patch, checked out on branch 'devbranch'),
+    then moves it to 'srctree'. Returns the initial git revision, or None.
+    If keep_temp is True the temporary directory is preserved for debugging.
+    """
+    import bb.event
+    import oe.recipeutils
+
+    def eventfilter(name, handler, event, d):
+        """Bitbake event filter for devtool extract operation"""
+        # Suppress all event handlers except the base one to keep output quiet
+        if name == 'base_eventhandler':
+            return True
+        else:
+            return False
+
+    # set_eventfilter only exists in sufficiently new bitbake versions
+    if hasattr(bb.event, 'set_eventfilter'):
+        bb.event.set_eventfilter(eventfilter)
+
+    pn = d.getVar('PN', True)
+
+    _check_compatible_recipe(pn, d)
+
+    if os.path.exists(srctree):
+        if not os.path.isdir(srctree):
+            raise DevtoolError("output path %s exists and is not a directory" %
+                               srctree)
+        elif os.listdir(srctree):
+            raise DevtoolError("output path %s already exists and is "
+                               "non-empty" % srctree)
+
+    # Prepare for shutil.move later on
+    # (create parents, then remove the leaf so move() can create it)
+    bb.utils.mkdirhier(srctree)
+    os.rmdir(srctree)
+
+    # We don't want notes to be printed, they are too verbose
+    origlevel = bb.logger.getEffectiveLevel()
+    if logger.getEffectiveLevel() > logging.DEBUG:
+        bb.logger.setLevel(logging.WARNING)
+
+    initial_rev = None
+    tempdir = tempfile.mkdtemp(prefix='devtool')
+    try:
+        crd = d.createCopy()
+        # Make a subdir so we guard against WORKDIR==S
+        workdir = os.path.join(tempdir, 'workdir')
+        crd.setVar('WORKDIR', workdir)
+        crd.setVar('T', os.path.join(tempdir, 'temp'))
+        if not crd.getVar('S', True).startswith(workdir):
+            # Usually a shared workdir recipe (kernel, gcc)
+            # Try to set a reasonable default
+            if bb.data.inherits_class('kernel', d):
+                crd.setVar('S', '${WORKDIR}/source')
+            else:
+                crd.setVar('S', '${WORKDIR}/${BP}')
+        if bb.data.inherits_class('kernel', d):
+            # We don't want to move the source to STAGING_KERNEL_DIR here
+            crd.setVar('STAGING_KERNEL_DIR', '${S}')
+
+        task_executor = BbTaskExecutor(crd)
+
+        # Ensure externalsrc can't interfere with fetch/unpack/patch
+        crd.setVar('EXTERNALSRC_forcevariable', '')
+
+        logger.info('Fetching %s...' % pn)
+        task_executor.exec_func('do_fetch', False)
+        logger.info('Unpacking...')
+        task_executor.exec_func('do_unpack', False)
+        if bb.data.inherits_class('kernel-yocto', d):
+            # Extra step for kernel to populate the source directory
+            logger.info('Doing kernel checkout...')
+            task_executor.exec_func('do_kernel_checkout', False)
+        srcsubdir = crd.getVar('S', True)
+        if srcsubdir == workdir:
+            # Find non-patch sources that were "unpacked" to srctree directory
+            recipe_patches = [os.path.basename(patch) for patch in
+                              oe.recipeutils.get_recipe_patches(crd)]
+            src_files = [fname for fname in _ls_tree(workdir) if
+                         os.path.basename(fname) not in recipe_patches]
+            # Force separate S so that patch files can be left out from srctree
+            srcsubdir = tempfile.mkdtemp(dir=workdir)
+            crd.setVar('S', srcsubdir)
+            # Move source files to S
+            for path in src_files:
+                tgt_dir = os.path.join(srcsubdir, os.path.dirname(path))
+                bb.utils.mkdirhier(tgt_dir)
+                shutil.move(os.path.join(workdir, path), tgt_dir)
+        elif os.path.dirname(srcsubdir) != workdir:
+            # Handle if S is set to a subdirectory of the source
+            # (use the top-level unpack directory as the tree to extract)
+            srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
+
+        # If the fetcher produced a clone sharing objects with DL_DIR, make
+        # it a standalone clone so the extracted tree is self-contained
+        scriptutils.git_convert_standalone_clone(srcsubdir)
+
+        patchdir = os.path.join(srcsubdir, 'patches')
+        haspatches = False
+        if os.path.exists(patchdir):
+            if os.listdir(patchdir):
+                haspatches = True
+            else:
+                # Empty quilt patches dir - remove so git init doesn't pick it up
+                os.rmdir(patchdir)
+
+        if not os.listdir(srcsubdir):
+            raise DevtoolError("no source unpacked to S, perhaps the %s "
+                               "recipe doesn't use any source?" % pn)
+
+        # Non-git sources: create a git repo with one baseline commit
+        if not os.path.exists(os.path.join(srcsubdir, '.git')):
+            bb.process.run('git init', cwd=srcsubdir)
+            bb.process.run('git add .', cwd=srcsubdir)
+            bb.process.run('git commit -q -m "Initial commit from upstream at version %s"' % crd.getVar('PV', True), cwd=srcsubdir)
+
+        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
+        initial_rev = stdout.rstrip()
+
+        bb.process.run('git checkout -b %s' % devbranch, cwd=srcsubdir)
+        bb.process.run('git tag -f devtool-base', cwd=srcsubdir)
+        # Apply recipe patches as individual git commits
+        crd.setVar('PATCHTOOL', 'git')
+
+        logger.info('Patching...')
+        task_executor.exec_func('do_patch', False)
+
+        bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
+
+        if os.path.exists(patchdir):
+            # Drop the quilt 'patches' dir created by do_patch; if it had
+            # content originally, restore it from git instead
+            shutil.rmtree(patchdir)
+            if haspatches:
+                bb.process.run('git checkout patches', cwd=srcsubdir)
+
+        shutil.move(srcsubdir, srctree)
+    finally:
+        bb.logger.setLevel(origlevel)
+
+        if keep_temp:
+            logger.info('Preserving temporary directory %s' % tempdir)
+        else:
+            shutil.rmtree(tempdir)
+    return initial_rev
+
+def _add_md5(config, recipename, filename):
+    """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace
+
+    Appends 'recipename|relative-path|md5' lines to .devtool_md5 in the
+    workspace; _check_preserve() reads these back to detect manual edits.
+    """
+    import bb.utils
+
+    def addfile(fn):
+        md5 = bb.utils.md5_file(fn)
+        with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a') as f:
+            f.write('%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5))
+
+    if os.path.isdir(filename):
+        # NOTE(review): this walks the *parent* of 'filename'
+        # (os.path.dirname), not 'filename' itself, so sibling entries get
+        # recorded too - looks unintentional, confirm before relying on it
+        for root, _, files in os.walk(os.path.dirname(filename)):
+            for f in files:
+                addfile(os.path.join(root, f))
+    else:
+        addfile(filename)
+
+def _check_preserve(config, recipename):
+    """Check if a file was manually changed and needs to be saved in 'attic'
+       directory
+
+    Processes the workspace .devtool_md5 file: entries for 'recipename' are
+    removed from it and their files are either deleted (unchanged) or moved
+    to the 'attic' directory (md5 mismatch = user modified them). Entries
+    for other recipes are carried over untouched.
+    """
+    import bb.utils
+    origfile = os.path.join(config.workspace_path, '.devtool_md5')
+    newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
+    preservepath = os.path.join(config.workspace_path, 'attic')
+    with open(origfile, 'r') as f:
+        with open(newfile, 'w') as tf:
+            for line in f.readlines():
+                # Line format: recipename|relative-path|md5 (see _add_md5)
+                splitline = line.rstrip().split('|')
+                if splitline[0] == recipename:
+                    removefile = os.path.join(config.workspace_path, splitline[1])
+                    try:
+                        md5 = bb.utils.md5_file(removefile)
+                    except IOError as err:
+                        if err.errno == 2:
+                            # File no longer exists, skip it (errno 2 == ENOENT)
+                            continue
+                        else:
+                            raise
+                    if splitline[2] != md5:
+                        # User modified the file - keep a copy instead of deleting
+                        bb.utils.mkdirhier(preservepath)
+                        preservefile = os.path.basename(removefile)
+                        logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
+                        shutil.move(removefile, os.path.join(preservepath, preservefile))
+                    else:
+                        os.remove(removefile)
+                else:
+                    # Belongs to a different recipe - keep the entry
+                    tf.write(line)
+    # Atomically replace the md5 file with the filtered version
+    os.rename(newfile, origfile)
+
+def modify(args, config, basepath, workspace):
+    """Entry point for the devtool 'modify' subcommand
+
+    Sets up an existing recipe to build from an external source tree by
+    writing a workspace bbappend (externalsrc). With -x/--extract the source
+    is first extracted into args.srctree. Returns 0 on success, 1 on failure.
+    """
+    import bb
+    import oe.recipeutils
+
+    if args.recipename in workspace:
+        raise DevtoolError("recipe %s is already in your workspace" %
+                           args.recipename)
+
+    if not args.extract and not os.path.isdir(args.srctree):
+        raise DevtoolError("directory %s does not exist or not a directory "
+                           "(specify -x to extract source from recipe)" %
+                           args.srctree)
+    if args.extract:
+        tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+    else:
+        tinfoil = setup_tinfoil()
+
+    rd = parse_recipe(config, tinfoil, args.recipename, True)
+    if not rd:
+        return 1
+    recipefile = rd.getVar('FILE', True)
+    appendname = os.path.splitext(os.path.basename(recipefile))[0]
+    if args.wildcard:
+        # Replace the version suffix so the bbappend matches any version
+        appendname = re.sub(r'_.*', '_%', appendname)
+    appendpath = os.path.join(config.workspace_path, 'appends')
+    appendfile = os.path.join(appendpath, appendname + '.bbappend')
+    if os.path.exists(appendfile):
+        raise DevtoolError("Another variant of recipe %s is already in your "
+                           "workspace (only one variant of a recipe can "
+                           "currently be worked on at once)"
+                           % args.recipename)
+
+    _check_compatible_recipe(args.recipename, rd)
+
+    initial_rev = None
+    commits = []
+    srctree = os.path.abspath(args.srctree)
+    if args.extract:
+        initial_rev = _extract_source(args.srctree, False, args.branch, rd)
+        if not initial_rev:
+            return 1
+        logger.info('Source tree extracted to %s' % srctree)
+        # Get list of commits since this revision
+        (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=args.srctree)
+        commits = stdout.split()
+    else:
+        if os.path.exists(os.path.join(args.srctree, '.git')):
+            # Check if it's a tree previously extracted by us
+            # (a devtool-extracted tree has the devtool-base tag/branch marker)
+            try:
+                (stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=args.srctree)
+            except bb.process.ExecutionError:
+                stdout = ''
+            for line in stdout.splitlines():
+                # '*' marks the currently checked-out branch
+                if line.startswith('*'):
+                    (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=args.srctree)
+                    initial_rev = stdout.rstrip()
+            if not initial_rev:
+                # Otherwise, just grab the head revision
+                (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=args.srctree)
+                initial_rev = stdout.rstrip()
+
+    # Check that recipe isn't using a shared workdir
+    s = os.path.abspath(rd.getVar('S', True))
+    workdir = os.path.abspath(rd.getVar('WORKDIR', True))
+    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
+        # Handle if S is set to a subdirectory of the source
+        # (point EXTERNALSRC at the corresponding subdirectory of srctree)
+        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
+        srctree = os.path.join(srctree, srcsubdir)
+
+    if not os.path.exists(appendpath):
+        os.makedirs(appendpath)
+    with open(appendfile, 'w') as f:
+        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
+        f.write('inherit externalsrc\n')
+        f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
+        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (args.recipename, srctree))
+
+        # Decide whether to build in the source tree (B == S)
+        b_is_s = True
+        if args.no_same_dir:
+            logger.info('using separate build directory since --no-same-dir specified')
+            b_is_s = False
+        elif args.same_dir:
+            logger.info('using source tree as build directory since --same-dir specified')
+        elif bb.data.inherits_class('autotools-brokensep', rd):
+            logger.info('using source tree as build directory since original recipe inherits autotools-brokensep')
+        elif rd.getVar('B', True) == s:
+            logger.info('using source tree as build directory since that is the default for this recipe')
+        else:
+            b_is_s = False
+        if b_is_s:
+            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (args.recipename, srctree))
+
+        if bb.data.inherits_class('kernel', rd):
+            f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout do_fetch do_unpack"\n')
+        if initial_rev:
+            # Stored as comments; parsed back out by _get_patchset_revs()
+            f.write('\n# initial_rev: %s\n' % initial_rev)
+            for commit in commits:
+                f.write('# commit: %s\n' % commit)
+
+    _add_md5(config, args.recipename, appendfile)
+
+    logger.info('Recipe %s now set up to build from %s' % (args.recipename, srctree))
+
+    return 0
+
+def _get_patchset_revs(args, srctree, recipe_path):
+    """Get initial and update rev of a recipe. These are the start point of the
+    whole patchset and start point for the patches to be re-generated/updated.
+
+    Returns (initial_rev, update_rev); both may be None if no initial_rev
+    comment is found in the bbappend and --initial-rev was not given.
+    """
+    import bb
+
+    # Explicit override from the command line wins
+    if args.initial_rev:
+        return args.initial_rev, args.initial_rev
+
+    # Parse initial rev from recipe
+    # (the '# initial_rev:' / '# commit:' comments written by add/modify)
+    commits = []
+    initial_rev = None
+    with open(recipe_path, 'r') as f:
+        for line in f:
+            if line.startswith('# initial_rev:'):
+                initial_rev = line.split(':')[-1].strip()
+            elif line.startswith('# commit:'):
+                commits.append(line.split(':')[-1].strip())
+
+    update_rev = initial_rev
+    if initial_rev:
+        # Find first actually changed revision: advance update_rev past the
+        # prefix of commits that still match the recorded list
+        stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
+                                   initial_rev, cwd=srctree)
+        newcommits = stdout.split()
+        for i in xrange(min(len(commits), len(newcommits))):
+            if newcommits[i] == commits[i]:
+                update_rev = commits[i]
+
+    return initial_rev, update_rev
+
+def _remove_patch_entries(srcuri, patchlist):
+    """Remove patch entries from SRC_URI
+
+    NOTE: mutates 'srcuri' (a list of SRC_URI entries) in place, popping any
+    file:// entry whose basename matches a patch in 'patchlist'. Returns
+    (entries, remaining): the removed SRC_URI entries, and the patches for
+    which no matching entry was found.
+    """
+    remaining = patchlist[:]
+    entries = []
+    for patch in patchlist:
+        patchfile = os.path.basename(patch)
+        for i in xrange(len(srcuri)):
+            # Compare basenames, ignoring any ;param suffixes on the URI
+            if srcuri[i].startswith('file://') and os.path.basename(srcuri[i].split(';')[0]) == patchfile:
+                entries.append(srcuri[i])
+                remaining.remove(patch)
+                srcuri.pop(i)
+                break
+    return entries, remaining
+
+def _remove_patch_files(args, patches, destpath):
+    """Unlink existing patch files
+
+    In --append mode the patches are looked up under 'destpath' (the bbappend
+    layer directory) rather than at their original locations. Empty parent
+    directories are removed afterwards.
+    """
+    for patchfile in patches:
+        if args.append:
+            if not destpath:
+                raise Exception('destpath should be set here')
+            patchfile = os.path.join(destpath, os.path.basename(patchfile))
+
+        if os.path.exists(patchfile):
+            logger.info('Removing patch %s' % patchfile)
+            # FIXME "git rm" here would be nice if the file in question is
+            #       tracked
+            # FIXME there's a chance that this file is referred to by
+            #       another recipe, in which case deleting wouldn't be the
+            #       right thing to do
+            os.remove(patchfile)
+            # Remove directory if empty
+            try:
+                os.rmdir(os.path.dirname(patchfile))
+            except OSError as ose:
+                # Non-empty directory is fine; anything else is a real error
+                if ose.errno != errno.ENOTEMPTY:
+                    raise
+
+def _update_recipe_srcrev(args, srctree, rd, config_data):
+    """Implement the 'srcrev' mode of update-recipe
+
+    Points the recipe's SRCREV at the source tree's HEAD and (unless
+    --no-remove) drops any recipe patches whose commits are now part of the
+    fetched revision. Writes either the recipe itself or a bbappend in the
+    layer given by --append.
+    """
+    import bb
+    import oe.recipeutils
+    from oe.patch import GitApplyTree
+
+    recipefile = rd.getVar('FILE', True)
+    logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile))
+
+    # Get HEAD revision
+    try:
+        stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
+    except bb.process.ExecutionError as err:
+        raise DevtoolError('Failed to get HEAD revision in %s: %s' %
+                           (srctree, err))
+    srcrev = stdout.strip()
+    # A full git SHA-1 is exactly 40 hex characters
+    if len(srcrev) != 40:
+        raise DevtoolError('Invalid hash returned by git: %s' % stdout)
+
+    destpath = None
+    removepatches = []
+    patchfields = {}
+    patchfields['SRCREV'] = srcrev
+    orig_src_uri = rd.getVar('SRC_URI', False) or ''
+    if not args.no_remove:
+        # Find list of existing patches in recipe file
+        existing_patches = oe.recipeutils.get_recipe_patches(rd)
+
+        # Export commits on top of the old SRCREV as patch files; any recipe
+        # patch with the same name is now upstream and can be removed
+        old_srcrev = (rd.getVar('SRCREV', False) or '')
+        tempdir = tempfile.mkdtemp(prefix='devtool')
+        try:
+            GitApplyTree.extractPatches(srctree, old_srcrev, tempdir)
+            newpatches = os.listdir(tempdir)
+            for patch in existing_patches:
+                patchfile = os.path.basename(patch)
+                if patchfile in newpatches:
+                    removepatches.append(patch)
+        finally:
+            shutil.rmtree(tempdir)
+
+        if removepatches:
+            srcuri = orig_src_uri.split()
+            # _remove_patch_entries mutates srcuri in place
+            removedentries, _ = _remove_patch_entries(srcuri, removepatches)
+            if removedentries:
+                patchfields['SRC_URI'] = ' '.join(srcuri)
+
+    if args.append:
+        # Write changes into a bbappend in the nominated layer
+        _, destpath = oe.recipeutils.bbappend_recipe(
+                rd, args.append, None, wildcardver=args.wildcard_version,
+                extralines=patchfields)
+    else:
+        # Modify the recipe file directly
+        oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
+
+    if not 'git://' in orig_src_uri:
+        logger.info('You will need to update SRC_URI within the recipe to '
+                    'point to a git repository where you have pushed your '
+                    'changes')
+
+    _remove_patch_files(args, removepatches, destpath)
+
+def _update_recipe_patch(args, config, srctree, rd, config_data):
+    """Implement the 'patch' mode of update-recipe
+
+    Regenerates the recipe's patch files from the commits in the source tree
+    (since the recorded initial/update revision), updating existing patches,
+    adding new ones to SRC_URI, and (unless --no-remove) deleting patches
+    whose commits were dropped. With --append the changes go into a bbappend
+    in the nominated layer instead of the recipe itself.
+    """
+    import bb
+    import oe.recipeutils
+    from oe.patch import GitApplyTree
+
+    recipefile = rd.getVar('FILE', True)
+    append = os.path.join(config.workspace_path, 'appends', '%s.bbappend' %
+                          os.path.splitext(os.path.basename(recipefile))[0])
+    if not os.path.exists(append):
+        raise DevtoolError('unable to find workspace bbappend for recipe %s' %
+                           args.recipename)
+
+    initial_rev, update_rev = _get_patchset_revs(args, srctree, append)
+    if not initial_rev:
+        raise DevtoolError('Unable to find initial revision - please specify '
+                           'it with --initial-rev')
+
+    # Find list of existing patches in recipe file
+    existing_patches = oe.recipeutils.get_recipe_patches(rd)
+
+    removepatches = []
+    # Matches an optional git-format-patch style '0001-' numeric prefix;
+    # group(2) is the name without the prefix
+    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
+    if not args.no_remove:
+        # Get all patches from source tree and check if any should be removed
+        tempdir = tempfile.mkdtemp(prefix='devtool')
+        try:
+            GitApplyTree.extractPatches(srctree, initial_rev, tempdir)
+            # Strip numbering from patch names. If it's a git sequence named
+            # patch, the numbers might not match up since we are starting from
+            # a different revision This does assume that people are using
+            # unique shortlog values, but they ought to be anyway...
+            newpatches = [seqpatch_re.match(fname).group(2) for fname in
+                          os.listdir(tempdir)]
+            for patch in existing_patches:
+                basename = seqpatch_re.match(
+                                os.path.basename(patch)).group(2)
+                if basename not in newpatches:
+                    removepatches.append(patch)
+        finally:
+            shutil.rmtree(tempdir)
+
+    # Get updated patches from source tree
+    tempdir = tempfile.mkdtemp(prefix='devtool')
+    try:
+        GitApplyTree.extractPatches(srctree, update_rev, tempdir)
+
+        # Match up and replace existing patches with corresponding new patches
+        updatepatches = False
+        updaterecipe = False
+        destpath = None
+        newpatches = os.listdir(tempdir)
+        if args.append:
+            # Map generated patch path -> name to use in the bbappend layer
+            # (None means "use the generated name", i.e. a brand-new patch)
+            patchfiles = {}
+            for patch in existing_patches:
+                patchfile = os.path.basename(patch)
+                if patchfile in newpatches:
+                    patchfiles[os.path.join(tempdir, patchfile)] = patchfile
+                    newpatches.remove(patchfile)
+            for patchfile in newpatches:
+                patchfiles[os.path.join(tempdir, patchfile)] = None
+
+            if patchfiles or removepatches:
+                removevalues = None
+                if removepatches:
+                    srcuri = (rd.getVar('SRC_URI', False) or '').split()
+                    removedentries, remaining = _remove_patch_entries(
+                                                    srcuri, removepatches)
+                    if removedentries or remaining:
+                        remaining = ['file://' + os.path.basename(item) for
+                                     item in remaining]
+                        removevalues = {'SRC_URI': removedentries + remaining}
+                _, destpath = oe.recipeutils.bbappend_recipe(
+                                rd, args.append, patchfiles,
+                                removevalues=removevalues)
+            else:
+                logger.info('No patches needed updating')
+        else:
+            # In-place mode: overwrite matching patches next to the recipe
+            for patch in existing_patches:
+                patchfile = os.path.basename(patch)
+                if patchfile in newpatches:
+                    logger.info('Updating patch %s' % patchfile)
+                    shutil.move(os.path.join(tempdir, patchfile), patch)
+                    newpatches.remove(patchfile)
+                    updatepatches = True
+            srcuri = (rd.getVar('SRC_URI', False) or '').split()
+            if newpatches:
+                # Add any patches left over
+                patchdir = os.path.join(os.path.dirname(recipefile),
+                                        rd.getVar('BPN', True))
+                bb.utils.mkdirhier(patchdir)
+                for patchfile in newpatches:
+                    logger.info('Adding new patch %s' % patchfile)
+                    shutil.move(os.path.join(tempdir, patchfile),
+                                os.path.join(patchdir, patchfile))
+                    srcuri.append('file://%s' % patchfile)
+                    updaterecipe = True
+            if removepatches:
+                # _remove_patch_entries mutates srcuri in place
+                removedentries, _ = _remove_patch_entries(srcuri, removepatches)
+                if removedentries:
+                    updaterecipe = True
+            if updaterecipe:
+                logger.info('Updating recipe %s' % os.path.basename(recipefile))
+                oe.recipeutils.patch_recipe(rd, recipefile,
+                                            {'SRC_URI': ' '.join(srcuri)})
+            elif not updatepatches:
+                # Neither patches nor recipe were updated
+                logger.info('No patches need updating')
+    finally:
+        shutil.rmtree(tempdir)
+
+    _remove_patch_files(args, removepatches, destpath)
+
+def _guess_recipe_update_mode(srctree, rdata):
+    """Guess the recipe update mode to use
+
+    Returns 'srcrev' when the recipe fetches from git and the source tree's
+    HEAD is already reachable from the upstream branch on 'origin' (i.e. the
+    changes have been pushed); otherwise returns 'patch'.
+    """
+    src_uri = (rdata.getVar('SRC_URI', False) or '').split()
+    git_uris = [uri for uri in src_uri if uri.startswith('git://')]
+    if not git_uris:
+        return 'patch'
+    # Just use the first URI for now
+    uri = git_uris[0]
+    # Check remote branch
+    # (decodeurl()[5] is the dict of URI parameters, e.g. branch=...)
+    params = bb.fetch.decodeurl(uri)[5]
+    upstr_branch = params['branch'] if 'branch' in params else 'master'
+    # Check if current branch HEAD is found in upstream branch
+    stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
+    head_rev = stdout.rstrip()
+    stdout, _ = bb.process.run('git branch -r --contains %s' % head_rev,
+                               cwd=srctree)
+    remote_brs = [branch.strip() for branch in stdout.splitlines()]
+    if 'origin/' + upstr_branch in remote_brs:
+        return 'srcrev'
+
+    return 'patch'
+
def update_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'update-recipe' subcommand

    Applies the changes made in the external source tree back to the
    recipe, either by updating SRCREV or by refreshing its patches,
    depending on --mode ('auto' picks one from the recipe's SRC_URI and
    the state of the source tree).

    Arguments:
        args      -- parsed command-line arguments (recipename, mode,
                     append, initial_rev, no_remove, wildcard_version)
        config    -- devtool configuration (provides workspace_path)
        basepath  -- base build directory (part of the common subcommand
                     signature; not used directly here)
        workspace -- dict of recipes currently in the workspace

    Returns 0 on success, 1 if the recipe could not be parsed; raises
    DevtoolError on user error.
    """
    if not args.recipename in workspace:
        raise DevtoolError("no recipe named %s in your workspace" %
                           args.recipename)

    # Validate the bbappend destination layer up front so we fail before
    # doing any real work
    if args.append:
        if not os.path.exists(args.append):
            raise DevtoolError('bbappend destination layer directory "%s" '
                               'does not exist' % args.append)
        if not os.path.exists(os.path.join(args.append, 'conf', 'layer.conf')):
            raise DevtoolError('conf/layer.conf not found in bbappend '
                               'destination layer "%s"' % args.append)

    tinfoil = setup_tinfoil()

    rd = parse_recipe(config, tinfoil, args.recipename, True)
    if not rd:
        return 1

    srctree = workspace[args.recipename]['srctree']
    if args.mode == 'auto':
        mode = _guess_recipe_update_mode(srctree, rd)
    else:
        mode = args.mode

    # Dispatch to the chosen update implementation
    if mode == 'srcrev':
        _update_recipe_srcrev(args, srctree, rd, tinfoil.config_data)
    elif mode == 'patch':
        _update_recipe_patch(args, config, srctree, rd, tinfoil.config_data)
    else:
        raise DevtoolError('update_recipe: invalid mode %s' % mode)

    rf = rd.getVar('FILE', True)
    if rf.startswith(config.workspace_path):
        # The recipe itself lives inside the workspace (e.g. created by
        # 'devtool add'), so a subsequent reset would discard the changes
        logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)

    return 0
+
+
def status(args, config, basepath, workspace):
    """Entry point for the devtool 'status' subcommand

    Prints one "recipe: srctree" line per workspace entry, or a hint when
    the workspace is empty. Always returns 0.
    """
    if not workspace:
        logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
        return 0
    for recipe, value in workspace.iteritems():
        print("%s: %s" % (recipe, value['srctree']))
    return 0
+
+
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand

    Removes one recipe (or all of them with -a/--all) from the workspace:
    cleans its sysroot output via bitbake (unless -n/--no-clean), then
    moves any leftover recipe/append files into the workspace 'attic'
    directory so user edits are not lost.

    Returns 0 on success; raises DevtoolError on user error or if the
    clean step fails.
    """
    import bb
    # Validate the recipename / --all combination before touching anything
    if args.recipename:
        if args.all:
            raise DevtoolError("Recipe cannot be specified if -a/--all is used")
        elif not args.recipename in workspace:
            raise DevtoolError("no recipe named %s in your workspace" %
                               args.recipename)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")
    if args.all:
        # Iterating the workspace dict yields the recipe names
        recipes = workspace
    else:
        recipes = [args.recipename]

    for pn in recipes:
        if not args.no_clean:
            logger.info('Cleaning sysroot for recipe %s...' % pn)
            try:
                exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % pn)
            except bb.process.ExecutionError as e:
                raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
                                   'wish, you may specify -n/--no-clean to '
                                   'skip running this command when resetting' %
                                   (e.command, e.stdout))

        # Warns if the user has modified files we are about to move away
        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn)
        def preservedir(origdir):
            # Move every file under origdir into the attic, then remove the
            # (now empty) directory tree
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        logger.warn('Preserving %s in %s' % (fn, preservepath))
                        bb.utils.mkdirhier(preservepath)
                        shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
                    for dn in dirs:
                        os.rmdir(os.path.join(root, dn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))

    return 0
+
+
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # 'add' - create a brand new recipe
    parser = subparsers.add_parser('add', help='Add a new recipe',
                                   description='Adds a new recipe')
    parser.add_argument('recipename', help='Name for new recipe to add')
    parser.add_argument('srctree', help='Path to external source tree')
    parser.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    parser.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree', metavar='URI')
    parser.add_argument('--version', '-V', help='Version to use within recipe (PV)')
    parser.set_defaults(func=add)

    # 'modify' - work on the source of an existing recipe
    parser = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
                                   description='Enables modifying the source for an existing recipe',
                                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('recipename', help='Name for recipe to edit')
    parser.add_argument('srctree', help='Path to external source tree')
    parser.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
    parser.add_argument('--extract', '-x', action="store_true", help='Extract source as well')
    exclusive = parser.add_mutually_exclusive_group()
    exclusive.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    exclusive.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (only when using -x)')
    parser.set_defaults(func=modify)

    # 'extract' - unpack and patch a recipe's source
    parser = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
                                   description='Extracts the source for an existing recipe',
                                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('recipename', help='Name for recipe to extract the source for')
    parser.add_argument('srctree', help='Path to where to extract the source tree')
    parser.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
    parser.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser.set_defaults(func=extract)

    # 'update-recipe' - push source tree changes back into the recipe
    parser = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
                                   description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV)')
    parser.add_argument('recipename', help='Name of recipe to update')
    parser.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
    parser.add_argument('--initial-rev', help='Starting revision for patches')
    parser.add_argument('--append', '-a', help='Write changes to a bbappend in the specified layer instead of the recipe', metavar='LAYERDIR')
    parser.add_argument('--wildcard-version', '-w', help='In conjunction with -a/--append, use a wildcard to make the bbappend apply to any recipe version', action='store_true')
    parser.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
    parser.set_defaults(func=update_recipe)

    # 'status' - list workspace contents
    parser = subparsers.add_parser('status', help='Show workspace status',
                                   description='Lists recipes currently in your workspace and the paths to their respective external source trees',
                                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.set_defaults(func=status)

    # 'reset' - remove recipe(s) from the workspace
    parser = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
                                   description='Removes the specified recipe from your workspace (resetting its state)',
                                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('recipename', nargs='?', help='Recipe to reset')
    parser.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
    parser.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
    parser.set_defaults(func=reset)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
new file mode 100644
index 0000000..86443b0
--- /dev/null
+++ b/scripts/lib/devtool/upgrade.py
@@ -0,0 +1,354 @@
+# Development tool - upgrade command plugin
+#
+# Copyright (C) 2014-2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+"""Devtool upgrade plugin"""
+
+import os
+import sys
+import re
+import shutil
+import tempfile
+import logging
+import argparse
+import scriptutils
+import errno
+import bb
+import oe.recipeutils
+from devtool import standard
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe
+
+logger = logging.getLogger('devtool')
+
def plugin_init(pluginlist):
    """Plugin initialization (this plugin needs no setup)"""
    return None
+
def _run(cmd, cwd=''):
    """Run *cmd* through bb.process.run from *cwd*; returns (stdout, stderr)."""
    logger.debug("Running command %s> %s" % (cwd, cmd))
    return bb.process.run('%s' % cmd, cwd=cwd)
+
+def _get_srctree(tmpdir):
+    srctree = tmpdir
+    dirs = os.listdir(tmpdir)
+    if len(dirs) == 1:
+        srctree = os.path.join(tmpdir, dirs[0])
+    return srctree
+
def _copy_source_code(orig, dest):
    """Move every tracked file from *orig* into *dest*, preserving relative paths."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        os.rename(os.path.join(orig, relpath), target)
+
+def _get_checksums(rf):
+    import re
+    checksums = {}
+    with open(rf) as f:
+        for line in f:
+            for cs in ['md5sum', 'sha256sum']:
+                m = re.match("^SRC_URI\[%s\].*=.*\"(.*)\"" % cs, line)
+                if m:
+                    checksums[cs] = m.group(1)
+    return checksums
+
+def _replace_checksums(rf, md5, sha256):
+    if not md5 and not sha256:
+        return
+    checksums = {'md5sum':md5, 'sha256sum':sha256}
+    with open(rf + ".tmp", "w+") as tmprf:
+        with open(rf) as f:
+            for line in f:
+                m = None
+                for cs in checksums.keys():
+                    m = re.match("^SRC_URI\[%s\].*=.*\"(.*)\"" % cs, line)
+                    if m:
+                        if checksums[cs]:
+                            oldcheck = m.group(1)
+                            newcheck = checksums[cs]
+                            line = line.replace(oldcheck, newcheck)
+                        break
+                tmprf.write(line)
+    os.rename(rf + ".tmp", rf)
+
+
+def _remove_patch_dirs(recipefolder):
+    for root, dirs, files in os.walk(recipefolder):
+        for d in dirs:
+            shutil.rmtree(os.path.join(root,d))
+
+def _recipe_contains(rf, var):
+    import re
+    found = False
+    with open(rf) as f:
+        for line in f:
+            if re.match("^%s.*=.*" % var, line):
+                found = True
+                break
+    return found
+
def _rename_recipe_dirs(oldpv, newpv, path):
    """Rename version-named subdirectories under *path* from oldpv to newpv."""
    for root, dirs, files in os.walk(path):
        for olddir in dirs:
            if olddir.find(oldpv) != -1:
                newdir = olddir.replace(oldpv, newpv)
                if olddir != newdir:
                    # BUG FIX: run mv from the directory being walked -
                    # previously _run used its default cwd, where the
                    # relative directory names would not resolve.
                    _run('mv %s %s' % (olddir, newdir), cwd=root)
+
def _rename_recipe_file(bpn, oldpv, newpv, path):
    """Rename the versioned recipe file under *path* and return its new path.

    Falls back to an unversioned <bpn>_git.bb recipe when no versioned
    recipe exists; raises DevtoolError if neither can be found.
    """
    oldrecipe = "%s_%s.bb" % (bpn, oldpv)
    newrecipe = "%s_%s.bb" % (bpn, newpv)
    if os.path.isfile(os.path.join(path, oldrecipe)):
        if oldrecipe != newrecipe:
            _run('mv %s %s' % (oldrecipe, newrecipe), cwd=path)
    else:
        recipe = "%s_git.bb" % bpn
        if os.path.isfile(os.path.join(path, recipe)):
            # git recipes are unversioned, so no rename is needed
            newrecipe = recipe
        else:
            # BUG FIX: the error was previously raised when the fallback
            # recipe WAS found, making the git fallback path unreachable
            raise DevtoolError("Original recipe not found on workspace")
    return os.path.join(path, newrecipe)
+
def _rename_recipe_files(bpn, oldpv, newpv, path):
    """Rename the recipe's version directories, then the recipe file itself.

    Returns the full path of the (possibly renamed) recipe file.
    """
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(bpn, oldpv, newpv, path)
+
def _use_external_build(same_dir, no_same_dir, d):
    """Decide whether the build directory should be the source tree itself.

    Command-line flags win; otherwise the recipe's own configuration
    (autotools-brokensep inheritance, or B already equal to S) decides.
    """
    if no_same_dir:
        logger.info('using separate build directory since --no-same-dir specified')
        return False
    if same_dir:
        logger.info('using source tree as build directory since --same-dir specified')
        return True
    if bb.data.inherits_class('autotools-brokensep', d):
        logger.info('using source tree as build directory since original recipe inherits autotools-brokensep')
        return True
    if d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
        logger.info('using source tree as build directory since that is the default for this recipe')
        return True
    return False
+
def _write_append(rc, srctree, same_dir, no_same_dir, rev, workspace, d):
    """Writes a workspace bbappend for recipe file *rc*.

    The append points the recipe at the external source tree via the
    externalsrc class, optionally records the initial revision as a
    comment, and is written to <workspace>/appends/<recipe>.bbappend.

    Returns the path of the bbappend that was written; raises
    DevtoolError if *rc* does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN', True)
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
        f.write('inherit externalsrc\n')
        # BUG FIX: the two adjacent string literals joined with no space,
        # writing "affectingmultiple" into the generated bbappend
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
        if _use_external_build(same_dir, no_same_dir, d):
            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
        if rev:
            f.write('\n# initial_rev: %s\n' % rev)
    return af
+
+def _cleanup_on_error(rf, srctree):
+    rfp = os.path.split(rf)[0] # recipe folder
+    rfpp = os.path.split(rfp)[0] # recipes folder
+    if os.path.exists(rfp):
+        shutil.rmtree(b)
+    if not len(os.listdir(rfpp)):
+        os.rmdir(rfpp)
+    srctree = os.path.abspath(srctree)
+    if os.path.exists(srctree):
+        shutil.rmtree(srctree)
+
def _upgrade_error(e, rf, srctree):
    """Clean up after a failed upgrade and re-raise *e* as a DevtoolError.

    Only cleans up when a recipe file was already created (rf is set).
    """
    if rf:
        # BUG FIX: was calling undefined 'cleanup_on_error' (missing the
        # leading underscore), which raised NameError on the error path
        _cleanup_on_error(rf, srctree)
    logger.error(e)
    raise DevtoolError(e)
+
+def _get_uri(rd):
+    srcuris = rd.getVar('SRC_URI', True).split()
+    if not len(srcuris):
+        raise DevtoolError('SRC_URI not found on recipe')
+    srcuri = srcuris[0] # it is assumed, URI is at first position
+    srcrev = '${AUTOREV}'
+    if '://' in srcuri:
+        # Fetch a URL
+        rev_re = re.compile(';rev=([^;]+)')
+        res = rev_re.search(srcuri)
+        if res:
+            srcrev = res.group(1)
+            srcuri = rev_re.sub('', srcuri)
+    return srcuri, srcrev
+
def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version

    srctree must already contain the old version checked out as a git
    tree (with the devtool-patched branch present). For git sources the
    requested revision is checked out in place; for tarball-like sources
    the new version is fetched into a temporary directory and overlaid
    onto a fresh devtool-<newpv> branch. Unless no_patch is set, the
    recipe's existing patches are then rebased onto the new base.

    Returns (HEAD revision of the new base, md5, sha256); the checksums
    are None for git-fetched sources.
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    # NOTE(review): 'pv' is assigned but never used
    pv = crd.getVar('PV', True)
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        # An explicit revision from the command line wins over the recipe's
        rev = srcrev
    if uri.startswith('git://'):
        # git source: check out the requested revision in place
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        md5 = None
        sha256 = None
    else:
        # Non-git source: fetch into a temporary directory and overlay the
        # new files onto a fresh branch of the existing tree
        __run('git checkout -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            md5, sha256 = scriptutils.fetch_uri(tinfoil.config_data, uri, tmpdir, rev)
        except bb.fetch2.FetchError as e:
            raise DevtoolError(e)

        tmpsrctree = _get_srctree(tmpdir)

        # NOTE(review): 'scrtree' looks like a typo for 'srctree' and the
        # result is never used
        scrtree = os.path.abspath(srctree)

        _copy_source_code(tmpsrctree, srctree)

        # Stage everything the overlay changed or added
        (stdout,_) = __run('git ls-files --modified --others --exclude-standard')
        for f in stdout.splitlines():
            __run('git add "%s"' % f)

        __run('git commit -q -m "Commit of upstream changes at version %s" --allow-empty' % newpv)
        __run('git tag -f devtool-base-%s' % newpv)

    # Record the revision of the new (unpatched) base
    (stdout, _) = __run('git rev-parse HEAD')
    rev = stdout.rstrip()

    if no_patch:
        # User opted out of patch application - just report what is skipped
        patches = oe.recipeutils.get_recipe_patches(crd)
        if len(patches):
            logger.warn('By user choice, the following patches will NOT be applied')
            for patch in patches:
                logger.warn("%s" % os.path.basename(patch))
    else:
        # Rebase the previously-applied patches onto the new base revision;
        # a rebase failure is reported but not fatal (user resolves manually)
        try:
            __run('git checkout devtool-patched -b %s' % branch)
            __run('git rebase %s' % rev)
            if uri.startswith('git://'):
                suffix = 'new'
            else:
                suffix = newpv
            __run('git tag -f devtool-patched-%s' % suffix)
        except bb.process.ExecutionError as e:
            logger.warn('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)

    return (rev, md5, sha256)
+
def _create_new_recipe(newpv, md5, sha256, workspace, rd):
    """Creates the new recipe under workspace

    Copies the recipe files into <workspace>/recipes/<bpn>, renames the
    versioned file/directories to *newpv*, and updates PV and the SRC_URI
    checksums where applicable. Returns the path of the new recipe file.
    """
    crd = rd.createCopy()

    bpn = crd.getVar('BPN', True)
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    oe.recipeutils.copy_recipe_files(crd, path)

    oldpv = crd.getVar('PV', True)
    if not newpv:
        newpv = oldpv
    fullpath = _rename_recipe_files(bpn, oldpv, newpv, path)

    if _recipe_contains(fullpath, 'PV') and newpv != oldpv:
        # BUG FIX: patch_recipe was passed the undefined name 'd'
        # (NameError); the datastore in scope here is the copy 'crd'
        oe.recipeutils.patch_recipe(crd, fullpath, {'PV':newpv})

    if md5 and sha256:
        # Unfortunately, oe.recipeutils.patch_recipe cannot update flags.
        # once the latter feature is implemented, we should call patch_recipe
        # instead of the following function
        _replace_checksums(fullpath, md5, sha256)

    return fullpath
+
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand

    Extracts the current version of the recipe's source, fetches the new
    version on top of it, rebases the recipe's patches and writes an
    upgraded recipe plus bbappend into the workspace.

    Returns 0 on success, 1 if the recipe could not be parsed; raises
    DevtoolError on user error or upgrade failure.
    """

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if not args.version and not args.srcrev:
        raise DevtoolError("You must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option")

    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        raise DevtoolError(reason)

    tinfoil = setup_tinfoil()

    rd = parse_recipe(config, tinfoil, args.recipename, True)
    if not rd:
        return 1

    standard._check_compatible_recipe(args.recipename, rd)
    if rd.getVar('PV', True) == args.version and rd.getVar('SRCREV', True) == args.srcrev:
        # BUG FIX: the message used '%' with no conversion specifier and an
        # undefined name 'version', which raised instead of reporting
        raise DevtoolError("Current and upgrade versions are the same version (%s)" % args.version)

    rf = None
    try:
        # Extract the old version first so the new one can be layered on top
        rev1 = standard._extract_source(args.srctree, False, 'devtool-orig', rd)
        rev2, md5, sha256 = _extract_new_source(args.version, args.srctree, args.no_patch,
                                                args.srcrev, args.branch, args.keep_temp,
                                                tinfoil, rd)
        rf = _create_new_recipe(args.version, md5, sha256, config.workspace_path, rd)
    except bb.process.CmdError as e:
        _upgrade_error(e, rf, args.srctree)
    except DevtoolError as e:
        _upgrade_error(e, rf, args.srctree)
    standard._add_md5(config, args.recipename, os.path.dirname(rf))

    af = _write_append(rf, args.srctree, args.same_dir, args.no_same_dir, rev2,
                       config.workspace_path, rd)
    standard._add_md5(config, args.recipename, af)
    logger.info('Upgraded source extracted to %s' % args.srctree)
    return 0
+
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # 'upgrade' - move an existing recipe to a new upstream version
    parser = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                   description='Upgrades an existing recipe to a new upstream version')
    parser.add_argument('recipename', help='Name for recipe to extract the source for')
    parser.add_argument('srctree', help='Path to where to extract the source tree')
    parser.add_argument('--version', '-V', help='Version to upgrade to (PV)')
    parser.add_argument('--srcrev', '-S', help='Source revision to upgrade to (if fetching from an SCM such as git)')
    parser.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    exclusive = parser.add_mutually_exclusive_group()
    exclusive.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    exclusive.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser.set_defaults(func=upgrade)