Squashed 'yocto-poky/' content from commit ea562de

git-subtree-dir: yocto-poky
git-subtree-split: ea562de57590c966cd5a75fda8defecd397e6436
diff --git a/meta/lib/oeqa/__init__.py b/meta/lib/oeqa/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/meta/lib/oeqa/__init__.py
diff --git a/meta/lib/oeqa/controllers/__init__.py b/meta/lib/oeqa/controllers/__init__.py
new file mode 100644
index 0000000..8eda927
--- /dev/null
+++ b/meta/lib/oeqa/controllers/__init__.py
@@ -0,0 +1,3 @@
+# Enable other layers to have modules in the same named directory
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/masterimage.py
new file mode 100644
index 0000000..522f9eb
--- /dev/null
+++ b/meta/lib/oeqa/controllers/masterimage.py
@@ -0,0 +1,201 @@
+# Copyright (C) 2014 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# This module adds support to testimage.bbclass to deploy images and run
+# tests using a "master image" - this is a "known good" image that is
+# installed onto the device as part of initial setup and will be booted into
+# with no interaction; we can then use it to deploy the image to be tested
+# to a second partition before running the tests.
+#
+# For an example master image, see core-image-testmaster
+# (meta/recipes-extended/images/core-image-testmaster.bb)
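+#
+# Illustrative local.conf settings (example values only; TEST_TARGET is assumed
+# to be read by testimage.bbclass to select this controller, while the other
+# variables are read by this module):
+#   TEST_TARGET = "GummibootTarget"
+#   TEST_TARGET_IP = "192.168.7.2"
+#   TEST_SERVER_IP = "192.168.7.1"
+#   TEST_POWERCONTROL_CMD = "/home/user/myscripts/powercontrol.py ${MACHINE}"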
+
+import os
+import bb
+import traceback
+import time
+import subprocess
+
+import oeqa.targetcontrol
+import oeqa.utils.sshcontrol as sshcontrol
+import oeqa.utils.commands as commands
+from oeqa.utils import CommandError
+
+from abc import ABCMeta, abstractmethod
+
+class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget):
+
+    __metaclass__ = ABCMeta
+
+    supported_image_fstypes = ['tar.gz', 'tar.bz2']
+
+    def __init__(self, d):
+        super(MasterImageHardwareTarget, self).__init__(d)
+
+        # target ip
+        addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
+        self.ip = addr.split(":")[0]
+        try:
+            self.port = addr.split(":")[1]
+        except IndexError:
+            self.port = None
+        bb.note("Target IP: %s" % self.ip)
+        self.server_ip = d.getVar("TEST_SERVER_IP", True)
+        if not self.server_ip:
+            try:
+                self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1]
+            except Exception as e:
+                bb.fatal("Failed to determine the host IP address (alternatively you can set TEST_SERVER_IP with the IP address of this machine): %s" % e)
+        bb.note("Server IP: %s" % self.server_ip)
+
+        # test rootfs + kernel
+        self.image_fstype = self.get_image_fstype(d)
+        self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype)
+        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
+        if not os.path.isfile(self.rootfs):
+            # we could've checked that IMAGE_FSTYPES contains tar.gz but the config for running testimage might not be
+            # the same as the config with which the image was built, i.e.
+            # you bitbake core-image-sato with IMAGE_FSTYPES += "tar.gz"
+            # and your autobuilder overwrites the config, adds the test bits and runs bitbake core-image-sato -c testimage
+            bb.fatal("No rootfs found. Did you build the image ?\nIf yes, did you build it with IMAGE_FSTYPES += \"tar.gz\" ? \
+                      \nExpected path: %s" % self.rootfs)
+        if not os.path.isfile(self.kernel):
+            bb.fatal("No kernel found. Expected path: %s" % self.kernel)
+
+        # master ssh connection
+        self.master = None
+        # if the user knows what they are doing, then by all means...
+        self.user_cmds = d.getVar("TEST_DEPLOY_CMDS", True)
+        self.deploy_cmds = None
+
+        # this is the name of the command that controls the power for a board
+        # e.g.: TEST_POWERCONTROL_CMD = "/home/user/myscripts/powercontrol.py ${MACHINE} what-ever-other-args-the-script-wants"
+        # the command should accept "off", "on" or "cycle" (off, then on) as its last argument
+        self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD", True) or None
+        self.powercontrol_args = d.getVar("TEST_POWERCONTROL_EXTRA_ARGS", False) or ""
+
+        self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD", True) or None
+        self.serialcontrol_args = d.getVar("TEST_SERIALCONTROL_EXTRA_ARGS", False) or ""
+
+        self.origenv = os.environ
+        if self.powercontrol_cmd or self.serialcontrol_cmd:
+            # the external script for controlling power might use ssh
+            # ssh + keys means we need the original user env
+            bborigenv = d.getVar("BB_ORIGENV", False) or {}
+            for key in bborigenv:
+                val = bborigenv.getVar(key, True)
+                if val is not None:
+                    self.origenv[key] = str(val)
+
+        if self.powercontrol_cmd:
+            if self.powercontrol_args:
+                self.powercontrol_cmd = "%s %s" % (self.powercontrol_cmd, self.powercontrol_args)
+        if self.serialcontrol_cmd:
+            if self.serialcontrol_args:
+                self.serialcontrol_cmd = "%s %s" % (self.serialcontrol_cmd, self.serialcontrol_args)
+
+    def power_ctl(self, msg):
+        if self.powercontrol_cmd:
+            cmd = "%s %s" % (self.powercontrol_cmd, msg)
+            try:
+                commands.runCmd(cmd, assert_error=False, preexec_fn=os.setsid, env=self.origenv)
+            except CommandError as e:
+                bb.fatal(str(e))
+
+    def power_cycle(self, conn):
+        if self.powercontrol_cmd:
+            # be nice, don't just cut power
+            conn.run("shutdown -h now")
+            time.sleep(10)
+            self.power_ctl("cycle")
+        else:
+            status, output = conn.run("reboot")
+            if status != 0:
+                bb.error("Failed rebooting target and no power control command defined. You need to manually reset the device.\n%s" % output)
+
+    def _wait_until_booted(self):
+        ''' Waits until the target device has booted (if we have just power cycled it) '''
+        # Subclasses with better methods of determining boot can override this
+        time.sleep(120)
+
+    def deploy(self):
+        # base class just sets the ssh log file for us
+        super(MasterImageHardwareTarget, self).deploy()
+        self.master = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port)
+        status, output = self.master.run("cat /etc/masterimage")
+        if status != 0:
+            # We're not booted into the master image, so try rebooting
+            bb.plain("%s - booting into the master image" % self.pn)
+            self.power_ctl("cycle")
+            self._wait_until_booted()
+
+        bb.plain("%s - deploying image on target" % self.pn)
+        status, output = self.master.run("cat /etc/masterimage")
+        if status != 0:
+            bb.fatal("No ssh connectivity or target isn't running a master image.\n%s" % output)
+        if self.user_cmds:
+            self.deploy_cmds = self.user_cmds.split("\n")
+        try:
+            self._deploy()
+        except Exception as e:
+            bb.fatal("Failed deploying test image: %s" % e)
+
+    @abstractmethod
+    def _deploy(self):
+        pass
+
+    def start(self, params=None):
+        bb.plain("%s - boot test image on target" % self.pn)
+        self._start()
+        # set the ssh object for the target/test image
+        self.connection = sshcontrol.SSHControl(self.ip, logfile=self.sshlog, port=self.port)
+        bb.plain("%s - start running tests" % self.pn)
+
+    @abstractmethod
+    def _start(self):
+        pass
+
+    def stop(self):
+        bb.plain("%s - reboot/powercycle target" % self.pn)
+        self.power_cycle(self.connection)
+
+
+class GummibootTarget(MasterImageHardwareTarget):
+
+    def __init__(self, d):
+        super(GummibootTarget, self).__init__(d)
+        # this is the value we need to set in the LoaderEntryOneShot EFI variable
+        # so the system boots the 'test' bootloader label and not the default
+        # The first four bytes are EFI attribute bits, and the rest is a utf-16le string
+        # (EFI vars values need to be utf-16)
+        # $ echo -en "test\0" | iconv -f ascii -t utf-16le | hexdump -C
+        # 00000000  74 00 65 00 73 00 74 00  00 00                    |t.e.s.t...|
+        self.efivarvalue = r'\x07\x00\x00\x00\x74\x00\x65\x00\x73\x00\x74\x00\x00\x00'
+        self.deploy_cmds = [
+                'mount -L boot /boot',
+                'mkdir -p /mnt/testrootfs',
+                'mount -L testrootfs /mnt/testrootfs',
+                'modprobe efivarfs',
+                'mount -t efivarfs efivarfs /sys/firmware/efi/efivars',
+                'cp ~/test-kernel /boot',
+                'rm -rf /mnt/testrootfs/*',
+                'tar xvf ~/test-rootfs.%s -C /mnt/testrootfs' % self.image_fstype,
+                'printf "%s" > /sys/firmware/efi/efivars/LoaderEntryOneShot-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f' % self.efivarvalue
+                ]
+
+    def _deploy(self):
+        # make sure these aren't mounted
+        self.master.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;")
+        # from now on, every deploy cmd should return 0
+        # else an exception will be thrown by sshcontrol
+        self.master.ignore_status = False
+        self.master.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype)
+        self.master.copy_to(self.kernel, "~/test-kernel")
+        for cmd in self.deploy_cmds:
+            self.master.run(cmd)
+
+    def _start(self, params=None):
+        self.power_cycle(self.master)
+        # there are better ways than a timeout but this should work for now
+        time.sleep(120)
diff --git a/meta/lib/oeqa/controllers/testtargetloader.py b/meta/lib/oeqa/controllers/testtargetloader.py
new file mode 100644
index 0000000..a1b7b1d
--- /dev/null
+++ b/meta/lib/oeqa/controllers/testtargetloader.py
@@ -0,0 +1,70 @@
+import types
+import bb
+import os
+
+# This class is responsible for loading a test target controller
+class TestTargetLoader:
+
+    # Search the oeqa.controllers module directories for a controller
+    # corresponding to the given target name and return it.
+    # AttributeError is raised if not found.
+    # ImportError is raised if a provided module can not be imported.
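+    # Illustrative usage (names are examples only; bbpath is expected to be a
+    # list of layer paths, e.g. BBPATH split on ':'):
+    #   loader = TestTargetLoader()
+    #   controller = loader.get_controller_module("GummibootTarget", bbpath)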
+    def get_controller_module(self, target, bbpath):
+        controllerslist = self.get_controller_modulenames(bbpath)
+        bb.note("Available controller modules: %s" % str(controllerslist))
+        controller = self.load_controller_from_name(target, controllerslist)
+        return controller
+
+    # Return a list of all python modules in lib/oeqa/controllers for each
+    # layer in bbpath
+    def get_controller_modulenames(self, bbpath):
+
+        controllerslist = []
+
+        def add_controller_list(path):
+            if not os.path.exists(os.path.join(path, '__init__.py')):
+                bb.fatal('Controllers directory %s exists but is missing __init__.py' % path)
+            files = sorted([f for f in os.listdir(path) if f.endswith('.py') and not f.startswith('_')])
+            for f in files:
+                module = 'oeqa.controllers.' + f[:-3]
+                if module not in controllerslist:
+                    controllerslist.append(module)
+                else:
+                    bb.warn("Duplicate controller module found for %s, only one added. Layers should create unique controller module names" % module)
+
+        for p in bbpath:
+            controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers')
+            bb.debug(2, 'Searching for target controllers in %s' % controllerpath)
+            if os.path.exists(controllerpath):
+                add_controller_list(controllerpath)
+        return controllerslist
+
+    # Search for and return a controller from given target name and
+    # set of module names. 
+    # Raise AttributeError if not found.
+    # Raise ImportError if a provided module can not be imported
+    def load_controller_from_name(self, target, modulenames):
+        for name in modulenames:
+            obj = self.load_controller_from_module(target, name)
+            if obj:
+                return obj
+        raise AttributeError("Unable to load {0} from available modules: {1}".format(target, str(modulenames)))
+
+    # Search for and return a controller or None from given module name
+    def load_controller_from_module(self, target, modulename):
+        obj = None
+        # import module, allowing it to raise import exception
+        module = __import__(modulename, globals(), locals(), [target])
+        # look for target class in the module, catching any exceptions as it
+        # is valid that a module may not have the target class.
+        try:
+            obj = getattr(module, target)
+            if obj: 
+                from oeqa.targetcontrol import BaseTarget
+                if (not isinstance(obj, (type, types.ClassType))):
+                    bb.warn("Target {0} found, but it is not a class".format(target))
+                if (not issubclass(obj, BaseTarget)):
+                    bb.warn("Target {0} found, but it is not a subclass of BaseTarget".format(target))
+        except:
+            obj = None
+        return obj
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
new file mode 100644
index 0000000..0fe68d4
--- /dev/null
+++ b/meta/lib/oeqa/oetest.py
@@ -0,0 +1,214 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# Main unittest module used by testimage.bbclass
+# This provides the oeRuntimeTest base class which is inherited by all tests in meta/lib/oeqa/runtime.
+
+# It also has some helper functions and it's responsible for actually starting the tests
+
+import os, re, mmap
+import unittest
+import inspect
+import subprocess
+import bb
+from oeqa.utils.decorators import LogResults, gettag
+from sys import exc_info, exc_clear
+
+def getVar(obj):
+    # Extends dict: if a variable doesn't exist here, look it up on the test case (via gettag)
+    class VarDict(dict):
+        def __getitem__(self, key):
+            return gettag(obj, key)
+    return VarDict()
+
+def checkTags(tc, tagexp):
+    return eval(tagexp, None, getVar(tc))
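+
+# Illustrative example (assumes a test decorated with the tag() decorator from
+# oeqa.utils.decorators, e.g. @tag(feature="systemd")): an expression string
+# such as 'feature == "systemd"' is evaluated here, with unknown names resolved
+# through gettag() on the test case object:
+#   checkTags(testcase, 'feature == "systemd"')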
+
+
+def filterByTagExp(testsuite, tagexp):
+    if not tagexp:
+        return testsuite
+    caseList = []
+    for each in testsuite:
+        if not isinstance(each, unittest.BaseTestSuite):
+            if checkTags(each, tagexp):
+                caseList.append(each)
+        else:
+            caseList.append(filterByTagExp(each, tagexp))
+    return testsuite.__class__(caseList)
+
+def loadTests(tc, type="runtime"):
+    if type == "runtime":
+        # set the context object passed from the test class
+        setattr(oeTest, "tc", tc)
+        # set ps command to use
+        setattr(oeRuntimeTest, "pscmd", "ps -ef" if oeTest.hasPackage("procps") else "ps")
+        # prepare test suite, loader and runner
+        suite = unittest.TestSuite()
+    elif type == "sdk":
+        # set the context object passed from the test class
+        setattr(oeTest, "tc", tc)
+    testloader = unittest.TestLoader()
+    testloader.sortTestMethodsUsing = None
+    suites = [testloader.loadTestsFromName(name) for name in tc.testslist]
+    suites = filterByTagExp(suites, getattr(tc, "tagexp", None))
+
+    def getTests(test):
+        '''Return all individual tests executed when running the suite.'''
+        # Unfortunately unittest does not have an API for this, so we have
+        # to rely on implementation details. This only needs to work
+        # for TestSuite containing TestCase.
+        method = getattr(test, '_testMethodName', None)
+        if method:
+            # leaf case: a TestCase
+            yield test
+        else:
+            # Look into TestSuite.
+            tests = getattr(test, '_tests', [])
+            for t1 in tests:
+                for t2 in getTests(t1):
+                    yield t2
+
+    # Determine dependencies between suites by looking for @skipUnlessPassed
+    # method annotations. Suite A depends on suite B if any method in A
+    # depends on a method on B.
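+    # For example, a case decorated with @skipUnlessPassed('test_ssh') makes its
+    # suite depend on whichever suite contains test_ssh, so that suite is
+    # ordered (and run) first.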
+    for suite in suites:
+        suite.dependencies = []
+        suite.depth = 0
+        for test in getTests(suite):
+            methodname = getattr(test, '_testMethodName', None)
+            if methodname:
+                method = getattr(test, methodname)
+                depends_on = getattr(method, '_depends_on', None)
+                if depends_on:
+                    for dep_suite in suites:
+                        if depends_on in [getattr(t, '_testMethodName', None) for t in getTests(dep_suite)]:
+                            if dep_suite not in suite.dependencies and \
+                               dep_suite is not suite:
+                                suite.dependencies.append(dep_suite)
+                            break
+                    else:
+                        bb.warn("Test %s was declared as @skipUnlessPassed('%s') but that test is either not defined or not active. Will run the test anyway." %
+                                (test, depends_on))
+    # Use brute-force topological sort to determine ordering. Sort by
+    # depth (higher depth = must run later), with original ordering to
+    # break ties.
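+    # e.g. if suite B depends on suite A (and A on nothing), A keeps depth 0 and
+    # B gets depth 1, so A is ordered before B.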
+    def set_suite_depth(suite):
+        for dep in suite.dependencies:
+            new_depth = set_suite_depth(dep) + 1
+            if new_depth > suite.depth:
+                suite.depth = new_depth
+        return suite.depth
+    for index, suite in enumerate(suites):
+        set_suite_depth(suite)
+        suite.index = index
+    suites.sort(cmp=lambda a,b: cmp((a.depth, a.index), (b.depth, b.index)))
+    return testloader.suiteClass(suites)
+
+def runTests(tc, type="runtime"):
+
+    suite = loadTests(tc, type)
+    bb.note("Test modules  %s" % tc.testslist)
+    if hasattr(tc, "tagexp") and tc.tagexp:
+        bb.note("Filter test cases by tags: %s" % tc.tagexp)
+    bb.note("Found %s tests" % suite.countTestCases())
+    runner = unittest.TextTestRunner(verbosity=2)
+    result = runner.run(suite)
+
+    return result
+
+@LogResults
+class oeTest(unittest.TestCase):
+
+    longMessage = True
+
+    @classmethod
+    def hasPackage(self, pkg):
+        for item in oeTest.tc.pkgmanifest.split('\n'):
+            if re.match(pkg, item):
+                return True
+        return False
+
+    @classmethod
+    def hasFeature(self,feature):
+
+        if feature in oeTest.tc.imagefeatures or \
+                feature in oeTest.tc.distrofeatures:
+            return True
+        else:
+            return False
+
+class oeRuntimeTest(oeTest):
+    def __init__(self, methodName='runTest'):
+        self.target = oeRuntimeTest.tc.target
+        super(oeRuntimeTest, self).__init__(methodName)
+
+    def setUp(self):
+        # Check if test needs to run
+        if self.tc.sigterm:
+            self.fail("Got SIGTERM")
+        elif (type(self.target).__name__ == "QemuTarget"):
+            self.assertTrue(self.target.check(), msg = "Qemu not running?")
+
+    def tearDown(self):
+        # If a test fails or there is an exception
+        if not exc_info() == (None, None, None):
+            exc_clear()
+            #Only dump for QemuTarget
+            if (type(self.target).__name__ == "QemuTarget"):
+                self.tc.host_dumper.create_dir(self._testMethodName)
+                self.tc.host_dumper.dump_host()
+                self.target.target_dumper.dump_target(
+                        self.tc.host_dumper.dump_dir)
+                print ("%s dump data stored in %s" % (self._testMethodName,
+                         self.tc.host_dumper.dump_dir))
+
+    #TODO: use package_manager.py to install packages on any type of image
+    def install_packages(self, packagelist):
+        for package in packagelist:
+            (status, result) = self.target.run("smart install -y "+package)
+            if status != 0:
+                return status
+
+class oeSDKTest(oeTest):
+    def __init__(self, methodName='runTest'):
+        self.sdktestdir = oeSDKTest.tc.sdktestdir
+        super(oeSDKTest, self).__init__(methodName)
+
+    @classmethod
+    def hasHostPackage(self, pkg):
+
+        if re.search(pkg, oeTest.tc.hostpkgmanifest):
+            return True
+        return False
+
+    def _run(self, cmd):
+        return subprocess.check_output(cmd, shell=True)
+
+def getmodule(pos=2):
+    # stack returns a list of tuples containing frame information
+    # The first element of the list is the current frame; the caller is at index 1
+    frameinfo = inspect.stack()[pos]
+    modname = inspect.getmodulename(frameinfo[1])
+    #modname = inspect.getmodule(frameinfo[0]).__name__
+    return modname
+
+def skipModule(reason, pos=2):
+    modname = getmodule(pos)
+    if modname not in oeTest.tc.testsrequired:
+        raise unittest.SkipTest("%s: %s" % (modname, reason))
+    else:
+        raise Exception("\nTest %s wants to be skipped.\nReason is: %s" \
+                "\nTest was required in TEST_SUITES, so either the condition for skipping is wrong" \
+                "\nor the image really doesn't have the required feature/package when it should." % (modname, reason))
+
+def skipModuleIf(cond, reason):
+
+    if cond:
+        skipModule(reason, 3)
+
+def skipModuleUnless(cond, reason):
+
+    if not cond:
+        skipModule(reason, 3)
diff --git a/meta/lib/oeqa/runexported.py b/meta/lib/oeqa/runexported.py
new file mode 100755
index 0000000..96442b1
--- /dev/null
+++ b/meta/lib/oeqa/runexported.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+
+
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# This script should be used outside of the build system to run image tests.
+# It takes as input a json file exported by the build.
+# E.g. for an already built image:
+# - export the tests:
+#   TEST_EXPORT_ONLY = "1"
+#   TEST_TARGET  = "simpleremote"
+#   TEST_TARGET_IP = "192.168.7.2"
+#   TEST_SERVER_IP = "192.168.7.1"
+# bitbake core-image-sato -c testimage
+# Setup your target, e.g for qemu: runqemu core-image-sato
+# cd build/tmp/testimage/core-image-sato
+# ./runexported.py testdata.json
+
+import sys
+import os
+import time
+from optparse import OptionParser
+
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "oeqa")))
+
+from oeqa.oetest import runTests
+from oeqa.utils.sshcontrol import SSHControl
+from oeqa.utils.dump import get_host_dumper
+
+# this isn't pretty but we need a fake target object
+# for running the tests externally; we don't care about deploy/start,
+# only about the connection methods (run, copy)
+class FakeTarget(object):
+    def __init__(self, d):
+        self.connection = None
+        self.ip = None
+        self.server_ip = None
+        self.datetime = time.strftime('%Y%m%d%H%M%S',time.gmtime())
+        self.testdir = d.getVar("TEST_LOG_DIR", True)
+        self.pn = d.getVar("PN", True)
+
+    def exportStart(self):
+        self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime)
+        sshloglink = os.path.join(self.testdir, "ssh_target_log")
+        if os.path.islink(sshloglink):
+            os.unlink(sshloglink)
+        os.symlink(self.sshlog, sshloglink)
+        print("SSH log file: %s" %  self.sshlog)
+        self.connection = SSHControl(self.ip, logfile=self.sshlog)
+
+    def run(self, cmd, timeout=None):
+        return self.connection.run(cmd, timeout)
+
+    def copy_to(self, localpath, remotepath):
+        return self.connection.copy_to(localpath, remotepath)
+
+    def copy_from(self, remotepath, localpath):
+        return self.connection.copy_from(remotepath, localpath)
+
+
+class MyDataDict(dict):
+    def getVar(self, key, unused = None):
+        return self.get(key, "")
+
+class TestContext(object):
+    def __init__(self):
+        self.d = None
+        self.target = None
+
+def main():
+
+    usage = "usage: %prog [options] <json file>"
+    parser = OptionParser(usage=usage)
+    parser.add_option("-t", "--target-ip", dest="ip", help="The IP address of the target machine. Use this to \
+            overwrite the value determined from TEST_TARGET_IP at build time")
+    parser.add_option("-s", "--server-ip", dest="server_ip", help="The IP address of this machine. Use this to \
+            overwrite the value determined from TEST_SERVER_IP at build time.")
+    parser.add_option("-d", "--deploy-dir", dest="deploy_dir", help="Full path to the package feeds, that this \
+            the contents of what used to be DEPLOY_DIR on the build machine. If not specified it will use the value \
+            specified in the json if that directory actually exists or it will error out.")
+    parser.add_option("-l", "--log-dir", dest="log_dir", help="This sets the path for TEST_LOG_DIR. If not specified \
+            the current dir is used. This is used for usually creating a ssh log file and a scp test file.")
+
+    (options, args) = parser.parse_args()
+    if len(args) != 1:
+        parser.error("Incorrect number of arguments. The one and only argument should be a json file exported by the build system")
+
+    with open(args[0], "r") as f:
+        loaded = json.load(f)
+
+    if options.ip:
+        loaded["target"]["ip"] = options.ip
+    if options.server_ip:
+        loaded["target"]["server_ip"] = options.server_ip
+
+    d = MyDataDict()
+    for key in loaded["d"].keys():
+        d[key] = loaded["d"][key]
+
+    if options.log_dir:
+        d["TEST_LOG_DIR"] = options.log_dir
+    else:
+        d["TEST_LOG_DIR"] = os.path.abspath(os.path.dirname(__file__))
+    if options.deploy_dir:
+        d["DEPLOY_DIR"] = options.deploy_dir
+    else:
+        if not os.path.isdir(d["DEPLOY_DIR"]):
+            raise Exception("The path to DEPLOY_DIR does not exists: %s" % d["DEPLOY_DIR"])
+
+
+    target = FakeTarget(d)
+    for key in loaded["target"].keys():
+        setattr(target, key, loaded["target"][key])
+
+    host_dumper = get_host_dumper(d)
+    host_dumper.parent_dir = loaded["host_dumper"]["parent_dir"]
+    host_dumper.cmds = loaded["host_dumper"]["cmds"]
+
+    tc = TestContext()
+    setattr(tc, "d", d)
+    setattr(tc, "target", target)
+    setattr(tc, "host_dumper", host_dumper)
+    for key in loaded.keys():
+        if key != "d" and key != "target" and key != "host_dumper":
+            setattr(tc, key, loaded[key])
+
+    target.exportStart()
+    runTests(tc)
+
+    return 0
+
+if __name__ == "__main__":
+    try:
+        ret = main()
+    except Exception:
+        ret = 1
+        import traceback
+        traceback.print_exc(5)
+    sys.exit(ret)
diff --git a/meta/lib/oeqa/runtime/__init__.py b/meta/lib/oeqa/runtime/__init__.py
new file mode 100644
index 0000000..4cf3fa7
--- /dev/null
+++ b/meta/lib/oeqa/runtime/__init__.py
@@ -0,0 +1,3 @@
+# Enable other layers to have tests in the same named directory
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/meta/lib/oeqa/runtime/_ptest.py b/meta/lib/oeqa/runtime/_ptest.py
new file mode 100644
index 0000000..81c9c43
--- /dev/null
+++ b/meta/lib/oeqa/runtime/_ptest.py
@@ -0,0 +1,125 @@
+import unittest, os, shutil
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.logparser import *
+from oeqa.utils.httpserver import HTTPService
+import bb
+import glob
+from oe.package_manager import RpmPkgsList
+import subprocess
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("package-management"):
+        skipModule("Image doesn't have package management feature")
+    if not oeRuntimeTest.hasPackage("smart"):
+        skipModule("Image doesn't have smart installed")
+    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+        skipModule("Rpm is not the primary package manager")
+
+class PtestRunnerTest(oeRuntimeTest):
+
+    # a ptest log parser
+    def parse_ptest(self, logfile):
+        parser = Lparser(test_0_pass_regex="^PASS:(.+)", test_0_fail_regex="^FAIL:(.+)", section_0_begin_regex="^BEGIN: .*/(.+)/ptest", section_0_end_regex="^END: .*/(.+)/ptest")
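+        # The regexes above match ptest-runner output lines of the form
+        # "BEGIN: <dir>/<package>/ptest", "PASS: <testname>", "FAIL: <testname>"
+        # and "END: <dir>/<package>/ptest".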
+        parser.init()
+        result = Result()
+
+        with open(logfile) as f:
+            for line in f:
+                result_tuple = parser.parse_line(line)
+                if not result_tuple:
+                    continue
+                line_type, category, status, name = result_tuple
+
+                if line_type == 'section' and status == 'begin':
+                    current_section = name
+                    continue
+
+                if line_type == 'section' and status == 'end':
+                    current_section = None
+                    continue
+
+                if line_type == 'test' and status == 'pass':
+                    result.store(current_section, name, status)
+                    continue
+
+                if line_type == 'test' and status == 'fail':
+                    result.store(current_section, name, status)
+                    continue
+
+        result.sort_tests()
+        return result
+
+    @classmethod
+    def setUpClass(self):
+        #note the existing channels that are on the board before creating new ones
+#        self.existingchannels = set()
+#        (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0)
+#        for x in result.split("\n"):
+#            self.existingchannels.add(x)
+        self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip)
+        self.repo_server.start()
+
+    @classmethod
+    def tearDownClass(self):
+        self.repo_server.stop()
+        #remove created channels to be able to repeat the tests on same image
+#        (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0)
+#        for x in result.split("\n"):
+#            if x not in self.existingchannels:
+#                oeRuntimeTest.tc.target.run('smart channel --remove '+x[1:-1]+' -y', 0)
+
+    def add_smart_channel(self):
+        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True)
+        deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype)
+        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split()
+        for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)):
+            if arch in pkgarchs:
+                self.target.run('smart channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url), 0)
+        self.target.run('smart update', 0)
+
+    def install_complementary(self, globs=None):
+        installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True),
+                                           "installed_pkgs.txt")
+        self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS', True), oeRuntimeTest.tc.d.getVar('arch_var', True), oeRuntimeTest.tc.d.getVar('os_var', True))
+        with open(installed_pkgs_file, "w+") as installed_pkgs:
+            installed_pkgs.write(self.pkgs_list.list("arch"))
+
+        cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
+               "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
+               globs]
+        try:
+            bb.note("Installing complementary packages ...")
+            complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            bb.fatal("Could not compute complementary packages list. Command "
+                     "'%s' returned %d:\n%s" %
+                     (' '.join(cmd), e.returncode, e.output))
+
+        return complementary_pkgs.split()
+
+    def setUp(self):
+        self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True))
+
+    @skipUnlessPassed('test_ssh')
+    def test_ptestrunner(self):
+        self.add_smart_channel()
+        (runnerstatus, result) = self.target.run('which ptest-runner', 0)
+        cond = oeRuntimeTest.hasPackage("ptest-runner") and oeRuntimeTest.hasFeature("ptest") and oeRuntimeTest.hasPackage("-ptest") and (runnerstatus != 0)
+        if cond:
+            self.install_packages(self.install_complementary("*-ptest"))
+            self.install_packages(['ptest-runner'])
+
+        (runnerstatus, result) = self.target.run('/usr/bin/ptest-runner > /tmp/ptest.log 2>&1', 0)
+        #exit code is !=0 even if ptest-runner executes because some ptest tests fail.
+        self.assertTrue(runnerstatus != 127, msg="Cannot execute ptest-runner!")
+        self.target.copy_from('/tmp/ptest.log', self.ptest_log)
+        shutil.copyfile(self.ptest_log, "ptest.log")
+
+        result = self.parse_ptest("ptest.log")
+        log_results_to_location = "./results"
+        if os.path.exists(log_results_to_location):
+            shutil.rmtree(log_results_to_location)
+        os.makedirs(log_results_to_location)
+
+        result.log_as_files(log_results_to_location, test_status = ['pass','fail'])
diff --git a/meta/lib/oeqa/runtime/_qemutiny.py b/meta/lib/oeqa/runtime/_qemutiny.py
new file mode 100644
index 0000000..a3c29f3
--- /dev/null
+++ b/meta/lib/oeqa/runtime/_qemutiny.py
@@ -0,0 +1,9 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.qemutinyrunner import *
+
+class QemuTinyTest(oeRuntimeTest):
+
+    def test_boot_tiny(self):
+        (status, output) = self.target.run_serial('uname -a')
+        self.assertTrue("yocto-tiny" in output, msg="Cannot detect poky tiny boot!")
\ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/buildcvs.py b/meta/lib/oeqa/runtime/buildcvs.py
new file mode 100644
index 0000000..fe6cbfb
--- /dev/null
+++ b/meta/lib/oeqa/runtime/buildcvs.py
@@ -0,0 +1,31 @@
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import TargetBuildProject
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+class BuildCvsTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d,
+                        "http://ftp.gnu.org/non-gnu/cvs/source/feature/1.12.13/cvs-1.12.13.tar.bz2")
+        self.project.download_archive()
+
+    @testcase(205)
+    @skipUnlessPassed("test_ssh")
+    def test_cvs(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+        self.assertEqual(self.project.run_install(), 0,
+                        msg="Running make install failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/runtime/buildiptables.py b/meta/lib/oeqa/runtime/buildiptables.py
new file mode 100644
index 0000000..09e252d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/buildiptables.py
@@ -0,0 +1,31 @@
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import TargetBuildProject
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+class BuildIptablesTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d,
+                        "http://netfilter.org/projects/iptables/files/iptables-1.4.13.tar.bz2")
+        self.project.download_archive()
+
+    @testcase(206)
+    @skipUnlessPassed("test_ssh")
+    def test_iptables(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+        self.assertEqual(self.project.run_install(), 0,
+                        msg="Running make install failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/runtime/buildsudoku.py b/meta/lib/oeqa/runtime/buildsudoku.py
new file mode 100644
index 0000000..802b060
--- /dev/null
+++ b/meta/lib/oeqa/runtime/buildsudoku.py
@@ -0,0 +1,28 @@
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import TargetBuildProject
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+class SudokuTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d,
+                        "http://downloads.sourceforge.net/project/sudoku-savant/sudoku-savant/sudoku-savant-1.3/sudoku-savant-1.3.tar.bz2")
+        self.project.download_archive()
+
+    @testcase(207)
+    @skipUnlessPassed("test_ssh")
+    def test_sudoku(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/runtime/connman.py b/meta/lib/oeqa/runtime/connman.py
new file mode 100644
index 0000000..ee69e5d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/connman.py
@@ -0,0 +1,54 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("connman"):
+        skipModule("No connman package in image")
+
+
+class ConnmanTest(oeRuntimeTest):
+
+    def service_status(self, service):
+        if oeRuntimeTest.hasFeature("systemd"):
+            (status, output) = self.target.run('systemctl status -l %s' % service)
+            return output
+        else:
+            return "Unable to get status or logs for %s" % service
+
+    @testcase(961)
+    @skipUnlessPassed('test_ssh')
+    def test_connmand_help(self):
+        (status, output) = self.target.run('/usr/sbin/connmand --help')
+        self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output))
+
+    @testcase(221)
+    @skipUnlessPassed('test_connmand_help')
+    def test_connmand_running(self):
+        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand')
+        if status != 0:
+            print self.service_status("connman")
+            self.fail("No connmand process running")
+
+    @testcase(223)
+    def test_only_one_connmand_in_background(self):
+        """
+        Summary:     Only one connmand in background
+        Expected:    There will be only one connmand instance in background.
+        Product:     BSPs
+        Author:      Alexandru Georgescu <alexandru.c.georgescu@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        # Make sure that 'connmand' is running in background
+        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand')
+        self.assertEqual(0, status, 'Failed to find "connmand" process running in background.')
+
+        # Start a new instance of 'connmand'
+        (status, output) = self.target.run('connmand')
+        self.assertEqual(0, status, 'Failed to start a new "connmand" process.')
+
+        # Make sure that only one 'connmand' is running in background
+        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand | wc -l')
+        self.assertEqual(0, status, 'Failed to find "connmand" process running in background.')
+        self.assertEqual(1, int(output), 'Found {} connmand processes running, expected 1.'.format(output))
diff --git a/meta/lib/oeqa/runtime/date.py b/meta/lib/oeqa/runtime/date.py
new file mode 100644
index 0000000..3a8fe84
--- /dev/null
+++ b/meta/lib/oeqa/runtime/date.py
@@ -0,0 +1,31 @@
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+import re
+
+class DateTest(oeRuntimeTest):
+
+    def setUp(self):
+        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
+            self.target.run('systemctl stop systemd-timesyncd')
+
+    def tearDown(self):
+        if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
+            self.target.run('systemctl start systemd-timesyncd')
+
+    @testcase(211)
+    @skipUnlessPassed("test_ssh")
+    def test_date(self):
+        (status, output) = self.target.run('date +"%Y-%m-%d %T"')
+        self.assertEqual(status, 0, msg="Failed to get initial date, output: %s" % output)
+        oldDate = output
+
+        sampleDate = '"2016-08-09 10:00:00"'
+        (status, output) = self.target.run("date -s %s" % sampleDate)
+        self.assertEqual(status, 0, msg="Date set failed, output: %s" % output)
+
+        (status, output) = self.target.run("date -R")
+        p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output)
+        self.assertTrue(p, msg="The date was not set correctly, output: %s" % output)
+
+        (status, output) = self.target.run('date -s "%s"' % oldDate)
+        self.assertEqual(status, 0, msg="Failed to reset date, output: %s" % output)
diff --git a/meta/lib/oeqa/runtime/df.py b/meta/lib/oeqa/runtime/df.py
new file mode 100644
index 0000000..09569d5
--- /dev/null
+++ b/meta/lib/oeqa/runtime/df.py
@@ -0,0 +1,12 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+
+
+class DfTest(oeRuntimeTest):
+
+    @testcase(234)
+    @skipUnlessPassed("test_ssh")
+    def test_df(self):
+        (status,output) = self.target.run("df / | sed -n '2p' | awk '{print $4}'")
+        self.assertTrue(int(output)>5120, msg="Not enough space on image. Current size is %s" % output)
diff --git a/meta/lib/oeqa/runtime/dmesg.py b/meta/lib/oeqa/runtime/dmesg.py
new file mode 100644
index 0000000..5831471
--- /dev/null
+++ b/meta/lib/oeqa/runtime/dmesg.py
@@ -0,0 +1,12 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+
+
+class DmesgTest(oeRuntimeTest):
+
+    @testcase(215)
+    @skipUnlessPassed('test_ssh')
+    def test_dmesg(self):
+        (status, output) = self.target.run('dmesg | grep -v mmci-pl18x | grep -v "error changing net interface name" | grep -iv "dma timeout" | grep -v usbhid | grep -i error')
+        self.assertEqual(status, 1, msg = "Error messages in dmesg log: %s" % output)
diff --git a/meta/lib/oeqa/runtime/files/hellomod.c b/meta/lib/oeqa/runtime/files/hellomod.c
new file mode 100644
index 0000000..a383397
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/hellomod.c
@@ -0,0 +1,19 @@
+#include <linux/module.h>
+#include <linux/kernel.h>
+#include <linux/init.h>
+
+static int __init hello_init(void)
+{
+        printk(KERN_INFO "Hello world!\n");
+        return 0;
+}
+
+static void __exit hello_cleanup(void)
+{
+        printk(KERN_INFO "Cleaning up hellomod.\n");
+}
+
+module_init(hello_init);
+module_exit(hello_cleanup);
+
+MODULE_LICENSE("GPL");
diff --git a/meta/lib/oeqa/runtime/files/hellomod_makefile b/meta/lib/oeqa/runtime/files/hellomod_makefile
new file mode 100644
index 0000000..b92d5c8
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/hellomod_makefile
@@ -0,0 +1,8 @@
+obj-m := hellomod.o
+KDIR := /usr/src/kernel
+
+all:
+	$(MAKE) -C $(KDIR) M=$(PWD) modules
+
+clean:
+	$(MAKE) -C $(KDIR) M=$(PWD) clean
diff --git a/meta/lib/oeqa/runtime/files/test.c b/meta/lib/oeqa/runtime/files/test.c
new file mode 100644
index 0000000..2d8389c
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/test.c
@@ -0,0 +1,26 @@
+#include <stdio.h>
+#include <math.h>
+#include <stdlib.h>
+
+double convert(long long l)
+{
+  return (double)l;
+}
+
+int main(int argc, char * argv[]) {
+
+  long long l = 10;
+  double f;
+  double check = 10.0;
+
+  f = convert(l);
+  printf("convert: %lld => %f\n", l, f);
+  if ( f != check ) exit(1);
+
+  f = 1234.67;
+  check = 1234.0;
+  printf("floorf(%f) = %f\n", f, floorf(f));
+  if ( floorf(f) != check) exit(1);
+
+  return 0;
+}
diff --git a/meta/lib/oeqa/runtime/files/test.cpp b/meta/lib/oeqa/runtime/files/test.cpp
new file mode 100644
index 0000000..9e1a764
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/test.cpp
@@ -0,0 +1,3 @@
+#include <limits>
+
+int main() {}
\ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/files/test.pl b/meta/lib/oeqa/runtime/files/test.pl
new file mode 100644
index 0000000..689c8f1
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/test.pl
@@ -0,0 +1,2 @@
+$a = 9.01e+21 - 9.01e+21 + 0.01;
+print ("the value of a is ", $a, "\n");
diff --git a/meta/lib/oeqa/runtime/files/test.py b/meta/lib/oeqa/runtime/files/test.py
new file mode 100644
index 0000000..f3a2273
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/test.py
@@ -0,0 +1,6 @@
+import os
+
+os.system('touch /tmp/testfile.python')
+
+a = 9.01e+21 - 9.01e+21 + 0.01
+print "the value of a is %s" % a
diff --git a/meta/lib/oeqa/runtime/files/testmakefile b/meta/lib/oeqa/runtime/files/testmakefile
new file mode 100644
index 0000000..ca1844e
--- /dev/null
+++ b/meta/lib/oeqa/runtime/files/testmakefile
@@ -0,0 +1,5 @@
+test: test.o
+	gcc -o test test.o -lm
+test.o: test.c
+	gcc -c test.c
+
diff --git a/meta/lib/oeqa/runtime/gcc.py b/meta/lib/oeqa/runtime/gcc.py
new file mode 100644
index 0000000..d90cd17
--- /dev/null
+++ b/meta/lib/oeqa/runtime/gcc.py
@@ -0,0 +1,47 @@
+import unittest
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+
+class GccCompileTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.c"), "/tmp/test.c")
+        oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "testmakefile"), "/tmp/testmakefile")
+        oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.cpp"), "/tmp/test.cpp")
+
+    @testcase(203)
+    def test_gcc_compile(self):
+        (status, output) = self.target.run('gcc /tmp/test.c -o /tmp/test -lm')
+        self.assertEqual(status, 0, msg="gcc compile failed, output: %s" % output)
+        (status, output) = self.target.run('/tmp/test')
+        self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output)
+
+    @testcase(200)
+    def test_gpp_compile(self):
+        (status, output) = self.target.run('g++ /tmp/test.c -o /tmp/test -lm')
+        self.assertEqual(status, 0, msg="g++ compile failed, output: %s" % output)
+        (status, output) = self.target.run('/tmp/test')
+        self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output)
+
+    @testcase(1142)
+    def test_gpp2_compile(self):
+        (status, output) = self.target.run('g++ /tmp/test.cpp -o /tmp/test -lm')
+        self.assertEqual(status, 0, msg="g++ compile failed, output: %s" % output)
+        (status, output) = self.target.run('/tmp/test')
+        self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output)
+
+    @testcase(204)
+    def test_make(self):
+        (status, output) = self.target.run('cd /tmp; make -f testmakefile')
+        self.assertEqual(status, 0, msg="running make failed, output %s" % output)
+
+    @classmethod
+    def tearDownClass(self):
+        oeRuntimeTest.tc.target.run("rm /tmp/test.c /tmp/test.o /tmp/test /tmp/testmakefile")
diff --git a/meta/lib/oeqa/runtime/kernelmodule.py b/meta/lib/oeqa/runtime/kernelmodule.py
new file mode 100644
index 0000000..2e81720
--- /dev/null
+++ b/meta/lib/oeqa/runtime/kernelmodule.py
@@ -0,0 +1,34 @@
+import unittest
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+
+class KernelModuleTest(oeRuntimeTest):
+
+    def setUp(self):
+        self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod.c"), "/tmp/hellomod.c")
+        self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod_makefile"), "/tmp/Makefile")
+
+    @testcase('316')
+    @skipUnlessPassed('test_ssh')
+    @skipUnlessPassed('test_gcc_compile')
+    def test_kernel_module(self):
+        cmds = [
+            'cd /usr/src/kernel && make scripts',
+            'cd /tmp && make',
+            'cd /tmp && insmod hellomod.ko',
+            'lsmod | grep hellomod',
+            'dmesg | grep Hello',
+            'rmmod hellomod', 'dmesg | grep "Cleaning up hellomod"'
+            ]
+        for cmd in cmds:
+            (status, output) = self.target.run(cmd, 900)
+            self.assertEqual(status, 0, msg="\n".join([cmd, output]))
+
+    def tearDown(self):
+        self.target.run('rm -f /tmp/Makefile /tmp/hellomod.c')
diff --git a/meta/lib/oeqa/runtime/ldd.py b/meta/lib/oeqa/runtime/ldd.py
new file mode 100644
index 0000000..47b3885
--- /dev/null
+++ b/meta/lib/oeqa/runtime/ldd.py
@@ -0,0 +1,21 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("tools-sdk"):
+        skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES")
+
+class LddTest(oeRuntimeTest):
+
+    @testcase(962)
+    @skipUnlessPassed('test_ssh')
+    def test_ldd_exists(self):
+        (status, output) = self.target.run('which ldd')
+        self.assertEqual(status, 0, msg = "ldd does not exist in PATH: which ldd: %s" % output)
+
+    @testcase(239)
+    @skipUnlessPassed('test_ldd_exists')
+    def test_ldd_rtldlist_check(self):
+        (status, output) = self.target.run('for i in $(which ldd | xargs cat | grep "^RTLDLIST"|cut -d\'=\' -f2|tr -d \'"\'); do test -f $i && echo $i && break; done')
+        self.assertEqual(status, 0, msg = "ldd path not correct or RTLDLIST files don't exist. ")
diff --git a/meta/lib/oeqa/runtime/logrotate.py b/meta/lib/oeqa/runtime/logrotate.py
new file mode 100644
index 0000000..86d791c
--- /dev/null
+++ b/meta/lib/oeqa/runtime/logrotate.py
@@ -0,0 +1,28 @@
+# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=289 testcase
+# Note that the image under test must have logrotate installed
+
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("logrotate"):
+        skipModule("No logrotate package in image")
+
+
+class LogrotateTest(oeRuntimeTest):
+
+    @skipUnlessPassed("test_ssh")
+    def test_1_logrotate_setup(self):
+        (status, output) = self.target.run('mkdir /home/root/logrotate_dir')
+        self.assertEqual(status, 0, msg = "Could not create logrotate_dir. Output: %s" % output)
+        (status, output) = self.target.run("sed -i 's#wtmp {#wtmp {\\n    olddir /home/root/logrotate_dir#' /etc/logrotate.conf")
+        self.assertEqual(status, 0, msg = "Could not write to logrotate.conf file. Status and output: %s and %s)" % (status, output))
+
+    @testcase(289)
+    @skipUnlessPassed("test_1_logrotate_setup")
+    def test_2_logrotate(self):
+        (status, output) = self.target.run('logrotate -f /etc/logrotate.conf')
+        self.assertEqual(status, 0, msg = "logrotate service could not be reloaded. Status and output: %s and %s" % (status, output))
+        output = self.target.run('ls -la /home/root/logrotate_dir/ | wc -l')[1]
+        self.assertTrue(int(output)>=3, msg = "new logfile could not be created. List of files within log directory: %s" %(self.target.run('ls -la /home/root/logrotate_dir')[1]))
diff --git a/meta/lib/oeqa/runtime/multilib.py b/meta/lib/oeqa/runtime/multilib.py
new file mode 100644
index 0000000..e1bcc42
--- /dev/null
+++ b/meta/lib/oeqa/runtime/multilib.py
@@ -0,0 +1,48 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS", True) or ""
+    if "multilib:lib32" not in multilibs:
+        skipModule("this isn't a multilib:lib32 image")
+
+
+class MultilibTest(oeRuntimeTest):
+
+    def parse(self, s):
+        """
+        Parse the output of readelf -h and return the binary class, or fail.
+        """
+        l = [l.split()[1] for l in s.split('\n') if "Class:" in l]
+        if l:
+            return l[0]
+        else:
+            self.fail("Cannot parse readelf output\n" + s)
+
+    @skipUnlessPassed('test_ssh')
+    def test_check_multilib_libc(self):
+        """
+        Check that a multilib image has both 32-bit and 64-bit libc in.
+        """
+
+        (status, output) = self.target.run("readelf -h /lib/libc.so.6")
+        self.assertEqual(status, 0, "Failed to readelf /lib/libc.so.6")
+        class32 = self.parse(output)
+
+        (status, output) = self.target.run("readelf -h /lib64/libc.so.6")
+        self.assertEqual(status, 0, "Failed to readelf /lib64/libc.so.6")
+        class64 = self.parse(output)
+
+        self.assertEqual(class32, "ELF32", msg="/lib/libc.so.6 isn't ELF32 (is %s)" % class32)
+        self.assertEqual(class64, "ELF64", msg="/lib64/libc.so.6 isn't ELF64 (is %s)" % class64)
+
+    @testcase('279')
+    @skipUnlessPassed('test_check_multilib_libc')
+    def test_file_connman(self):
+        self.assertTrue(oeRuntimeTest.hasPackage('lib32-connman-gnome'), msg="This test assumes lib32-connman-gnome is installed")
+
+        (status, output) = self.target.run("readelf -h /usr/bin/connman-applet")
+        self.assertEqual(status, 0, "Failed to readelf /usr/bin/connman-applet")
+        theclass = self.parse(output)
+        self.assertEqual(theclass, "ELF32", msg="connman-applet isn't ELF32 (is %s)" % theclass)
diff --git a/meta/lib/oeqa/runtime/pam.py b/meta/lib/oeqa/runtime/pam.py
new file mode 100644
index 0000000..c8205c9
--- /dev/null
+++ b/meta/lib/oeqa/runtime/pam.py
@@ -0,0 +1,25 @@
+# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=287 testcase
+# Note that the image under test must have "pam" in DISTRO_FEATURES
+
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("pam"):
+        skipModule("target doesn't have 'pam' in DISTRO_FEATURES")
+
+
+class PamBasicTest(oeRuntimeTest):
+
+    @testcase(287)
+    @skipUnlessPassed('test_ssh')
+    def test_pam(self):
+        (status, output) = self.target.run('login --help')
+        self.assertEqual(status, 1, msg = "login command does not work as expected. Status and output:%s and %s" %(status, output))
+        (status, output) = self.target.run('passwd --help')
+        self.assertEqual(status, 0, msg = "passwd command does not work as expected. Status and output:%s and %s" %(status, output))
+        (status, output) = self.target.run('su --help')
+        self.assertEqual(status, 0, msg = "su command does not work as expected. Status and output:%s and %s" %(status, output))
+        (status, output) = self.target.run('useradd --help')
+        self.assertEqual(status, 0, msg = "useradd command does not work as expected. Status and output:%s and %s" %(status, output))
diff --git a/meta/lib/oeqa/runtime/parselogs.py b/meta/lib/oeqa/runtime/parselogs.py
new file mode 100644
index 0000000..e20947b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/parselogs.py
@@ -0,0 +1,257 @@
+import os
+import unittest
+import subprocess
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+
+#in the future these lists could be moved outside of the module
+errors = ["error", "cannot", "can\'t", "failed"]
+
+common_errors = [
+    "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
+    "dma timeout",
+    "can\'t add hid device:",
+    "usbhid: probe of ",
+    "_OSC failed (AE_ERROR)",
+    "_OSC failed (AE_SUPPORT)",
+    "AE_ALREADY_EXISTS",
+    "ACPI _OSC request failed (AE_SUPPORT)",
+    "can\'t disable ASPM",
+    "Failed to load module \"vesa\"",
+    "Failed to load module vesa",
+    "Failed to load module \"modesetting\"",
+    "Failed to load module modesetting",
+    "Failed to load module \"glx\"",
+    "Failed to load module \"fbdev\"",
+    "Failed to load module fbdev",
+    "Failed to load module glx",
+    "[drm] Cannot find any crtc or sizes - going 1024x768",
+    "_OSC failed (AE_NOT_FOUND); disabling ASPM",
+    "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
+    "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
+    "hd.: possibly failed opcode",
+    'NETLINK INITIALIZATION FAILED',
+    'kernel: Cannot find map file',
+    'omap_hwmod: debugss: _wait_target_disable failed',
+    'VGA arbiter: cannot open kernel arbiter, no multi-card support',
+    'Failed to find URL:http://ipv4.connman.net/online/status.html',
+    'Online check failed for',
+    ]
+
+x86_common = [
+    '[drm:psb_do_init] *ERROR* Debug is',
+    'wrong ELF class',
+    'Could not enable PowerButton event',
+    'probe of LNXPWRBN:00 failed with error -22',
+] + common_errors
+
+qemux86_common = [
+    'Fast TSC calibration',
+    'wrong ELF class',
+    "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
+    "can't claim BAR ",
+] + common_errors
+
+ignore_errors = {
+    'default' : common_errors,
+    'qemux86' : [
+        'Failed to access perfctr msr (MSR',
+        ] + qemux86_common,
+    'qemux86-64' : qemux86_common,
+    'qemumips' : [
+        'Failed to load module "glx"',
+        'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
+        ] + common_errors,
+    'qemumips64' : [
+        'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
+        ] + common_errors,
+    'qemuppc' : [
+        'PCI 0000:00 Cannot reserve Legacy IO [io  0x0000-0x0fff]',
+        'host side 80-wire cable detection failed, limiting max speed',
+        'mode "640x480" test failed',
+        'Failed to load module "glx"',
+        ] + common_errors,
+    'qemuarm' : [
+        'mmci-pl18x: probe of fpga:05 failed with error -22',
+        'mmci-pl18x: probe of fpga:0b failed with error -22',
+        'Failed to load module "glx"'
+        ] + common_errors,
+    'qemuarm64' : [
+        'Fatal server error:',
+        '(EE) Server terminated with error (1). Closing log file.',
+        ] + common_errors,
+    'emenlow' : [
+        '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
+        '(EE) Failed to load module "psb"',
+        '(EE) Failed to load module psb',
+        '(EE) Failed to load module "psbdrv"',
+        '(EE) Failed to load module psbdrv',
+        '(EE) open /dev/fb0: No such file or directory',
+        '(EE) AIGLX: reverting to software rendering',
+        ] + x86_common,
+    'core2_32' : [
+        'ACPI: No _BQC method, cannot determine initial brightness',
+        '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
+        '(EE) Failed to load module "psb"',
+        '(EE) Failed to load module psb',
+        '(EE) Failed to load module "psbdrv"',
+        '(EE) Failed to load module psbdrv',
+        '(EE) open /dev/fb0: No such file or directory',
+        '(EE) AIGLX: reverting to software rendering',
+        ] + x86_common,
+    'intel-corei7-64' : [
+        "controller can't do DEVSLP, turning off",
+        ] + common_errors,
+    'crownbay' : x86_common,
+    'genericx86' : x86_common,
+    'genericx86-64' : x86_common,
+    'edgerouter' : [
+        'Fatal server error:',
+        ] + common_errors,
+    'minnow' : [
+        'netlink init failed',
+        ] + common_errors,
+    'jasperforest' : [
+        'Activated service \'org.bluez\' failed:',
+        'Unable to find NFC netlink family',
+        'netlink init failed',
+        ] + common_errors,
+}
+
+log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
+
+class ParseLogsTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.errors = errors
+        self.ignore_errors = ignore_errors
+        self.log_locations = log_locations
+        self.msg = ""
+
+    def getMachine(self):
+        return oeRuntimeTest.tc.d.getVar("MACHINE", True)
+
+    #get some information on the CPU of the machine to display at the beginning of the output. This info might be useful in some cases.
+    def getHardwareInfo(self):
+        hwi = ""
+        (status, cpu_name) = self.target.run("cat /proc/cpuinfo | grep \"model name\" | head -n1 | awk 'BEGIN{FS=\":\"}{print $2}'")
+        (status, cpu_physical_cores) = self.target.run("cat /proc/cpuinfo | grep \"cpu cores\" | head -n1 | awk {'print $4'}")
+        (status, cpu_logical_cores) = self.target.run("cat /proc/cpuinfo | grep \"processor\" | wc -l")
+        (status, cpu_arch) = self.target.run("uname -m")
+        hwi += "Machine information: \n"
+        hwi += "*******************************\n"
+        hwi += "Machine name: "+self.getMachine()+"\n"
+        hwi += "CPU: "+str(cpu_name)+"\n"
+        hwi += "Arch: "+str(cpu_arch)+"\n"
+        hwi += "Physical cores: "+str(cpu_physical_cores)+"\n"
+        hwi += "Logical cores: "+str(cpu_logical_cores)+"\n"
+        hwi += "*******************************\n"
+        return hwi
+
+    #go through the log locations provided; if a location is a directory, add all the .log files in it to the list,
+    #and if it's a file, add it directly
+    def getLogList(self, log_locations):
+        logs = []
+        for location in log_locations:
+            (status, output) = self.target.run("test -f "+str(location))
+            if (status == 0):
+                logs.append(str(location))
+            else:
+                (status, output) = self.target.run("test -d "+str(location))
+                if (status == 0):
+                    (status, output) = self.target.run("find "+str(location)+"/*.log -maxdepth 1 -type f")
+                    if (status == 0):
+                        output = output.splitlines()
+                        for logfile in output:
+                            logs.append(os.path.join(location,str(logfile)))
+        return logs
+
+    #copy the log files to be parsed locally
+    def transfer_logs(self, log_list):
+        target_logs = 'target_logs'
+        if not os.path.exists(target_logs):
+            os.makedirs(target_logs)
+        for f in log_list:
+            self.target.copy_from(f, target_logs)
+
+    #get the local list of logs
+    def get_local_log_list(self, log_locations):
+        self.transfer_logs(self.getLogList(log_locations))
+        logs = [ os.path.join('target_logs',f) for f in os.listdir('target_logs') if os.path.isfile(os.path.join('target_logs',f)) ]
+        return logs
+
+    #build the grep command to be used with filters and exclusions
+    def build_grepcmd(self, errors, ignore_errors, log):
+        grepcmd = "grep "
+        grepcmd +="-Ei \""
+        for error in errors:
+            grepcmd += error+"|"
+        grepcmd = grepcmd[:-1]
+        grepcmd += "\" "+str(log)+" | grep -Eiv \'"
+        try:
+            errorlist = ignore_errors[self.getMachine()]
+        except KeyError:
+            self.msg += "No ignore list found for this machine, using default\n"
+            errorlist = ignore_errors['default']
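+        #escape regex metacharacters in the ignore patterns and replace single quotes with '.' so they can be embedded in the single-quoted grep -Eiv expression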
+        for ignore_error in errorlist:
+            ignore_error = ignore_error.replace("(", "\(")
+            ignore_error = ignore_error.replace(")", "\)")
+            ignore_error = ignore_error.replace("'", ".")
+            ignore_error = ignore_error.replace("?", "\?")
+            ignore_error = ignore_error.replace("[", "\[")
+            ignore_error = ignore_error.replace("]", "\]")
+            ignore_error = ignore_error.replace("*", "\*")
+            grepcmd += ignore_error+"|"
+        grepcmd = grepcmd[:-1]
+        grepcmd += "\'"
+        return grepcmd
+
+    #grep only the errors so that their context can be collected. Default context is 10 lines before and after the error itself
+    def parse_logs(self, errors, ignore_errors, logs, lines_before = 10, lines_after = 10):
+        results = {}
+        rez = []
+        grep_output = ''
+        for log in logs:
+            result = None
+            thegrep = self.build_grepcmd(errors, ignore_errors, log)
+            try:
+                result = subprocess.check_output(thegrep, shell=True)
+            except:
+                pass
+            if (result is not None):
+                results[log.replace('target_logs/','')] = {}
+                rez = result.splitlines()
+                for xrez in rez:
+                    command = "grep \"\\"+str(xrez)+"\" -B "+str(lines_before)+" -A "+str(lines_after)+" "+str(log)
+                    try:
+                        grep_output = subprocess.check_output(command, shell=True)
+                    except:
+                        pass
+                    results[log.replace('target_logs/','')][xrez]=grep_output
+        return results
+
+    #get the output of dmesg and write it in a file. This file is added to log_locations.
+    def write_dmesg(self):
+        (status, dmesg) = self.target.run("dmesg")
+        (status, dmesg2) = self.target.run("echo \""+str(dmesg)+"\" > /tmp/dmesg_output.log")
+
+    @testcase(1059)
+    @skipUnlessPassed('test_ssh')
+    def test_parselogs(self):
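+        #dump dmesg to a file, copy all logs locally, grep them for the error patterns minus the per-machine ignore list, and fail if any matches remain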
+        self.write_dmesg()
+        log_list = self.get_local_log_list(self.log_locations)
+        result = self.parse_logs(self.errors, self.ignore_errors, log_list)
+        print self.getHardwareInfo()
+        errcount = 0
+        for log in result:
+            self.msg += "Log: "+log+"\n"
+            self.msg += "-----------------------\n"
+            for error in result[log]:
+                errcount += 1
+                self.msg += "Central error: "+str(error)+"\n"
+                self.msg +=  "***********************\n"
+                self.msg +=  result[str(log)][str(error)]+"\n"
+                self.msg +=  "***********************\n"
+        self.msg += "%s errors found in logs." % errcount
+        self.assertEqual(errcount, 0, msg=self.msg)
diff --git a/meta/lib/oeqa/runtime/perl.py b/meta/lib/oeqa/runtime/perl.py
new file mode 100644
index 0000000..e044d0a
--- /dev/null
+++ b/meta/lib/oeqa/runtime/perl.py
@@ -0,0 +1,30 @@
+import unittest
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("perl"):
+        skipModule("No perl package in the image")
+
+
+class PerlTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.pl"), "/tmp/test.pl")
+
+    @testcase(1141)
+    def test_perl_exists(self):
+        (status, output) = self.target.run('which perl')
+        self.assertEqual(status, 0, msg="Perl binary not in PATH or not on target.")
+
+    @testcase(208)
+    def test_perl_works(self):
+        (status, output) = self.target.run('perl /tmp/test.pl')
+        self.assertEqual(status, 0, msg="Exit status was not 0. Output: %s" % output)
+        self.assertEqual(output, "the value of a is 0.01", msg="Incorrect output: %s" % output)
+
+    @classmethod
+    def tearDownClass(self):
+        oeRuntimeTest.tc.target.run("rm /tmp/test.pl")
diff --git a/meta/lib/oeqa/runtime/ping.py b/meta/lib/oeqa/runtime/ping.py
new file mode 100644
index 0000000..80c4601
--- /dev/null
+++ b/meta/lib/oeqa/runtime/ping.py
@@ -0,0 +1,22 @@
+import subprocess
+import unittest
+import sys
+import time
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+
+class PingTest(oeRuntimeTest):
+
+    @testcase(964)
+    def test_ping(self):
+        output = ''
+        count = 0
+        endtime = time.time() + 60
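+        # require 5 consecutive successful pings; any failed ping resets the counter and the loop gives up after roughly 60 seconds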
+        while count < 5 and time.time() < endtime:
+            proc = subprocess.Popen("ping -c 1 %s" % self.target.ip, shell=True, stdout=subprocess.PIPE)
+            output += proc.communicate()[0]
+            if proc.poll() == 0:
+                count += 1
+            else:
+                count = 0
+        self.assertEqual(count, 5, msg = "Expected 5 consecutive replies, got %d.\nping output is:\n%s" % (count,output))
diff --git a/meta/lib/oeqa/runtime/python.py b/meta/lib/oeqa/runtime/python.py
new file mode 100644
index 0000000..26edb7a
--- /dev/null
+++ b/meta/lib/oeqa/runtime/python.py
@@ -0,0 +1,35 @@
+import unittest
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("python"):
+        skipModule("No python package in the image")
+
+
+class PythonTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.py"), "/tmp/test.py")
+
+    @testcase(1145)
+    def test_python_exists(self):
+        (status, output) = self.target.run('which python')
+        self.assertEqual(status, 0, msg="Python binary not in PATH or not on target.")
+
+    @testcase(965)
+    def test_python_stdout(self):
+        (status, output) = self.target.run('python /tmp/test.py')
+        self.assertEqual(status, 0, msg="Exit status was not 0. Output: %s" % output)
+        self.assertEqual(output, "the value of a is 0.01", msg="Incorrect output: %s" % output)
+
+    @testcase(1146)
+    def test_python_testfile(self):
+        (status, output) = self.target.run('ls /tmp/testfile.python')
+        self.assertEqual(status, 0, msg="Python test file generate failed.")
+
+    @classmethod
+    def tearDownClass(self):
+        oeRuntimeTest.tc.target.run("rm /tmp/test.py /tmp/testfile.python")
diff --git a/meta/lib/oeqa/runtime/rpm.py b/meta/lib/oeqa/runtime/rpm.py
new file mode 100644
index 0000000..32aae24
--- /dev/null
+++ b/meta/lib/oeqa/runtime/rpm.py
@@ -0,0 +1,101 @@
+import unittest
+import os
+import fnmatch
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("package-management"):
+            skipModule("rpm module skipped: target doesn't have package-management in IMAGE_FEATURES")
+    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+            skipModule("rpm module skipped: target doesn't have rpm as primary package manager")
+
+
+class RpmBasicTest(oeRuntimeTest):
+
+    @testcase(960)
+    @skipUnlessPassed('test_ssh')
+    def test_rpm_help(self):
+        (status, output) = self.target.run('rpm --help')
+        self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output))
+
+    @testcase(191)
+    @skipUnlessPassed('test_rpm_help')
+    def test_rpm_query(self):
+        (status, output) = self.target.run('rpm -q rpm')
+        self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output))
+
+class RpmInstallRemoveTest(oeRuntimeTest):
+
+    @classmethod
+    def setUpClass(self):
+        pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH', True).replace("-", "_")
+        rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), "rpm", pkgarch)
+        # pick rpm-doc as a test file to get installed, because it's small and it will always be built for standard targets
+        for f in fnmatch.filter(os.listdir(rpmdir), "rpm-doc-*.%s.rpm" % pkgarch):
+            testrpmfile = f
+        oeRuntimeTest.tc.target.copy_to(os.path.join(rpmdir,testrpmfile), "/tmp/rpm-doc.rpm")
+
+    @testcase(192)
+    @skipUnlessPassed('test_rpm_help')
+    def test_rpm_install(self):
+        (status, output) = self.target.run('rpm -ivh /tmp/rpm-doc.rpm')
+        self.assertEqual(status, 0, msg="Failed to install rpm-doc package: %s" % output)
+
+    @testcase(194)
+    @skipUnlessPassed('test_rpm_install')
+    def test_rpm_remove(self):
+        (status,output) = self.target.run('rpm -e rpm-doc')
+        self.assertEqual(status, 0, msg="Failed to remove rpm-doc package: %s" % output)
+
+    @testcase(1096)
+    @skipUnlessPassed('test_ssh')
+    def test_rpm_query_nonroot(self):
+        (status, output) = self.target.run('useradd test1')
+        self.assertTrue(status == 0, msg="Failed to create new user")
+        (status, output) = self.target.run('sudo -u test1 id')
+        self.assertTrue('(test1)' in output, msg="Failed to execute as new user")
+        (status, output) = self.target.run('sudo -u test1 rpm -qa')
+        self.assertEqual(status, 0, msg="status: %s. Cannot run rpm -qa" % status)
+
+    @testcase(195)
+    @skipUnlessPassed('test_rpm_install')
+    def test_check_rpm_install_removal_log_file_size(self):
+        """
+        Summary:     Check rpm install/removal log file size
+        Expected:    There should be some method to keep the rpm log at a small size.
+        Product:     BSPs
+        Author:      Alexandru Georgescu <alexandru.c.georgescu@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+        db_files_cmd = 'ls /var/lib/rpm/__db.*'
+        get_log_size_cmd = "du /var/lib/rpm/log/log.* | awk '{print $1}'"
+
+        # Make sure that some database files are under /var/lib/rpm as '__db.xxx'
+        (status, output) = self.target.run(db_files_cmd)
+        self.assertEqual(0, status, 'Failed to find database files under /var/lib/rpm/ as __db.xxx')
+
+        # Remove the package just in case
+        self.target.run('rpm -e rpm-doc')
+
+        # Install/Remove a package 10 times
+        for i in range(10):
+            (status, output) = self.target.run('rpm -ivh /tmp/rpm-doc.rpm')
+            self.assertEqual(0, status, "Failed to install rpm-doc package. Reason: {}".format(output))
+
+            (status, output) = self.target.run('rpm -e rpm-doc')
+            self.assertEqual(0, status, "Failed to remove rpm-doc package. Reason: {}".format(output))
+
+        # Get the size of log file
+        (status, output) = self.target.run(get_log_size_cmd)
+        self.assertEqual(0, status, 'Failed to get the final size of the log file.')
+
+        # Compare each log size
+        for log_file_size in output.splitlines():
+            self.assertLessEqual(int(log_file_size), 11264,
+                                   'Log file size is greater than expected (~10MB), found {} kilobytes'.format(log_file_size))
+
+    @classmethod
+    def tearDownClass(self):
+        oeRuntimeTest.tc.target.run('rm -f /tmp/rpm-doc.rpm')
+
diff --git a/meta/lib/oeqa/runtime/scanelf.py b/meta/lib/oeqa/runtime/scanelf.py
new file mode 100644
index 0000000..43a024a
--- /dev/null
+++ b/meta/lib/oeqa/runtime/scanelf.py
@@ -0,0 +1,28 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("pax-utils"):
+        skipModule("pax-utils package not installed")
+
+class ScanelfTest(oeRuntimeTest):
+
+    def setUp(self):
+        self.scancmd = 'scanelf --quiet --recursive --mount --ldpath --path'
+
+    @testcase(966)
+    @skipUnlessPassed('test_ssh')
+    def test_scanelf_textrel(self):
+        # print TEXTREL information
+        self.scancmd += " --textrel"
+        (status, output) = self.target.run(self.scancmd)
+        self.assertEqual(output.strip(), "", "\n".join([self.scancmd, output]))
+
+    @testcase(967)
+    @skipUnlessPassed('test_ssh')
+    def test_scanelf_rpath(self):
+        # print RPATH information
+        self.scancmd += " --rpath"
+        (status, output) = self.target.run(self.scancmd)
+        self.assertEqual(output.strip(), "", "\n".join([self.scancmd, output]))
diff --git a/meta/lib/oeqa/runtime/scp.py b/meta/lib/oeqa/runtime/scp.py
new file mode 100644
index 0000000..48e87d2
--- /dev/null
+++ b/meta/lib/oeqa/runtime/scp.py
@@ -0,0 +1,22 @@
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import skipUnlessPassed, testcase
+
+def setUpModule():
+    if not (oeRuntimeTest.hasPackage("dropbear") or oeRuntimeTest.hasPackage("openssh-sshd")):
+        skipModule("No ssh package in image")
+
+class ScpTest(oeRuntimeTest):
+
+    @testcase(220)
+    @skipUnlessPassed('test_ssh')
+    def test_scp_file(self):
+        test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR", True)
+        test_file_path = os.path.join(test_log_dir, 'test_scp_file')
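+        # create a roughly 4MB test file by seeking to 2**22 - 1 and writing a line separator (sparse on most filesystems)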
+        with open(test_file_path, 'w') as test_scp_file:
+            test_scp_file.seek(2 ** 22 - 1)
+            test_scp_file.write(os.linesep)
+        (status, output) = self.target.copy_to(test_file_path, '/tmp/test_scp_file')
+        self.assertEqual(status, 0, msg = "File could not be copied. Output: %s" % output)
+        (status, output) = self.target.run("ls -la /tmp/test_scp_file")
+        self.assertEqual(status, 0, msg = "SCP test failed")
diff --git a/meta/lib/oeqa/runtime/skeletoninit.py b/meta/lib/oeqa/runtime/skeletoninit.py
new file mode 100644
index 0000000..cb0cb9b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/skeletoninit.py
@@ -0,0 +1,29 @@
+# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 testcase
+# Note that the image under test must have meta-skeleton layer in bblayers and IMAGE_INSTALL_append = " service" in local.conf
+
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasPackage("service"):
+        skipModule("No service package in image")
+
+
+class SkeletonBasicTest(oeRuntimeTest):
+
+    @skipUnlessPassed('test_ssh')
+    @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image")
+    def test_skeleton_availability(self):
+        (status, output) = self.target.run('ls /etc/init.d/skeleton')
+        self.assertEqual(status, 0, msg = "skeleton init script not found. Output:\n%s " % output)
+        (status, output) =  self.target.run('ls /usr/sbin/skeleton-test')
+        self.assertEqual(status, 0, msg = "skeleton-test not found. Output:\n%s" % output)
+
+    @testcase(284)
+    @skipUnlessPassed('test_skeleton_availability')
+    @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image")
+    def test_skeleton_script(self):
+        output1 = self.target.run("/etc/init.d/skeleton start")[1]
+        (status, output2) = self.target.run(oeRuntimeTest.pscmd + ' | grep [s]keleton-test')
+        self.assertEqual(status, 0, msg = "Skeleton script could not be started:\n%s\n%s" % (output1, output2))
diff --git a/meta/lib/oeqa/runtime/smart.py b/meta/lib/oeqa/runtime/smart.py
new file mode 100644
index 0000000..e41668d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/smart.py
@@ -0,0 +1,175 @@
+import unittest
+import re
+import os
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.httpserver import HTTPService
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("package-management"):
+        skipModule("Image doesn't have package management feature")
+    if not oeRuntimeTest.hasPackage("smart"):
+        skipModule("Image doesn't have smart installed")
+    if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+        skipModule("Rpm is not the primary package manager")
+
+class SmartTest(oeRuntimeTest):
+
+    @skipUnlessPassed('test_smart_help')
+    def smart(self, command, expected = 0):
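+        # helper: run the given smart subcommand on the target with a long timeout, asserting the expected exit status and that smart did not run out of memory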
+        command = 'smart %s' % command
+        status, output = self.target.run(command, 1500)
+        message = os.linesep.join([command, output])
+        self.assertEqual(status, expected, message)
+        self.assertFalse("Cannot allocate memory" in output, message)
+        return output
+
+class SmartBasicTest(SmartTest):
+
+    @testcase(716)
+    @skipUnlessPassed('test_ssh')
+    def test_smart_help(self):
+        self.smart('--help')
+
+    @testcase(968)
+    def test_smart_version(self):
+        self.smart('--version')
+
+    @testcase(721)
+    def test_smart_info(self):
+        self.smart('info python-smartpm')
+
+    @testcase(421)
+    def test_smart_query(self):
+        self.smart('query python-smartpm')
+
+    @testcase(720)
+    def test_smart_search(self):
+        self.smart('search python-smartpm')
+
+    @testcase(722)
+    def test_smart_stats(self):
+        self.smart('stats')
+
+class SmartRepoTest(SmartTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.repolist = []
+        self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip)
+        self.repo_server.start()
+
+    @classmethod
+    def tearDownClass(self):
+        self.repo_server.stop()
+        for i in self.repolist:
+            oeRuntimeTest.tc.target.run('smart channel -y --remove '+str(i))
+
+    @testcase(1143)
+    def test_smart_channel(self):
+        self.smart('channel', 1)
+
+    @testcase(719)
+    def test_smart_channel_add(self):
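+        # add an rpm-md channel for each package architecture published under the HTTP-served deploy directory, then refresh the channel metadata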
+        image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True)
+        deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype)
+        pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split()
+        for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)):
+            if arch in pkgarchs:
+                self.smart('channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url))
+                self.repolist.append(arch)
+        self.smart('update')
+
+    @testcase(969)
+    def test_smart_channel_help(self):
+        self.smart('channel --help')
+
+    @testcase(970)
+    def test_smart_channel_list(self):
+        self.smart('channel --list')
+
+    @testcase(971)
+    def test_smart_channel_show(self):
+        self.smart('channel --show')
+
+    @testcase(717)
+    def test_smart_channel_rpmsys(self):
+        self.smart('channel --show rpmsys')
+        self.smart('channel --disable rpmsys')
+        self.smart('channel --enable rpmsys')
+
+    @testcase(1144)
+    @skipUnlessPassed('test_smart_channel_add')
+    def test_smart_install(self):
+        self.smart('remove -y psplash-default')
+        self.smart('install -y psplash-default')
+
+    @testcase(728)
+    @skipUnlessPassed('test_smart_install')
+    def test_smart_install_dependency(self):
+        self.smart('remove -y psplash')
+        self.smart('install -y psplash-default')
+
+    @testcase(723)
+    @skipUnlessPassed('test_smart_channel_add')
+    def test_smart_install_from_disk(self):
+        self.smart('remove -y psplash-default')
+        self.smart('download psplash-default')
+        self.smart('install -y ./psplash-default*')
+
+    @testcase(725)
+    @skipUnlessPassed('test_smart_channel_add')
+    def test_smart_install_from_http(self):
+        output = self.smart('download --urls psplash-default')
+        url = re.search('(http://.*/psplash-default.*\.rpm)', output)
+        self.assertTrue(url, msg="Couln't find download url in %s" % output)
+        self.smart('remove -y psplash-default')
+        self.smart('install -y %s' % url.group(0))
+
+    @testcase(729)
+    @skipUnlessPassed('test_smart_install')
+    def test_smart_reinstall(self):
+        self.smart('reinstall -y psplash-default')
+
+    @testcase(727)
+    @skipUnlessPassed('test_smart_channel_add')
+    def test_smart_remote_repo(self):
+        self.smart('update')
+        self.smart('install -y psplash')
+        self.smart('remove -y psplash')
+
+    @testcase(726)
+    def test_smart_local_dir(self):
+        self.target.run('mkdir /tmp/myrpmdir')
+        self.smart('channel --add myrpmdir type=rpm-dir path=/tmp/myrpmdir -y')
+        self.target.run('cd /tmp/myrpmdir')
+        self.smart('download psplash')
+        output = self.smart('channel --list')
+        for i in output.split("\n"):
+            if ("rpmsys" != str(i)) and ("myrpmdir" != str(i)):
+                self.smart('channel --disable '+str(i))
+        self.target.run('cd /home/root')
+        self.smart('install psplash')
+        for i in output.split("\n"):
+            if ("rpmsys" != str(i)) and ("myrpmdir" != str(i)):
+                self.smart('channel --enable '+str(i))
+        self.smart('channel --remove myrpmdir -y')
+        self.target.run("rm -rf /tmp/myrpmdir")
+
+    @testcase(718)
+    def test_smart_add_rpmdir(self):
+        self.target.run('mkdir /tmp/myrpmdir')
+        self.smart('channel --add myrpmdir type=rpm-dir path=/tmp/myrpmdir -y')
+        self.smart('channel --disable myrpmdir -y')
+        output = self.smart('channel --show myrpmdir')
+        self.assertTrue("disabled = yes" in output, msg="Failed to disable rpm dir")
+        self.smart('channel --enable  myrpmdir -y')
+        output = self.smart('channel --show myrpmdir')
+        self.assertFalse("disabled = yes" in output, msg="Failed to enable rpm dir")
+        self.smart('channel --remove myrpmdir -y')
+        self.target.run("rm -rf /tmp/myrpmdir")
+
+    @testcase(731)
+    @skipUnlessPassed('test_smart_channel_add')
+    def test_smart_remove_package(self):
+        self.smart('install -y psplash')
+        self.smart('remove -y psplash')
\ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/ssh.py b/meta/lib/oeqa/runtime/ssh.py
new file mode 100644
index 0000000..0e76d5d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/ssh.py
@@ -0,0 +1,19 @@
+import subprocess
+import unittest
+import sys
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not (oeRuntimeTest.hasPackage("dropbear") or oeRuntimeTest.hasPackage("openssh")):
+        skipModule("No ssh package in image")
+
+class SshTest(oeRuntimeTest):
+
+    @testcase(224)
+    @skipUnlessPassed('test_ping')
+    def test_ssh(self):
+        (status, output) = self.target.run('uname -a')
+        self.assertEqual(status, 0, msg="SSH Test failed: %s" % output)
+        (status, output) = self.target.run('cat /etc/masterimage')
+        self.assertEqual(status, 1, msg="This isn't the right image - /etc/masterimage shouldn't be here %s" % output)
diff --git a/meta/lib/oeqa/runtime/syslog.py b/meta/lib/oeqa/runtime/syslog.py
new file mode 100644
index 0000000..2601dd9
--- /dev/null
+++ b/meta/lib/oeqa/runtime/syslog.py
@@ -0,0 +1,45 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not (oeRuntimeTest.hasPackage("busybox-syslog") or oeRuntimeTest.hasPackage("sysklogd")):
+        skipModule("No syslog package in image")
+
+class SyslogTest(oeRuntimeTest):
+
+    @testcase(201)
+    @skipUnlessPassed("test_syslog_help")
+    def test_syslog_running(self):
+        (status,output) = self.target.run(oeRuntimeTest.pscmd + ' | grep -i [s]yslogd')
+        self.assertEqual(status, 0, msg="no syslogd process, ps output: %s" % self.target.run(oeRuntimeTest.pscmd)[1])
+
+class SyslogTestConfig(oeRuntimeTest):
+
+    @testcase(1149)
+    @skipUnlessPassed("test_syslog_running")
+    def test_syslog_logger(self):
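+        # log a test message and look for it in /var/log/messages, falling back to logread (the busybox syslog circular buffer) if the file is missing or the message is not found there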
+        (status,output) = self.target.run('logger foobar && test -e /var/log/messages && grep foobar /var/log/messages || logread | grep foobar')
+        self.assertEqual(status, 0, msg="Test log string not found in /var/log/messages. Output: %s " % output)
+
+    @testcase(1150)
+    @skipUnlessPassed("test_syslog_running")
+    def test_syslog_restart(self):
+        if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False):
+            (status,output) = self.target.run('/etc/init.d/syslog restart')
+        else:
+            (status,output) = self.target.run('systemctl restart syslog.service')
+
+    @testcase(202)
+    @skipUnlessPassed("test_syslog_restart")
+    @skipUnlessPassed("test_syslog_logger")
+    @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image")
+    @unittest.skipIf(oeRuntimeTest.hasPackage("sysklogd") or not oeRuntimeTest.hasPackage("busybox"), "Non-busybox syslog")
+    def test_syslog_startup_config(self):
+        self.target.run('echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf')
+        (status,output) = self.target.run('/etc/init.d/syslog restart')
+        self.assertEqual(status, 0, msg="Could not restart syslog service. Status and output: %s and %s" % (status,output))
+        (status,output) = self.target.run('logger foobar && grep foobar /var/log/test')
+        self.assertEqual(status, 0, msg="Test log string not found. Output: %s " % output)
+        self.target.run("sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf")
+        self.target.run('/etc/init.d/syslog restart')
diff --git a/meta/lib/oeqa/runtime/systemd.py b/meta/lib/oeqa/runtime/systemd.py
new file mode 100644
index 0000000..c74394c
--- /dev/null
+++ b/meta/lib/oeqa/runtime/systemd.py
@@ -0,0 +1,101 @@
+import unittest
+import re
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("systemd"):
+            skipModule("target doesn't have systemd in DISTRO_FEATURES")
+    if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True):
+            skipModule("systemd is not the init manager for this image")
+
+
+class SystemdTest(oeRuntimeTest):
+
+    def systemctl(self, action = '', target = '', expected = 0, verbose = False):
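+        # helper: run systemctl and assert on its exit status; with verbose=True an unexpected status appends the unit's full status output to the failure message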
+        command = 'systemctl %s %s' % (action, target)
+        status, output = self.target.run(command)
+        message = '\n'.join([command, output])
+        if status != expected and verbose:
+            message += self.target.run('systemctl status --full %s' % target)[1]
+        self.assertEqual(status, expected, message)
+        return output
+
+
+class SystemdBasicTests(SystemdTest):
+
+    @skipUnlessPassed('test_ssh')
+    def test_systemd_basic(self):
+        self.systemctl('--version')
+
+    @testcase(551)
+    @skipUnlessPassed('test_systemd_basic')
+    def test_systemd_list(self):
+        self.systemctl('list-unit-files')
+
+    def settle(self):
+        """
+        Block until systemd has finished activating any units being activated,
+        or until two minutes has elapsed.
+
+        Returns a tuple, either (True, '') if all units have finished
+        activating, or (False, message string) if there are still units
+        activating (generally, failing units that restart).
+        """
+        import time
+        endtime = time.time() + (60 * 2)
+        while True:
+            status, output = self.target.run('systemctl --state=activating')
+            if "0 loaded units listed" in output:
+                return (True, '')
+            if time.time() >= endtime:
+                return (False, output)
+            time.sleep(10)
+
+    @testcase(550)
+    @skipUnlessPassed('test_systemd_basic')
+    def test_systemd_failed(self):
+        settled, output = self.settle()
+        self.assertTrue(settled, msg="Timed out waiting for systemd to settle:\n" + output)
+
+        output = self.systemctl('list-units', '--failed')
+        match = re.search("0 loaded units listed", output)
+        if not match:
+            output += self.systemctl('status --full --failed')
+        self.assertTrue(match, msg="Some systemd units failed:\n%s" % output)
+
+
+class SystemdServiceTests(SystemdTest):
+
+    def check_for_avahi(self):
+        if not self.hasPackage('avahi-daemon'):
+            raise unittest.SkipTest("Testcase dependency not met: need avahi-daemon installed on target")
+
+    @skipUnlessPassed('test_systemd_basic')
+    def test_systemd_status(self):
+        self.check_for_avahi()
+        self.systemctl('status --full', 'avahi-daemon.service')
+
+    @testcase(695)
+    @skipUnlessPassed('test_systemd_status')
+    def test_systemd_stop_start(self):
+        self.check_for_avahi()
+        self.systemctl('stop', 'avahi-daemon.service')
+        self.systemctl('is-active', 'avahi-daemon.service', expected=3, verbose=True)
+        self.systemctl('start','avahi-daemon.service')
+        self.systemctl('is-active', 'avahi-daemon.service', verbose=True)
+
+    @testcase(696)
+    @skipUnlessPassed('test_systemd_basic')
+    def test_systemd_disable_enable(self):
+        self.check_for_avahi()
+        self.systemctl('disable', 'avahi-daemon.service')
+        self.systemctl('is-enabled', 'avahi-daemon.service', expected=1)
+        self.systemctl('enable', 'avahi-daemon.service')
+        self.systemctl('is-enabled', 'avahi-daemon.service')
+
+class SystemdJournalTests(SystemdTest):
+    @skipUnlessPassed('test_ssh')
+    def test_systemd_journal(self):
+        (status, output) = self.target.run('journalctl')
+        self.assertEqual(status, 0, output)
diff --git a/meta/lib/oeqa/runtime/vnc.py b/meta/lib/oeqa/runtime/vnc.py
new file mode 100644
index 0000000..f31deff
--- /dev/null
+++ b/meta/lib/oeqa/runtime/vnc.py
@@ -0,0 +1,20 @@
+from oeqa.oetest import oeRuntimeTest, skipModuleUnless
+from oeqa.utils.decorators import *
+import re
+
+def setUpModule():
+    skipModuleUnless(oeRuntimeTest.hasPackage('x11vnc'), "No x11vnc package in image")
+
+class VNCTest(oeRuntimeTest):
+
+    @testcase(213)
+    @skipUnlessPassed('test_ssh')
+    def test_vnc(self):
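+        # start x11vnc on display :0 in the background, parse the PORT=<number> line it prints and check that the port is actually listening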
+        (status, output) = self.target.run('x11vnc -display :0 -bg -o x11vnc.log')
+        self.assertEqual(status, 0, msg="x11vnc server failed to start: %s" % output)
+        port = re.search('PORT=[0-9]*', output)
+        self.assertTrue(port, msg="Listening port not specified in command output: %s" %output)
+
+        vncport = port.group(0).split('=')[1]
+        (status, output) = self.target.run('netstat -ntl | grep ":%s"' % vncport)
+        self.assertEqual(status, 0, msg="x11vnc server not running on port %s\n\n%s" % (vncport, self.target.run('netstat -ntl; cat x11vnc.log')[1]))
diff --git a/meta/lib/oeqa/runtime/x32lib.py b/meta/lib/oeqa/runtime/x32lib.py
new file mode 100644
index 0000000..ce5e214
--- /dev/null
+++ b/meta/lib/oeqa/runtime/x32lib.py
@@ -0,0 +1,18 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+        #check if DEFAULTTUNE is set and its value is x86-64-x32
+        defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE", True)
+        if "x86-64-x32" not in defaulttune:
+            skipModule("DEFAULTTUNE is not set to x86-64-x32")
+
+class X32libTest(oeRuntimeTest):
+
+    @testcase(281)
+    @skipUnlessPassed("test_ssh")
+    def test_x32_file(self):
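+        # an x32 binary reports Class ELF32 together with Machine X86-64 in its ELF header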
+        status1 = self.target.run("readelf -h /bin/ls | grep Class | grep ELF32")[0]
+        status2 = self.target.run("readelf -h /bin/ls | grep Machine | grep X86-64")[0]
+        self.assertTrue(status1 == 0 and status2 == 0, msg="/bin/ls isn't an X86-64 ELF32 binary. readelf says: %s" % self.target.run("readelf -h /bin/ls")[1])
diff --git a/meta/lib/oeqa/runtime/xorg.py b/meta/lib/oeqa/runtime/xorg.py
new file mode 100644
index 0000000..12bcd37
--- /dev/null
+++ b/meta/lib/oeqa/runtime/xorg.py
@@ -0,0 +1,16 @@
+import unittest
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeRuntimeTest.hasFeature("x11-base"):
+            skipModule("target doesn't have x11 in IMAGE_FEATURES")
+
+
+class XorgTest(oeRuntimeTest):
+
+    @testcase(1151)
+    @skipUnlessPassed('test_ssh')
+    def test_xorg_running(self):
+        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' |  grep -v xinit | grep [X]org')
+        self.assertEqual(status, 0, msg="Xorg does not appear to be running %s" % self.target.run(oeRuntimeTest.pscmd)[1])
diff --git a/meta/lib/oeqa/sdk/__init__.py b/meta/lib/oeqa/sdk/__init__.py
new file mode 100644
index 0000000..4cf3fa7
--- /dev/null
+++ b/meta/lib/oeqa/sdk/__init__.py
@@ -0,0 +1,3 @@
+# Enable other layers to have tests in the same named directory
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/meta/lib/oeqa/sdk/buildcvs.py b/meta/lib/oeqa/sdk/buildcvs.py
new file mode 100644
index 0000000..c7146fa
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildcvs.py
@@ -0,0 +1,25 @@
+from oeqa.oetest import oeSDKTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import SDKBuildProject
+
+class BuildCvsTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/cvs/", oeSDKTest.tc.sdkenv, oeSDKTest.tc.d,
+                        "http://ftp.gnu.org/non-gnu/cvs/source/feature/1.12.13/cvs-1.12.13.tar.bz2")
+        self.project.download_archive()
+
+    def test_cvs(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+        self.assertEqual(self.project.run_install(), 0,
+                        msg="Running make install failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/sdk/buildiptables.py b/meta/lib/oeqa/sdk/buildiptables.py
new file mode 100644
index 0000000..062e531
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildiptables.py
@@ -0,0 +1,26 @@
+from oeqa.oetest import oeSDKTest
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import SDKBuildProject
+
+
+class BuildIptablesTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/iptables/", oeSDKTest.tc.sdkenv, oeSDKTest.tc.d,
+                        "http://netfilter.org/projects/iptables/files/iptables-1.4.13.tar.bz2")
+        self.project.download_archive()
+
+    def test_iptables(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+        self.assertEqual(self.project.run_install(), 0,
+                        msg="Running make install failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/sdk/buildsudoku.py b/meta/lib/oeqa/sdk/buildsudoku.py
new file mode 100644
index 0000000..dea77c6
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildsudoku.py
@@ -0,0 +1,26 @@
+from oeqa.oetest import oeSDKTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.targetbuild import SDKBuildProject
+
+def setUpModule():
+    if not oeSDKTest.hasPackage("gtk\+"):
+        skipModule("Image doesn't have gtk+ in manifest")
+
+class SudokuTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        self.project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/sudoku/", oeSDKTest.tc.sdkenv, oeSDKTest.tc.d,
+                        "http://downloads.sourceforge.net/project/sudoku-savant/sudoku-savant/sudoku-savant-1.3/sudoku-savant-1.3.tar.bz2")
+        self.project.download_archive()
+
+    def test_sudoku(self):
+        self.assertEqual(self.project.run_configure(), 0,
+                        msg="Running configure failed")
+
+        self.assertEqual(self.project.run_make(), 0,
+                        msg="Running make failed")
+
+    @classmethod
+    def tearDownClass(self):
+        self.project.clean()
diff --git a/meta/lib/oeqa/sdk/gcc.py b/meta/lib/oeqa/sdk/gcc.py
new file mode 100644
index 0000000..67994b9
--- /dev/null
+++ b/meta/lib/oeqa/sdk/gcc.py
@@ -0,0 +1,36 @@
+import unittest
+import os
+import shutil
+import bb
+from oeqa.oetest import oeSDKTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    machine = oeSDKTest.tc.d.getVar("MACHINE", True)
+    if not oeSDKTest.hasHostPackage("packagegroup-cross-canadian-" + machine):
+        skipModule("SDK doesn't contain a cross-canadian toolchain")
+
+
+class GccCompileTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        for f in ['test.c', 'test.cpp', 'testmakefile']:
+            shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f)
+
+    def test_gcc_compile(self):
+        self._run('$CC %s/test.c -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir))
+
+    def test_gpp_compile(self):
+        self._run('$CXX %s/test.c -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir))
+
+    def test_gpp2_compile(self):
+        self._run('$CXX %s/test.cpp -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir))
+
+    def test_make(self):
+        self._run('cd %s; make -f testmakefile' % self.tc.sdktestdir)
+
+    @classmethod
+    def tearDownClass(self):
+        files = [self.tc.sdktestdir + f for f in ['test.c', 'test.cpp', 'test.o', 'test', 'testmakefile']]
+        for f in files:
+            bb.utils.remove(f)
diff --git a/meta/lib/oeqa/sdk/perl.py b/meta/lib/oeqa/sdk/perl.py
new file mode 100644
index 0000000..45f422e
--- /dev/null
+++ b/meta/lib/oeqa/sdk/perl.py
@@ -0,0 +1,28 @@
+import unittest
+import os
+import shutil
+import bb
+from oeqa.oetest import oeSDKTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeSDKTest.hasHostPackage("nativesdk-perl"):
+        skipModule("No perl package in the SDK")
+
+
+class PerlTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        for f in ['test.pl']:
+            shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f)
+        self.testfile = self.tc.sdktestdir + "test.pl"
+
+    def test_perl_exists(self):
+        self._run('which perl')
+
+    def test_perl_works(self):
+        self._run('perl %s/test.pl' % self.tc.sdktestdir)
+
+    @classmethod
+    def tearDownClass(self):
+        bb.utils.remove("%s/test.pl" % self.tc.sdktestdir)
diff --git a/meta/lib/oeqa/sdk/python.py b/meta/lib/oeqa/sdk/python.py
new file mode 100644
index 0000000..896fab4
--- /dev/null
+++ b/meta/lib/oeqa/sdk/python.py
@@ -0,0 +1,32 @@
+import unittest
+import os
+import shutil
+import bb
+from oeqa.oetest import oeSDKTest, skipModule
+from oeqa.utils.decorators import *
+
+def setUpModule():
+    if not oeSDKTest.hasHostPackage("nativesdk-python"):
+        skipModule("No python package in the SDK")
+
+
+class PythonTest(oeSDKTest):
+
+    @classmethod
+    def setUpClass(self):
+        for f in ['test.py']:
+            shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f)
+
+    def test_python_exists(self):
+        self._run('which python')
+
+    def test_python_stdout(self):
+        output = self._run('python %s/test.py' % self.tc.sdktestdir)
+        self.assertEqual(output.strip(), "the value of a is 0.01", msg="Incorrect output: %s" % output)
+
+    def test_python_testfile(self):
+        self._run('ls /tmp/testfile.python')
+
+    @classmethod
+    def tearDownClass(self):
+        bb.utils.remove("%s/test.py" % self.tc.sdktestdir)
+        bb.utils.remove("/tmp/testfile.python")
diff --git a/meta/lib/oeqa/selftest/__init__.py b/meta/lib/oeqa/selftest/__init__.py
new file mode 100644
index 0000000..3ad9513
--- /dev/null
+++ b/meta/lib/oeqa/selftest/__init__.py
@@ -0,0 +1,2 @@
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/meta/lib/oeqa/selftest/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/_sstatetests_noauto.py
new file mode 100644
index 0000000..fc9ae7e
--- /dev/null
+++ b/meta/lib/oeqa/selftest/_sstatetests_noauto.py
@@ -0,0 +1,95 @@
+import datetime
+import unittest
+import os
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.selftest.sstate import SStateBase
+
+
+class RebuildFromSState(SStateBase):
+
+    @classmethod
+    def setUpClass(self):
+        self.builddir = os.path.join(os.environ.get('BUILDDIR'))
+
+    def get_dep_targets(self, primary_targets):
+        found_targets = []
+        bitbake("-g " + ' '.join(map(str, primary_targets)))
+        with open(os.path.join(self.builddir, 'pn-buildlist'), 'r') as pnfile:
+            found_targets = pnfile.read().splitlines()
+        return found_targets
+
+    def configure_builddir(self, builddir):
+        os.mkdir(builddir)
+        self.track_for_cleanup(builddir)
+        os.mkdir(os.path.join(builddir, 'conf'))
+        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(builddir, 'conf/local.conf'))
+        config = {}
+        config['default_sstate_dir'] = "SSTATE_DIR ?= \"${TOPDIR}/sstate-cache\""
+        config['null_sstate_mirrors'] = "SSTATE_MIRRORS = \"\""
+        config['default_tmp_dir'] = "TMPDIR = \"${TOPDIR}/tmp\""
+        for key in config:
+            ftools.append_file(os.path.join(builddir, 'conf/selftest.inc'), config[key])
+        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/bblayers.conf'), os.path.join(builddir, 'conf/bblayers.conf'))
+        try:
+            shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/auto.conf'), os.path.join(builddir, 'conf/auto.conf'))
+        except:
+            pass
+
+    def hardlink_tree(self, src, dst):
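+        # recreate the directory layout of src under dst and hard-link every file into it, so a build can reuse the sstate cache without copying it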
+        os.mkdir(dst)
+        self.track_for_cleanup(dst)
+        for root, dirs, files in os.walk(src):
+            if root == src:
+                continue
+            os.mkdir(os.path.join(dst, root.split(src)[1][1:]))
+            for sstate_file in files:
+                os.link(os.path.join(root, sstate_file), os.path.join(dst, root.split(src)[1][1:], sstate_file))
+
+    def run_test_sstate_rebuild(self, primary_targets, relocate=False, rebuild_dependencies=False):
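+        # build the primary targets once to populate an sstate cache, snapshot it with hard links, then rebuild each target from that cache in a freshly configured build directory, recording cleansstate and rebuild failures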
+        buildA = os.path.join(self.builddir, 'buildA')
+        if relocate:
+            buildB = os.path.join(self.builddir, 'buildB')
+        else:
+            buildB = buildA
+
+        if rebuild_dependencies:
+            rebuild_targets = self.get_dep_targets(primary_targets)
+        else:
+            rebuild_targets = primary_targets
+
+        self.configure_builddir(buildA)
+        runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildA)) + 'bitbake  ' + ' '.join(map(str, primary_targets)), shell=True, executable='/bin/bash')
+        self.hardlink_tree(os.path.join(buildA, 'sstate-cache'), os.path.join(self.builddir, 'sstate-cache-buildA'))
+        shutil.rmtree(buildA)
+
+        failed_rebuild = []
+        failed_cleansstate = []
+        for target in rebuild_targets:
+            self.configure_builddir(buildB)
+            self.hardlink_tree(os.path.join(self.builddir, 'sstate-cache-buildA'), os.path.join(buildB, 'sstate-cache'))
+
+            result_cleansstate = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake -ccleansstate ' + target, ignore_status=True, shell=True, executable='/bin/bash')
+            if not result_cleansstate.status == 0:
+                failed_cleansstate.append(target)
+                shutil.rmtree(buildB)
+                continue
+
+            result_build = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake ' + target, ignore_status=True, shell=True, executable='/bin/bash')
+            if not result_build.status == 0:
+                failed_rebuild.append(target)
+
+            shutil.rmtree(buildB)
+
+        self.assertFalse(failed_rebuild, msg="The following recipes have failed to rebuild: %s" % ' '.join(map(str, failed_rebuild)))
+        self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
+
+    def test_sstate_relocation(self):
+        self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True)
+
+    def test_sstate_rebuild(self):
+        self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True)
diff --git a/meta/lib/oeqa/selftest/_toaster.py b/meta/lib/oeqa/selftest/_toaster.py
new file mode 100644
index 0000000..c424659
--- /dev/null
+++ b/meta/lib/oeqa/selftest/_toaster.py
@@ -0,0 +1,320 @@
+import unittest
+import os
+import sys
+import shlex, subprocess
+import urllib, commands, time, getpass, re, json
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../../', 'bitbake/lib/toaster')))
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toastermain.settings")
+
+import toastermain.settings
+from django.db.models import Q
+from orm.models import *
+from oeqa.utils.decorators import testcase
+
+class ToasterSetup(oeSelfTest):
+
+    def recipe_parse(self, file_path, var):
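+        # return the value of the first 'VAR = "value"' style assignment in file_path that mentions var, with the quotes stripped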
+        for line in open(file_path,'r'):
+            if line.find(var) > -1:
+                val = line.split(" = ")[1].replace("\"", "").strip()
+                return val
+
+    def fix_file_path(self, file_path):
+        if ":" in file_path:
+            file_path=file_path.split(":")[2]
+        return file_path
+
+class Toaster_DB_Tests(ToasterSetup):
+
+    # Check if build name is unique - tc_id=795
+    @testcase(795)
+    def test_Build_Unique_Name(self):
+        all_builds = Build.objects.all().count()
+        distinct_builds = Build.objects.values('id').distinct().count()
+        self.assertEqual(distinct_builds, all_builds, msg = 'Build name is not unique')
+
+    # Check if build cooker log path is unique - tc_id=819
+    @testcase(819)
+    def test_Build_Unique_Cooker_Log_Path(self):
+        distinct_path = Build.objects.values('cooker_log_path').distinct().count()
+        total_builds = Build.objects.values('id').count()
+        self.assertEqual(distinct_path, total_builds, msg = 'Build cooker log path is not unique')
+
+    # Check if task order is unique for one build - tc=824
+    @testcase(824)
+    def test_Task_Unique_Order(self):
+        builds = Build.objects.values('id')
+        cnt_err = []
+        for build in builds:
+            total_task_order = Task.objects.filter(build = build['id']).values('order').count()
+            distinct_task_order = Task.objects.filter(build = build['id']).values('order').distinct().count()
+            if (total_task_order != distinct_task_order):
+                cnt_err.append(build['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for build id: %s' % cnt_err)
+
+    # Check task order sequence for one build - tc=825
+    @testcase(825)
+    def test_Task_Order_Sequence(self):
+        builds = Build.objects.values('id')
+        cnt_err = []
+        for build in builds:
+            tasks = Task.objects.filter(Q(build = build['id']), ~Q(order = None), ~Q(task_name__contains = '_setscene')).values('id', 'order').order_by("order")
+            cnt_tasks = 0
+            for task in tasks:
+                cnt_tasks += 1
+                if (task['order'] != cnt_tasks):
+                    cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if disk_io matches the difference between EndTimeIO and StartTimeIO in build stats - tc=828
+    ### this needs to be updated ###
+    #def test_Task_Disk_IO_TC828(self):
+
+    # Check if outcome = 2 (SSTATE) then sstate_result must be 3 (RESTORED) - tc=832
+    @testcase(832)
+    def test_Task_If_Outcome_2_Sstate_Result_Must_Be_3(self):
+        tasks = Task.objects.filter(outcome = 2).values('id', 'sstate_result')
+        cnt_err = []
+        for task in tasks:
+            if (task['sstate_result'] != 3):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if outcome = 1 (COVERED) or 3 (EXISTING) then sstate_result must be 0 (SSTATE_NA) - tc=833
+    @testcase(833)
+    def test_Task_If_Outcome_1_3_Sstate_Result_Must_Be_0(self):
+        tasks = Task.objects.filter(outcome__in = (1, 3)).values('id', 'sstate_result')
+        cnt_err = []
+        for task in tasks:
+            if (task['sstate_result'] != 0):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if outcome is 0 (SUCCESS) or 4 (FAILED) then sstate_result must be 0 (NA), 1 (MISS) or 2 (FAILED) - tc=834
+    @testcase(834)
+    def test_Task_If_Outcome_0_4_Sstate_Result_Must_Be_0_1_2(self):
+        tasks = Task.objects.filter(outcome__in = (0, 4)).values('id', 'sstate_result')
+        cnt_err = []
+        for task in tasks:
+            if (task['sstate_result'] not in [0, 1, 2]):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if task_executed = TRUE (1), script_type must be 0 (CODING_NA), 2 (CODING_PYTHON), 3 (CODING_SHELL) - tc=891
+    @testcase(891)
+    def test_Task_If_Task_Executed_True_Script_Type_0_2_3(self):
+        tasks = Task.objects.filter(task_executed = 1).values('id', 'script_type')
+        cnt_err = []
+        for task in tasks:
+            if (task['script_type'] not in [0, 2, 3]):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if task_executed = TRUE (1), outcome must be 0 (SUCCESS) or 4 (FAILED) - tc=836
+    @testcase(836)
+    def test_Task_If_Task_Executed_True_Outcome_0_4(self):
+        tasks = Task.objects.filter(task_executed = 1).values('id', 'outcome')
+        cnt_err = []
+        for task in tasks:
+            if (task['outcome'] not in [0, 4]):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if task_executed = FALSE (0), script_type must be 0 - tc=890
+    @testcase(890)
+    def test_Task_If_Task_Executed_False_Script_Type_0(self):
+        tasks = Task.objects.filter(task_executed = 0).values('id', 'script_type')
+        cnt_err = []
+        for task in tasks:
+            if (task['script_type'] != 0):
+                cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Check if task_executed = FALSE (0) and build outcome = SUCCEEDED (0), task outcome must be 1 (COVERED), 2 (CACHED), 3 (PREBUILT), 5 (EMPTY) - tc=837
+    @testcase(837)
+    def test_Task_If_Task_Executed_False_Outcome_1_2_3_5(self):
+        builds = Build.objects.filter(outcome = 0).values('id')
+        cnt_err = []
+        for build in builds:
+            tasks = Task.objects.filter(build = build['id'], task_executed = 0).values('id', 'outcome')
+            for task in tasks:
+                if (task['outcome'] not in [1, 2, 3, 5]):
+                    cnt_err.append(task['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err)
+
+    # Key verification - tc=888
+    @testcase(888)
+    def test_Target_Installed_Package(self):
+        rows = Target_Installed_Package.objects.values('id', 'target_id', 'package_id')
+        cnt_err = []
+        for row in rows:
+            target = Target.objects.filter(id = row['target_id']).values('id')
+            package = Package.objects.filter(id = row['package_id']).values('id')
+            if (not target or not package):
+                cnt_err.append(row['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for target installed package id: %s' % cnt_err)
+
+    # Key verification - tc=889
+    @testcase(889)
+    def test_Task_Dependency(self):
+        rows = Task_Dependency.objects.values('id', 'task_id', 'depends_on_id')
+        cnt_err = []
+        for row in rows:
+            task_id = Task.objects.filter(id = row['task_id']).values('id')
+            depends_on_id = Task.objects.filter(id = row['depends_on_id']).values('id')
+            if (not task_id or not depends_on_id):
+                cnt_err.append(row['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for task dependency id: %s' % cnt_err)
+
+    # For builds with outcome = 0 (SUCCEEDED) and targets with is_image = true, check that
+    # the target image file_name is populated, that the file exists and that its size
+    # matches the recorded file_size value
+    ### Need to add the tc in the test run
+    @testcase(1037)
+    def test_Target_File_Name_Populated(self):
+        builds = Build.objects.filter(outcome = 0).values('id')
+        cnt_err = []
+        for build in builds:
+            targets = Target.objects.filter(build_id = build['id'], is_image = 1).values('id')
+            for target in targets:
+                target_files = Target_Image_File.objects.filter(target_id = target['id']).values('id', 'file_name', 'file_size')
+                for file_info in target_files:
+                    target_id = file_info['id']
+                    target_file_name = file_info['file_name']
+                    target_file_size = file_info['file_size']
+                    if (not target_file_name or not target_file_size):
+                        cnt_err.append(target_id)
+                    elif (not os.path.exists(target_file_name)):
+                        cnt_err.append(target_id)
+                    elif (os.path.getsize(target_file_name) != target_file_size):
+                        cnt_err.append(target_id)
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for target image file id: %s' % cnt_err)
+
+    # Key verification - tc=884
+    @testcase(884)
+    def test_Package_Dependency(self):
+        cnt_err = []
+        deps = Package_Dependency.objects.values('id', 'package_id', 'depends_on_id')
+        for dep in deps:
+            if (dep['package_id'] == dep['depends_on_id']):
+                cnt_err.append(dep['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package dependency id: %s' % cnt_err)
+
+    # Recipe key verification: a recipe must not depend on a recipe having the same name - tc=883
+    @testcase(883)
+    def test_Recipe_Dependency(self):
+        deps = Recipe_Dependency.objects.values('id', 'recipe_id', 'depends_on_id')
+        cnt_err = []
+        for dep in deps:
+            if (not dep['recipe_id'] or not dep['depends_on_id']):
+                cnt_err.append(dep['id'])
+            else:
+                name = Recipe.objects.filter(id = dep['recipe_id']).values('name')
+                dep_name = Recipe.objects.filter(id = dep['depends_on_id']).values('name')
+                if (name == dep_name):
+                    cnt_err.append(dep['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for recipe dependency id: %s' % cnt_err)
+
+    # Check if package name does not start with a number (0-9) - tc=846
+    @testcase(846)
+    def test_Package_Name_For_Number(self):
+        packages = Package.objects.filter(~Q(size = -1)).values('id', 'name')
+        cnt_err = []
+        for package in packages:
+            if (package['name'][0].isdigit() is True):
+                cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check if package version starts with a number (0-9) - tc=847
+    @testcase(847)
+    def test_Package_Version_Starts_With_Number(self):
+        packages = Package.objects.filter(~Q(size = -1)).values('id', 'version')
+        cnt_err = []
+        for package in packages:
+            if (package['version'][0].isdigit() is False):
+                cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check if package revision starts with 'r' - tc=848
+    @testcase(848)
+    def test_Package_Revision_Starts_With_r(self):
+        packages = Package.objects.filter(~Q(size = -1)).values('id', 'revision')
+        cnt_err = []
+        for package in packages:
+            if (not package['revision'].startswith("r")):
+                cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check the validity of the package build_id
+    ### TC must be added in test run
+    @testcase(1038)
+    def test_Package_Build_Id(self):
+        packages = Package.objects.filter(~Q(size = -1)).values('id', 'build_id')
+        cnt_err = []
+        for package in packages:
+            build_id = Build.objects.filter(id = package['build_id']).values('id')
+            if (not build_id):
+                cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check the validity of package recipe_id
+    ### TC must be added in test run
+    @testcase(1039)
+    def test_Package_Recipe_Id(self):
+        packages = Package.objects.filter(~Q(size = -1)).values('id', 'recipe_id')
+        cnt_err = []
+        for package in packages:
+            recipe_id = Recipe.objects.filter(id = package['recipe_id']).values('id')
+            if (not recipe_id):
+                cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check if package installed_size field is not null
+    ### TC must be added in test run
+    @testcase(1040)
+    def test_Package_Installed_Size_Not_NULL(self):
+        packages = Package.objects.filter(installed_size__isnull = True).values('id')
+        cnt_err = []
+        for package in packages:
+            cnt_err.append(package['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err)
+
+    # Check that all layer index URL requests return HTTP code 200 - tc=843
+    @testcase(843)
+    def test_Layers_Requests_Exit_Code(self):
+        layers = Layer.objects.values('id', 'layer_index_url')
+        cnt_err = []
+        for layer in layers:
+            resp = urllib.urlopen(layer['layer_index_url'])
+            if (resp.getcode() != 200):
+                cnt_err.append(layer['id'])
+        self.assertEqual(len(cnt_err), 0, msg = 'Errors for layer id: %s' % cnt_err)
+
+    # Check if django server starts regardless of the timezone set on the machine - tc=905
+    @testcase(905)
+    def test_Start_Django_Timezone(self):
+        current_path = os.getcwd()
+        zonefilelist = []
+        ZONEINFOPATH = '/usr/share/zoneinfo/'
+        os.chdir("../bitbake/lib/toaster/")
+        cnt_err = []
+        for filename in os.listdir(ZONEINFOPATH):
+            if os.path.isfile(os.path.join(ZONEINFOPATH, filename)):
+                zonefilelist.append(filename)
+        # only the first few timezones are checked to keep the test runtime reasonable
+        for zonefile in zonefilelist[:6]:
+            os.system("export TZ="+str(zonefile)+"; python manage.py runserver > /dev/null 2>&1 &")
+            time.sleep(3)
+            pid = subprocess.check_output("ps aux | grep '[/u]sr/bin/python manage.py runserver' | awk '{print $2}'", shell = True)
+            if pid:
+                os.system("kill -9 "+str(pid))
+            else:
+                cnt_err.append(zonefile)
+        os.chdir(current_path)
+        self.assertEqual(len(cnt_err), 0, msg = 'Django server did not start with timezone: %s' % cnt_err)
diff --git a/meta/lib/oeqa/selftest/base.py b/meta/lib/oeqa/selftest/base.py
new file mode 100644
index 0000000..b2faa66
--- /dev/null
+++ b/meta/lib/oeqa/selftest/base.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+
+# DESCRIPTION
+# Base class inherited by test classes in meta/lib/selftest
+
+import unittest
+import os
+import sys
+import shutil
+import logging
+import errno
+
+import oeqa.utils.ftools as ftools
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.utils.decorators import LogResults
+
+@LogResults
+class oeSelfTest(unittest.TestCase):
+
+    log = logging.getLogger("selftest.base")
+    longMessage = True
+
+    def __init__(self, methodName="runTest"):
+        self.builddir = os.environ.get("BUILDDIR")
+        self.localconf_path = os.path.join(self.builddir, "conf/local.conf")
+        self.testinc_path = os.path.join(self.builddir, "conf/selftest.inc")
+        self.local_bblayers_path = os.path.join(self.builddir, "conf/bblayers.conf")
+        self.testinc_bblayers_path = os.path.join(self.builddir, "conf/bblayers.inc")
+        self.testlayer_path = oeSelfTest.testlayer_path
+        self._extra_tear_down_commands = []
+        self._track_for_cleanup = []
+        super(oeSelfTest, self).__init__(methodName)
+
+    def setUp(self):
+        os.chdir(self.builddir)
+        # we don't know what the previous test left around in config or inc files
+        # if it failed so we need a fresh start
+        try:
+            os.remove(self.testinc_path)
+        except OSError as e:
+            if e.errno != errno.ENOENT:
+                raise
+        for root, _, files in os.walk(self.testlayer_path):
+            for f in files:
+                if f == 'test_recipe.inc':
+                    os.remove(os.path.join(root, f))
+        try:
+            os.remove(self.testinc_bblayers_path)
+        except OSError as e:
+            if e.errno != errno.ENOENT:
+                raise
+        # tests might need their own setup
+        # but if they overwrite this one they have to call
+        # super each time, so let's give them an alternative
+        self.setUpLocal()
+
+    def setUpLocal(self):
+        pass
+
+    def tearDown(self):
+        if self._extra_tear_down_commands:
+            failed_extra_commands = []
+            for command in self._extra_tear_down_commands:
+                result = runCmd(command, ignore_status=True)
+                if result.status != 0:
+                    failed_extra_commands.append(command)
+            if failed_extra_commands:
+                self.log.warning("tearDown commands have failed: %s" % ', '.join(map(str, failed_extra_commands)))
+                self.log.debug("Trying to move on.")
+            self._extra_tear_down_commands = []
+
+        if self._track_for_cleanup:
+            for path in self._track_for_cleanup:
+                if os.path.isdir(path):
+                    shutil.rmtree(path)
+                if os.path.isfile(path):
+                    os.remove(path)
+            self._track_for_cleanup = []
+
+        self.tearDownLocal()
+
+    def tearDownLocal(self):
+        pass
+
+    # add test specific commands to the tearDown method.
+    def add_command_to_tearDown(self, command):
+        self.log.debug("Adding command '%s' to tearDown for this test." % command)
+        self._extra_tear_down_commands.append(command)
+    # add test specific files or directories to be removed in the tearDown method
+    def track_for_cleanup(self, path):
+        self.log.debug("Adding path '%s' to be cleaned up when test is over" % path)
+        self._track_for_cleanup.append(path)
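+
+    # Illustrative example (targets assumed): a test that creates a temporary
+    # directory and dirties a recipe could register
+    #   self.track_for_cleanup(tempdir)
+    #   self.add_command_to_tearDown('bitbake -c clean m4-native')
+    # so that tearDown() removes the directory and then runs the clean command.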
+
+    # write to <builddir>/conf/selftest.inc
+    def write_config(self, data):
+        self.log.debug("Writing to: %s\n%s\n" % (self.testinc_path, data))
+        ftools.write_file(self.testinc_path, data)
+
+    # append to <builddir>/conf/selftest.inc
+    def append_config(self, data):
+        self.log.debug("Appending to: %s\n%s\n" % (self.testinc_path, data))
+        ftools.append_file(self.testinc_path, data)
+
+    # remove data from <builddir>/conf/selftest.inc
+    def remove_config(self, data):
+        self.log.debug("Removing from: %s\n\%s\n" % (self.testinc_path, data))
+        ftools.remove_from_file(self.testinc_path, data)
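+
+    # Illustrative example (values assumed): tests typically pair these helpers, e.g.
+    #   self.write_config('INHERIT += "buildhistory"')
+    #   self.append_config('BUILDHISTORY_COMMIT = "1"')
+    # and rely on setUp() removing conf/selftest.inc before the next test runs.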
+
+    # write to meta-selftest/recipes-test/<recipe>/test_recipe.inc
+    def write_recipeinc(self, recipe, data):
+        inc_file = os.path.join(self.testlayer_path, 'recipes-test', recipe, 'test_recipe.inc')
+        self.log.debug("Writing to: %s\n%s\n" % (inc_file, data))
+        ftools.write_file(inc_file, data)
+
+    # append data to meta-selftest/recipes-test/<recipe>/test_recipe.inc
+    def append_recipeinc(self, recipe, data):
+        inc_file = os.path.join(self.testlayer_path, 'recipes-test', recipe, 'test_recipe.inc')
+        self.log.debug("Appending to: %s\n%s\n" % (inc_file, data))
+        ftools.append_file(inc_file, data)
+
+    # remove data from meta-selftest/recipes-test/<recipe>/test_recipe.inc
+    def remove_recipeinc(self, recipe, data):
+        inc_file = os.path.join(self.testlayer_path, 'recipes-test', recipe, 'test_recipe.inc')
+        self.log.debug("Removing from: %s\n%s\n" % (inc_file, data))
+        ftools.remove_from_file(inc_file, data)
+
+    # delete meta-selftest/recipes-test/<recipe>/test_recipe.inc file
+    def delete_recipeinc(self, recipe):
+        inc_file = os.path.join(self.testlayer_path, 'recipes-test', recipe, 'test_recipe.inc')
+        self.log.debug("Deleting file: %s" % inc_file)
+        try:
+            os.remove(inc_file)
+        except OSError as e:
+            if e.errno != errno.ENOENT:
+                raise
+
+    # write to <builddir>/conf/bblayers.inc
+    def write_bblayers_config(self, data):
+        self.log.debug("Writing to: %s\n%s\n" % (self.testinc_bblayers_path, data))
+        ftools.write_file(self.testinc_bblayers_path, data)
+
+    # append to <builddir>/conf/bblayers.inc
+    def append_bblayers_config(self, data):
+        self.log.debug("Appending to: %s\n%s\n" % (self.testinc_bblayers_path, data))
+        ftools.append_file(self.testinc_bblayers_path, data)
+
+    # remove data from <builddir>/conf/bblayers.inc
+    def remove_bblayers_config(self, data):
+        self.log.debug("Removing from: %s\n\%s\n" % (self.testinc_bblayers_path, data))
+        ftools.remove_from_file(self.testinc_bblayers_path, data)
diff --git a/meta/lib/oeqa/selftest/bblayers.py b/meta/lib/oeqa/selftest/bblayers.py
new file mode 100644
index 0000000..20c17e4
--- /dev/null
+++ b/meta/lib/oeqa/selftest/bblayers.py
@@ -0,0 +1,62 @@
+import unittest
+import os
+import logging
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, get_bb_var
+from oeqa.utils.decorators import testcase
+
+class BitbakeLayers(oeSelfTest):
+
+    @testcase(756)
+    def test_bitbakelayers_showcrossdepends(self):
+        result = runCmd('bitbake-layers show-cross-depends')
+        self.assertTrue('aspell' in result.output, msg = "No dependencies were shown. bitbake-layers show-cross-depends output: %s" % result.output)
+
+    @testcase(83)
+    def test_bitbakelayers_showlayers(self):
+        result = runCmd('bitbake-layers show-layers')
+        self.assertTrue('meta-selftest' in result.output, msg = "No layers were shown. bitbake-layers show-layers output: %s" % result.output)
+
+    @testcase(93)
+    def test_bitbakelayers_showappends(self):
+        result = runCmd('bitbake-layers show-appends')
+        self.assertTrue('xcursor-transparent-theme_0.1.1.bbappend' in result.output, msg="xcursor-transparent-theme_0.1.1.bbappend file was not recognised.  bitbake-layers show-appends output: %s" % result.output)
+
+    @testcase(90)
+    def test_bitbakelayers_showoverlayed(self):
+        result = runCmd('bitbake-layers show-overlayed')
+        self.assertTrue('aspell' in result.output, msg="aspell overlayed recipe was not recognised bitbake-layers show-overlayed %s" % result.output)
+
+    @testcase(95)
+    def test_bitbakelayers_flatten(self):
+        testoutdir = os.path.join(self.builddir, 'test_bitbakelayers_flatten')
+        self.assertFalse(os.path.isdir(testoutdir), msg = "test_bitbakelayers_flatten should not exist at this point in time")
+        self.track_for_cleanup(testoutdir)
+        result = runCmd('bitbake-layers flatten %s' % testoutdir)
+        bb_file = os.path.join(testoutdir, 'recipes-graphics/xcursor-transparent-theme/xcursor-transparent-theme_0.1.1.bb')
+        self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
+        contents = ftools.read_file(bb_file)
+        find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
+        self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
+
+    @testcase(1195)
+    def test_bitbakelayers_add_remove(self):
+        test_layer = os.path.join(get_bb_var('COREBASE'), 'meta-skeleton')
+        result = runCmd('bitbake-layers show-layers')
+        self.assertNotIn('meta-skeleton', result.output, "This test cannot run with meta-skeleton in bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
+        result = runCmd('bitbake-layers add-layer %s' % test_layer)
+        result = runCmd('bitbake-layers show-layers')
+        self.assertIn('meta-skeleton', result.output, msg = "Something wrong happened. meta-skeleton layer was not added to conf/bblayers.conf.  bitbake-layers show-layers output: %s" % result.output)
+        result = runCmd('bitbake-layers remove-layer %s' % test_layer)
+        result = runCmd('bitbake-layers show-layers')
+        self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step.  bitbake-layers show-layers output: %s" % result.output)
+        result = runCmd('bitbake-layers add-layer %s' % test_layer)
+        result = runCmd('bitbake-layers show-layers')
+        self.assertIn('meta-skeleton', result.output, msg = "Something wrong happened. meta-skeleton layer was not added to conf/bblayers.conf.  bitbake-layers show-layers output: %s" % result.output)
+        result = runCmd('bitbake-layers remove-layer */meta-skeleton')
+        result = runCmd('bitbake-layers show-layers')
+        self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step.  bitbake-layers show-layers output: %s" % result.output)
diff --git a/meta/lib/oeqa/selftest/bbtests.py b/meta/lib/oeqa/selftest/bbtests.py
new file mode 100644
index 0000000..3d6860f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/bbtests.py
@@ -0,0 +1,201 @@
+import unittest
+import os
+import logging
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+
+class BitbakeTests(oeSelfTest):
+
+    @testcase(789)
+    def test_run_bitbake_from_dir_1(self):
+        os.chdir(os.path.join(self.builddir, 'conf'))
+        self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
+
+    @testcase(790)
+    def test_run_bitbake_from_dir_2(self):
+        my_env = os.environ.copy()
+        my_env['BBPATH'] = my_env['BUILDDIR']
+        os.chdir(os.path.dirname(os.environ['BUILDDIR']))
+        self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir")
+
+    @testcase(806)
+    def test_event_handler(self):
+        self.write_config("INHERIT += \"test_events\"")
+        result = bitbake('m4-native')
+        find_build_started = re.search("NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Preparing RunQueue", result.output)
+        find_build_completed = re.search("Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
+        self.assertTrue(find_build_started, msg = "Match failed in:\n%s"  % result.output)
+        self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
+        self.assertFalse('Test for bb.event.InvalidEvent' in result.output, msg = "\"Test for bb.event.InvalidEvent\" message found during bitbake process. bitbake output: %s" % result.output)
+
+    @testcase(103)
+    def test_local_sstate(self):
+        bitbake('m4-native -ccleansstate')
+        bitbake('m4-native')
+        bitbake('m4-native -cclean')
+        result = bitbake('m4-native')
+        find_setscene = re.search("m4-native.*do_.*_setscene", result.output)
+        self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output )
+
+    @testcase(105)
+    def test_bitbake_invalid_recipe(self):
+        result = bitbake('-b asdf', ignore_status=True)
+        self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
+
+    @testcase(107)
+    def test_bitbake_invalid_target(self):
+        result = bitbake('asdf', ignore_status=True)
+        self.assertTrue("ERROR: Nothing PROVIDES 'asdf'" in result.output, msg = "Though no 'asdf' target exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
+
+    @testcase(106)
+    def test_warnings_errors(self):
+        result = bitbake('-b asdf', ignore_status=True)
+        find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output)
+        find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output)
+        self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output)
+        self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output)
+
+    @testcase(108)
+    def test_invalid_patch(self):
+        self.write_recipeinc('man', 'SRC_URI += "file://man-1.5h1-make.patch"')
+        result = bitbake('man -c patch', ignore_status=True)
+        self.delete_recipeinc('man')
+        bitbake('-cclean man')
+        self.assertTrue("ERROR: Function failed: patch_do_patch" in result.output, msg = "Though no man-1.5h1-make.patch file exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
+
+    @testcase(163)
+    def test_force_task(self):
+        bitbake('m4-native')
+        self.add_command_to_tearDown('bitbake -c clean m4-native')
+        result = bitbake('-C compile m4-native')
+        look_for_tasks = ['do_compile', 'do_install', 'do_populate_sysroot']
+        for task in look_for_tasks:
+            find_task = re.search("m4-native.*%s" % task, result.output)
+            self.assertTrue(find_task, msg = "Couldn't find %s task. bitbake output %s" % (task, result.output))
+
+    @testcase(167)
+    def test_bitbake_g(self):
+        result = bitbake('-g core-image-full-cmdline')
+        for f in ['pn-buildlist', 'pn-depends.dot', 'package-depends.dot', 'task-depends.dot']:
+            self.addCleanup(os.remove, f)
+        self.assertTrue('NOTE: PN build list saved to \'pn-buildlist\'' in result.output, msg = "No dependency \"pn-buildlist\" file was generated for the given task target. bitbake output: %s" % result.output)
+        self.assertTrue('openssh' in ftools.read_file(os.path.join(self.builddir, 'pn-buildlist')), msg = "No \"openssh\" dependency found in pn-buildlist file.")
+
+    @testcase(899)
+    def test_image_manifest(self):
+        bitbake('core-image-minimal')
+        deploydir = get_bb_var("DEPLOY_DIR_IMAGE", target="core-image-minimal")
+        imagename = get_bb_var("IMAGE_LINK_NAME", target="core-image-minimal")
+        manifest = os.path.join(deploydir, imagename + ".manifest")
+        self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
+
+    @testcase(168)
+    def test_invalid_recipe_src_uri(self):
+        data = 'SRC_URI = "file://invalid"'
+        self.write_recipeinc('man', data)
+        self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+""")
+        bitbake('-ccleanall man')
+        result = bitbake('-c fetch man', ignore_status=True)
+        bitbake('-ccleanall man')
+        self.delete_recipeinc('man')
+        self.assertEqual(result.status, 1, msg="Command succeded when it should have failed. bitbake output: %s" % result.output)
+        self.assertTrue('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:' in result.output, msg = "\"invalid\" file \
+doesn't exist, yet no error message encountered. bitbake output: %s" % result.output)
+        self.assertTrue('ERROR: Function failed: Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.' in result.output, msg = "\"invalid\" file \
+doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
+
+    @testcase(171)
+    def test_rename_downloaded_file(self):
+        self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+""")
+        data = 'SRC_URI_append = ";downloadfilename=test-aspell.tar.gz"'
+        self.write_recipeinc('aspell', data)
+        bitbake('-ccleanall aspell')
+        result = bitbake('-c fetch aspell', ignore_status=True)
+        self.delete_recipeinc('aspell')
+        self.addCleanup(bitbake, '-ccleanall aspell')
+        self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
+        self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % str(get_bb_var("DL_DIR")))
+        self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % str(get_bb_var("DL_DIR")))
+
+    @testcase(1028)
+    def test_environment(self):
+        self.append_config("TEST_ENV=\"localconf\"")
+        self.addCleanup(self.remove_config, "TEST_ENV=\"localconf\"")
+        result = runCmd('bitbake -e | grep TEST_ENV=')
+        self.assertTrue('localconf' in result.output, msg = "bitbake didn't report any value for TEST_ENV variable. To test, run 'bitbake -e | grep TEST_ENV='")
+
+    @testcase(1029)
+    def test_dry_run(self):
+        result = runCmd('bitbake -n m4-native')
+        self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
+
+    @testcase(1030)
+    def test_just_parse(self):
+        result = runCmd('bitbake -p')
+        self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
+
+    @testcase(1031)
+    def test_version(self):
+        result = runCmd('bitbake -s | grep wget')
+        find = re.search("wget *:([0-9a-zA-Z\.\-]+)", result.output)
+        self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
+
+    @testcase(1032)
+    def test_prefile(self):
+        preconf = os.path.join(self.builddir, 'conf/prefile.conf')
+        self.track_for_cleanup(preconf)
+        ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
+        result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
+        self.assertTrue('prefile' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration. ")
+        self.append_config("TEST_PREFILE=\"localconf\"")
+        self.addCleanup(self.remove_config, "TEST_PREFILE=\"localconf\"")
+        result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
+        self.assertTrue('localconf' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration.")
+
+    @testcase(1033)
+    def test_postfile(self):
+        postconf = os.path.join(self.builddir, 'conf/postfile.conf')
+        self.track_for_cleanup(postconf)
+        ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
+        self.append_config("TEST_POSTFILE=\"localconf\"")
+        self.addCleanup(self.remove_config, "TEST_POSTFILE=\"localconf\"")
+        result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
+        self.assertTrue('postfile' in result.output, "Postconfigure file \"postfile.conf\"was not taken into consideration.")
+
+    @testcase(1034)
+    def test_checkuri(self):
+        result = runCmd('bitbake -c checkuri m4')
+        self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
+
+    @testcase(1035)
+    def test_continue(self):
+        self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
+SSTATE_DIR = \"${TOPDIR}/download-selftest\"
+""")
+        self.write_recipeinc('man',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
+        runCmd('bitbake -c cleanall man xcursor-transparent-theme')
+        result = runCmd('bitbake man xcursor-transparent-theme -k', ignore_status=True)
+        errorpos = result.output.find('ERROR: Function failed: do_fail_task')
+        manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
+        continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
+        self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
+
+    @testcase(1119)
+    def test_non_gplv3(self):
+        data = 'INCOMPATIBLE_LICENSE = "GPLv3"'
+        conf = os.path.join(self.builddir, 'conf/local.conf')
+        ftools.append_file(conf ,data)
+        self.addCleanup(ftools.remove_from_file, conf ,data)
+        result = bitbake('readline', ignore_status=True)
+        self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
+        self.assertFalse(os.path.isfile(os.path.join(self.builddir, 'tmp/deploy/licenses/readline/generic_GPLv3')))
+        self.assertTrue(os.path.isfile(os.path.join(self.builddir, 'tmp/deploy/licenses/readline/generic_GPLv2')))
diff --git a/meta/lib/oeqa/selftest/buildhistory.py b/meta/lib/oeqa/selftest/buildhistory.py
new file mode 100644
index 0000000..d8cae46
--- /dev/null
+++ b/meta/lib/oeqa/selftest/buildhistory.py
@@ -0,0 +1,45 @@
+import unittest
+import os
+import re
+import shutil
+import datetime
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
+
+
+class BuildhistoryBase(oeSelfTest):
+
+    def config_buildhistory(self, tmp_bh_location=False):
+        if ('buildhistory' not in get_bb_var('USER_CLASSES')) and ('buildhistory' not in get_bb_var('INHERIT')):
+            add_buildhistory_config = 'INHERIT += "buildhistory"\nBUILDHISTORY_COMMIT = "1"'
+            self.append_config(add_buildhistory_config)
+
+        if tmp_bh_location:
+            # Using a temporary buildhistory location for testing
+            tmp_bh_dir = os.path.join(self.builddir, "tmp_buildhistory_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+            buildhistory_dir_config = "BUILDHISTORY_DIR = \"%s\"" % tmp_bh_dir
+            self.append_config(buildhistory_dir_config)
+            self.track_for_cleanup(tmp_bh_dir)
+
+    def run_buildhistory_operation(self, target, global_config='', target_config='', change_bh_location=False, expect_error=False, error_regex=''):
+        tmp_bh_location = bool(change_bh_location)
+        self.config_buildhistory(tmp_bh_location)
+
+        self.append_config(global_config)
+        self.append_recipeinc(target, target_config)
+        bitbake("-cclean %s" % target)
+        result = bitbake(target, ignore_status=True)
+        self.remove_config(global_config)
+        self.remove_recipeinc(target, target_config)
+
+        if expect_error:
+            self.assertEqual(result.status, 1, msg="Error expected for global config '%s' and target config '%s'" % (global_config, target_config))
+            search_for_error = re.search(error_regex, result.output)
+            self.assertTrue(search_for_error, msg="Could not find desired error in output: %s" % error_regex)
+        else:
+            self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output))
diff --git a/meta/lib/oeqa/selftest/buildoptions.py b/meta/lib/oeqa/selftest/buildoptions.py
new file mode 100644
index 0000000..483803b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/buildoptions.py
@@ -0,0 +1,149 @@
+import unittest
+import os
+import logging
+import re
+import glob as g
+import pexpect as p
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.selftest.buildhistory import BuildhistoryBase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+import oeqa.utils.ftools as ftools
+from oeqa.utils.decorators import testcase
+
+class ImageOptionsTests(oeSelfTest):
+
+    @testcase(761)
+    def test_incremental_image_generation(self):
+        image_pkgtype = get_bb_var("IMAGE_PKGTYPE")
+        if image_pkgtype != 'rpm':
+            self.skipTest('Not using RPM as main package format')
+        bitbake("-c cleanall core-image-minimal")
+        self.write_config('INC_RPM_IMAGE_GEN = "1"')
+        self.append_config('IMAGE_FEATURES += "ssh-server-openssh"')
+        bitbake("core-image-minimal")
+        log_data_file = os.path.join(get_bb_var("WORKDIR", "core-image-minimal"), "temp/log.do_rootfs")
+        log_data_created = ftools.read_file(log_data_file)
+        incremental_created = re.search("NOTE: load old install solution for incremental install\nNOTE: old install solution not exist\nNOTE: creating new install solution for incremental install(\n.*)*NOTE: Installing the following packages:.*packagegroup-core-ssh-openssh", log_data_created)
+        self.remove_config('IMAGE_FEATURES += "ssh-server-openssh"')
+        self.assertTrue(incremental_created, msg = "Match failed in:\n%s" % log_data_created)
+        bitbake("core-image-minimal")
+        log_data_removed = ftools.read_file(log_data_file)
+        incremental_removed = re.search("NOTE: load old install solution for incremental install\nNOTE: creating new install solution for incremental install(\n.*)*NOTE: incremental removed:.*openssh-sshd-.*", log_data_removed)
+        self.assertTrue(incremental_removed, msg = "Match failed in:\n%s" % log_data_removed)
+
+    @testcase(925)
+    def test_rm_old_image(self):
+        bitbake("core-image-minimal")
+        deploydir = get_bb_var("DEPLOY_DIR_IMAGE", target="core-image-minimal")
+        imagename = get_bb_var("IMAGE_LINK_NAME", target="core-image-minimal")
+        deploydir_files = os.listdir(deploydir)
+        track_original_files = []
+        for image_file in deploydir_files:
+            if imagename in image_file and os.path.islink(os.path.join(deploydir, image_file)):
+                track_original_files.append(os.path.realpath(os.path.join(deploydir, image_file)))
+        self.append_config("RM_OLD_IMAGE = \"1\"")
+        bitbake("-C rootfs core-image-minimal")
+        deploydir_files = os.listdir(deploydir)
+        remaining_not_expected = [path for path in track_original_files if os.path.basename(path) in deploydir_files]
+        self.assertFalse(remaining_not_expected, msg="\nThe following image files were not removed: %s" % ', '.join(map(str, remaining_not_expected)))
+
+    @testcase(286)
+    def test_ccache_tool(self):
+        bitbake("ccache-native")
+        self.assertTrue(os.path.isfile(os.path.join(get_bb_var('STAGING_BINDIR_NATIVE', 'ccache-native'), "ccache")), msg = "No ccache found under %s" % str(get_bb_var('STAGING_BINDIR_NATIVE', 'ccache-native')))
+        self.write_config('INHERIT += "ccache"')
+        bitbake("m4 -c cleansstate")
+        bitbake("m4 -c compile")
+        self.addCleanup(bitbake, 'ccache-native -ccleansstate')
+        res = runCmd("grep ccache %s" % (os.path.join(get_bb_var("WORKDIR","m4"),"temp/log.do_compile")), ignore_status=True)
+        self.assertEqual(0, res.status, msg="No match for ccache in m4 log.do_compile. For further details: %s" % os.path.join(get_bb_var("WORKDIR","m4"),"temp/log.do_compile"))
+
+
+class DiskMonTest(oeSelfTest):
+
+    @testcase(277)
+    def test_stoptask_behavior(self):
+        self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"')
+        res = bitbake("m4", ignore_status = True)
+        self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
+        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+        self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"')
+        res = bitbake("m4", ignore_status = True)
+        self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediately. Disk monitor is set to ABORT: %s" % res.output)
+        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+        self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"')
+        res = bitbake("m4")
+        self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed since disk monitor is set to WARN: %s" % res.output)
+
+class SanityOptionsTest(oeSelfTest):
+
+    @testcase(927)
+    def test_options_warnqa_errorqa_switch(self):
+        bitbake("xcursor-transparent-theme -ccleansstate")
+
+        if "packages-list" not in get_bb_var("ERROR_QA"):
+            self.write_config("ERROR_QA_append = \" packages-list\"")
+
+        self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
+        res = bitbake("xcursor-transparent-theme", ignore_status=True)
+        self.delete_recipeinc('xcursor-transparent-theme')
+        self.assertTrue("ERROR: QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors." in res.output, msg=res.output)
+        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
+        self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
+        self.append_config('ERROR_QA_remove = "packages-list"')
+        self.append_config('WARN_QA_append = " packages-list"')
+        bitbake("xcursor-transparent-theme -ccleansstate")
+        res = bitbake("xcursor-transparent-theme")
+        self.delete_recipeinc('xcursor-transparent-theme')
+        self.assertTrue("WARNING: QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors." in res.output, msg=res.output)
+
+    @testcase(278)
+    def test_sanity_userspace_dependency(self):
+        self.append_config('WARN_QA_append = " unsafe-references-in-binaries unsafe-references-in-scripts"')
+        bitbake("-ccleansstate gzip nfs-utils")
+        res = bitbake("gzip nfs-utils")
+        self.assertTrue("WARNING: QA Issue: gzip" in res.output, "WARNING: QA Issue: gzip message is not present in bitbake's output: %s" % res.output)
+        self.assertTrue("WARNING: QA Issue: nfs-utils" in res.output, "WARNING: QA Issue: nfs-utils message is not present in bitbake's output: %s" % res.output)
+
+class BuildhistoryTests(BuildhistoryBase):
+
+    @testcase(293)
+    def test_buildhistory_basic(self):
+        self.run_buildhistory_operation('xcursor-transparent-theme')
+        self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")
+
+    @testcase(294)
+    def test_buildhistory_buildtime_pr_backwards(self):
+        self.add_command_to_tearDown('cleanup-workdir')
+        target = 'xcursor-transparent-theme'
+        error = "ERROR: QA Issue: Package version for package %s went backwards which would break package feeds from (.*-r1 to .*-r0)" % target
+        self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
+        self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
+
+class BuildImagesTest(oeSelfTest):
+    @testcase(563)
+    def test_directfb(self):
+        """
+        This method is used to test the build of directfb image for arm arch.
+        In essence we build a coreimagedirectfb and test the exitcode of bitbake that in case of success is 0.
+        """
+        self.add_command_to_tearDown('cleanupworkdir')
+        self.write_config("DISTRO_FEATURES_remove = \"x11\"\nDISTRO_FEATURES_append = \" directfb\"\nMACHINE ??= \"qemuarm\"")
+        res = bitbake("core-image-directfb", ignore_status=True)
+        self.assertEqual(res.status, 0, "\ncoreimagedirectfb failed to build. Please check logs for further details.\nbitbake output %s" % res.output)
+
+class ArchiverTest(oeSelfTest):
+    @testcase(926)
+    def test_arch_work_dir_and_export_source(self):
+        """
+        Test for archiving the work directory and exporting the source files.
+        """
+        self.add_command_to_tearDown('cleanup-workdir')
+        self.write_config("INHERIT = \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"")
+        res = bitbake("xcursor-transparent-theme", ignore_status=True)
+        self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
+        pkgs_path = g.glob(str(self.builddir) + "/tmp/deploy/sources/allarch*/xcurs*")
+        src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
+        tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz"
+        self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under tmp/deploy/sources/allarch*/xcursor*")
diff --git a/meta/lib/oeqa/selftest/devtool.py b/meta/lib/oeqa/selftest/devtool.py
new file mode 100644
index 0000000..6e731d6
--- /dev/null
+++ b/meta/lib/oeqa/selftest/devtool.py
@@ -0,0 +1,971 @@
+import unittest
+import os
+import logging
+import re
+import shutil
+import tempfile
+import glob
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, runqemu
+from oeqa.utils.decorators import testcase
+
+class DevtoolBase(oeSelfTest):
+
+    def _test_recipe_contents(self, recipefile, checkvars, checkinherits):
+        with open(recipefile, 'r') as f:
+            for line in f:
+                if '=' in line:
+                    splitline = line.split('=', 1)
+                    var = splitline[0].rstrip()
+                    value = splitline[1].strip().strip('"')
+                    if var in checkvars:
+                        needvalue = checkvars.pop(var)
+                        self.assertEqual(value, needvalue, 'values for %s do not match' % var)
+                if line.startswith('inherit '):
+                    inherits = line.split()[1:]
+
+        self.assertEqual(checkvars, {}, 'Some variables not found: %s' % checkvars)
+
+        for inherit in checkinherits:
+            self.assertIn(inherit, inherits, 'Missing inherit of %s' % inherit)
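+
+    # Illustrative example (values assumed): calling _test_recipe_contents with
+    #   checkvars = {'S': '${WORKDIR}/git', 'PV': '1.0+git${SRCPV}'}
+    #   checkinherits = ['cmake']
+    # asserts that those assignments and an 'inherit cmake' line are present in
+    # the generated recipe.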
+
+    def _check_bbappend(self, testrecipe, recipefile, appenddir):
+        result = runCmd('bitbake-layers show-appends', cwd=self.builddir)
+        resultlines = result.output.splitlines()
+        inrecipe = False
+        bbappends = []
+        bbappendfile = None
+        for line in resultlines:
+            if inrecipe:
+                if line.startswith(' '):
+                    bbappends.append(line.strip())
+                else:
+                    break
+            elif line == '%s:' % os.path.basename(recipefile):
+                inrecipe = True
+        self.assertLessEqual(len(bbappends), 2, '%s recipe is being bbappended by another layer - bbappends found:\n  %s' % (testrecipe, '\n  '.join(bbappends)))
+        for bbappend in bbappends:
+            if bbappend.startswith(appenddir):
+                bbappendfile = bbappend
+                break
+        else:
+            self.fail('bbappend for recipe %s does not seem to be created in test layer' % testrecipe)
+        return bbappendfile
+
+    def _create_temp_layer(self, templayerdir, addlayer, templayername, priority=999, recipepathspec='recipes-*/*'):
+        create_temp_layer(templayerdir, templayername, priority, recipepathspec)
+        if addlayer:
+            self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
+            result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
+
+    def _process_ls_output(self, output):
+        """
+        Convert ls -l output to a format we can reasonably compare from one context
+        to another (e.g. from host to target)
+        """
+        filelist = []
+        for line in output.splitlines():
+            splitline = line.split()
+            # Remove trailing . on perms
+            splitline[0] = splitline[0].rstrip('.')
+            # Remove leading . on paths
+            splitline[-1] = splitline[-1].lstrip('.')
+            # Drop fields we don't want to compare
+            del splitline[7]
+            del splitline[6]
+            del splitline[5]
+            del splitline[4]
+            del splitline[1]
+            filelist.append(' '.join(splitline))
+        return filelist
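+
+    # Illustrative example (ls output assumed): for a line such as
+    #   '-rw-r--r--. 1 root root 4096 Jan  1 00:00 ./etc/hostname'
+    # the trailing '.' is stripped from the permissions, the leading '.' from the
+    # path, and the link count, size and date fields are dropped, yielding
+    #   '-rw-r--r-- root root /etc/hostname'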
+
+
+class DevtoolTests(DevtoolBase):
+
+    @testcase(1158)
+    def test_create_workspace(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        result = runCmd('bitbake-layers show-layers')
+        self.assertTrue('/workspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
+        # Try creating a workspace layer with a specific path
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        result = runCmd('devtool create-workspace %s' % tempdir)
+        self.assertTrue(os.path.isfile(os.path.join(tempdir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
+        result = runCmd('bitbake-layers show-layers')
+        self.assertIn(tempdir, result.output)
+        # Try creating a workspace layer with the default path
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool create-workspace')
+        self.assertTrue(os.path.isfile(os.path.join(workspacedir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
+        result = runCmd('bitbake-layers show-layers')
+        self.assertNotIn(tempdir, result.output)
+        self.assertIn(workspacedir, result.output)
+
+    @testcase(1159)
+    def test_devtool_add(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Fetch source
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        url = 'http://www.ivarch.com/programs/sources/pv-1.5.3.tar.bz2'
+        result = runCmd('wget %s' % url, cwd=tempdir)
+        result = runCmd('tar xfv pv-1.5.3.tar.bz2', cwd=tempdir)
+        srcdir = os.path.join(tempdir, 'pv-1.5.3')
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure')), 'Unable to find configure script in source directory')
+        # Test devtool add
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake -c cleansstate pv')
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool add pv %s' % srcdir)
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn('pv', result.output)
+        self.assertIn(srcdir, result.output)
+        # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+        bitbake('pv -c cleansstate')
+        # Test devtool build
+        result = runCmd('devtool build pv')
+        installdir = get_bb_var('D', 'pv')
+        self.assertTrue(installdir, 'Could not query installdir variable')
+        bindir = get_bb_var('bindir', 'pv')
+        self.assertTrue(bindir, 'Could not query bindir variable')
+        if bindir[0] == '/':
+            bindir = bindir[1:]
+        self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
+
+    @testcase(1162)
+    def test_devtool_add_library(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # We don't have the ability to pick up this dependency automatically yet...
+        bitbake('libusb1')
+        # Fetch source
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        url = 'http://www.intra2net.com/en/developer/libftdi/download/libftdi1-1.1.tar.bz2'
+        result = runCmd('wget %s' % url, cwd=tempdir)
+        result = runCmd('tar xfv libftdi1-1.1.tar.bz2', cwd=tempdir)
+        srcdir = os.path.join(tempdir, 'libftdi1-1.1')
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'CMakeLists.txt')), 'Unable to find CMakeLists.txt in source directory')
+        # Test devtool add (and use -V so we test that too)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool add libftdi %s -V 1.1' % srcdir)
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn('libftdi', result.output)
+        self.assertIn(srcdir, result.output)
+        # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+        bitbake('libftdi -c cleansstate')
+        # Test devtool build
+        result = runCmd('devtool build libftdi')
+        staging_libdir = get_bb_var('STAGING_LIBDIR', 'libftdi')
+        self.assertTrue(staging_libdir, 'Could not query STAGING_LIBDIR variable')
+        self.assertTrue(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), "libftdi binary not found in STAGING_LIBDIR. Output of devtool build libftdi %s" % result.output)
+        # Test devtool reset
+        stampprefix = get_bb_var('STAMP', 'libftdi')
+        result = runCmd('devtool reset libftdi')
+        result = runCmd('devtool status')
+        self.assertNotIn('libftdi', result.output)
+        self.assertTrue(stampprefix, 'Unable to get STAMP value for recipe libftdi')
+        matches = glob.glob(stampprefix + '*')
+        self.assertFalse(matches, 'Stamp files exist for recipe libftdi that should have been cleaned')
+        self.assertFalse(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), 'libftdi binary still found in STAGING_LIBDIR after cleaning')
+
+    @testcase(1160)
+    def test_devtool_add_fetch(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Fetch source
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        testver = '0.23'
+        url = 'https://pypi.python.org/packages/source/M/MarkupSafe/MarkupSafe-%s.tar.gz' % testver
+        testrecipe = 'python-markupsafe'
+        srcdir = os.path.join(tempdir, testrecipe)
+        # Test devtool add
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. %s' % result.output)
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(srcdir, result.output)
+        # Check recipe
+        recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('%s.bb' % testrecipe, recipefile, 'Recipe file incorrectly named')
+        checkvars = {}
+        checkvars['S'] = '${WORKDIR}/MarkupSafe-%s' % testver
+        checkvars['SRC_URI'] = url
+        self._test_recipe_contents(recipefile, checkvars, [])
+        # Try with version specified
+        result = runCmd('devtool reset -n %s' % testrecipe)
+        shutil.rmtree(srcdir)
+        result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, testver))
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(srcdir, result.output)
+        # Check recipe
+        recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
+        checkvars = {}
+        checkvars['S'] = '${WORKDIR}/MarkupSafe-${PV}'
+        checkvars['SRC_URI'] = url.replace(testver, '${PV}')
+        self._test_recipe_contents(recipefile, checkvars, [])
+
+    @testcase(1161)
+    def test_devtool_add_fetch_git(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Fetch source
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        url = 'git://git.yoctoproject.org/libmatchbox'
+        checkrev = '462f0652055d89c648ddd54fd7b03f175c2c6973'
+        testrecipe = 'libmatchbox2'
+        srcdir = os.path.join(tempdir, testrecipe)
+        # Test devtool add
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created: %s' % result.output)
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure.ac in source directory')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(srcdir, result.output)
+        # Check recipe
+        recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
+        checkvars = {}
+        checkvars['S'] = '${WORKDIR}/git'
+        checkvars['PV'] = '1.0+git${SRCPV}'
+        checkvars['SRC_URI'] = url
+        checkvars['SRCREV'] = '${AUTOREV}'
+        self._test_recipe_contents(recipefile, checkvars, [])
+        # Try with revision and version specified
+        result = runCmd('devtool reset -n %s' % testrecipe)
+        shutil.rmtree(srcdir)
+        url_rev = '%s;rev=%s' % (url, checkrev)
+        result = runCmd('devtool add %s %s -f "%s" -V 1.5' % (testrecipe, srcdir, url_rev))
+        self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure.ac in source directory')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(srcdir, result.output)
+        # Check recipe
+        recipefile = get_bb_var('FILE', testrecipe)
+        self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
+        checkvars = {}
+        checkvars['S'] = '${WORKDIR}/git'
+        checkvars['PV'] = '1.5+git${SRCPV}'
+        checkvars['SRC_URI'] = url
+        checkvars['SRCREV'] = checkrev
+        self._test_recipe_contents(recipefile, checkvars, [])
+
+    @testcase(1164)
+    def test_devtool_modify(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Clean up anything in the workdir/sysroot/sstate cache
+        bitbake('mdadm -c cleansstate')
+        # Try modifying a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        self.add_command_to_tearDown('bitbake -c clean mdadm')
+        result = runCmd('devtool modify mdadm -x %s' % tempdir)
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), 'Extracted source could not be found')
+        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        matches = glob.glob(os.path.join(workspacedir, 'appends', 'mdadm_*.bbappend'))
+        self.assertTrue(matches, 'bbappend not created %s' % result.output)
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn('mdadm', result.output)
+        self.assertIn(tempdir, result.output)
+        # Check git repo
+        result = runCmd('git status --porcelain', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Try building
+        bitbake('mdadm')
+        # Try making (minor) modifications to the source
+        result = runCmd("sed -i 's!^\.TH.*!.TH MDADM 8 \"\" v9.999-custom!' %s" % os.path.join(tempdir, 'mdadm.8.in'))
+        bitbake('mdadm -c package')
+        pkgd = get_bb_var('PKGD', 'mdadm')
+        self.assertTrue(pkgd, 'Could not query PKGD variable')
+        mandir = get_bb_var('mandir', 'mdadm')
+        self.assertTrue(mandir, 'Could not query mandir variable')
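+        # mandir is an absolute path; strip the leading '/' so it can be joined under PKGD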
+        if mandir[0] == '/':
+            mandir = mandir[1:]
+        with open(os.path.join(pkgd, mandir, 'man8', 'mdadm.8'), 'r') as f:
+            for line in f:
+                if line.startswith('.TH'):
+                    self.assertEqual(line.rstrip(), '.TH MDADM 8 "" v9.999-custom', 'man file not modified; searched file path: %s' % os.path.join(pkgd, mandir, 'man8', 'mdadm.8'))
+        # Test devtool reset
+        stampprefix = get_bb_var('STAMP', 'mdadm')
+        result = runCmd('devtool reset mdadm')
+        result = runCmd('devtool status')
+        self.assertNotIn('mdadm', result.output)
+        self.assertTrue(stampprefix, 'Unable to get STAMP value for recipe mdadm')
+        matches = glob.glob(stampprefix + '*')
+        self.assertFalse(matches, 'Stamp files exist for recipe mdadm that should have been cleaned')
+
+    @testcase(1166)
+    def test_devtool_modify_invalid(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Try modifying some recipes
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+        testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split()
+        # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
+        result = runCmd('bitbake-layers show-recipes gcc-source*')
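+        # Recipes are listed after a '===' separator; unindented lines are 'recipename:' entries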
+        reading = False
+        for line in result.output.splitlines():
+            if line.startswith('=='):
+                reading = True
+            elif reading and not line.startswith(' '):
+                testrecipes.append(line.split(':')[0])
+        for testrecipe in testrecipes:
+            # Check it's a valid recipe
+            bitbake('%s -e' % testrecipe)
+            # devtool extract should fail
+            result = runCmd('devtool extract %s %s' % (testrecipe, os.path.join(tempdir, testrecipe)), ignore_status=True)
+            self.assertNotEqual(result.status, 0, 'devtool extract on %s should have failed. devtool output: %s' % (testrecipe, result.output))
+            self.assertNotIn('Fetching ', result.output, 'devtool extract on %s should have errored out before trying to fetch' % testrecipe)
+            self.assertIn('ERROR: ', result.output, 'devtool extract on %s should have given an ERROR' % testrecipe)
+            # devtool modify should fail
+            result = runCmd('devtool modify %s -x %s' % (testrecipe, os.path.join(tempdir, testrecipe)), ignore_status=True)
+            self.assertNotEqual(result.status, 0, 'devtool modify on %s should have failed. devtool output: %s' %  (testrecipe, result.output))
+            self.assertIn('ERROR: ', result.output, 'devtool modify on %s should have given an ERROR' % testrecipe)
+
+    def test_devtool_modify_native(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Try modifying some recipes
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+        bbclassextended = False
+        inheritnative = False
+        testrecipes = 'mtools-native apt-native desktop-file-utils-native'.split()
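+        # This list should contain at least one recipe that is BBCLASSEXTENDed to native and one that
+        # inherits native directly, so both code paths through devtool are exercised (checked below)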
+        for testrecipe in testrecipes:
+            checkextend = 'native' in (get_bb_var('BBCLASSEXTEND', testrecipe) or '').split()
+            if not bbclassextended:
+                bbclassextended = checkextend
+            if not inheritnative:
+                inheritnative = not checkextend
+            result = runCmd('devtool modify %s -x %s' % (testrecipe, os.path.join(tempdir, testrecipe)))
+            self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool modify output: %s' % result.output)
+            result = runCmd('devtool build %s' % testrecipe)
+            self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool build output: %s' % result.output)
+            result = runCmd('devtool reset %s' % testrecipe)
+            self.assertNotIn('ERROR: ', result.output, 'ERROR in devtool reset output: %s' % result.output)
+
+        self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+        self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
+
+
+    @testcase(1165)
+    def test_devtool_modify_git(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'mkelfimage'
+        src_uri = get_bb_var('SRC_URI', testrecipe)
+        self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+        # Clean up anything in the workdir/sysroot/sstate cache
+        bitbake('%s -c cleansstate' % testrecipe)
+        # Try modifying a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), 'Extracted source could not be found')
+        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. devtool output: %s' % result.output)
+        matches = glob.glob(os.path.join(workspacedir, 'appends', 'mkelfimage_*.bbappend'))
+        self.assertTrue(matches, 'bbappend not created')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(tempdir, result.output)
+        # Check git repo
+        result = runCmd('git status --porcelain', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Try building
+        bitbake(testrecipe)
+
+    @testcase(1167)
+    def test_devtool_modify_localfiles(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'lighttpd'
+        src_uri = (get_bb_var('SRC_URI', testrecipe) or '').split()
+        foundlocal = False
+        for item in src_uri:
+            if item.startswith('file://') and '.patch' not in item:
+                foundlocal = True
+                break
+        self.assertTrue(foundlocal, 'This test expects the %s recipe to fetch local files and it seems that it no longer does' % testrecipe)
+        # Clean up anything in the workdir/sysroot/sstate cache
+        bitbake('%s -c cleansstate' % testrecipe)
+        # Try modifying a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'configure.ac')), 'Extracted source could not be found')
+        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        matches = glob.glob(os.path.join(workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+        self.assertTrue(matches, 'bbappend not created')
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertIn(testrecipe, result.output)
+        self.assertIn(tempdir, result.output)
+        # Try building
+        bitbake(testrecipe)
+
+    @testcase(1169)
+    def test_devtool_update_recipe(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'minicom'
+        recipefile = get_bb_var('FILE', testrecipe)
+        src_uri = get_bb_var('SRC_URI', testrecipe)
+        self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # First, modify a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        # (don't bother with cleaning the recipe on teardown, we won't be building it)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        # Check git repo
+        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+        result = runCmd('git status --porcelain', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Add a couple of commits
+        # FIXME: this only tests adding, need to also test update and remove
+        result = runCmd('echo "Additional line" >> README', cwd=tempdir)
+        result = runCmd('git commit -a -m "Change the README"', cwd=tempdir)
+        result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
+        result = runCmd('git add devtool-new-file', cwd=tempdir)
+        result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
+        self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+        result = runCmd('devtool update-recipe %s' % testrecipe)
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertNotEqual(result.output.strip(), "", '%s recipe should be modified' % testrecipe)
+        status = result.output.splitlines()
+        self.assertEqual(len(status), 3, 'Fewer/more files modified than expected. Entire status:\n%s' % result.output)
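+        # The two generated patches should show as untracked ('??') and the recipe file itself as modified (' M')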
+        for line in status:
+            if line.endswith('0001-Change-the-README.patch'):
+                self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
+            elif line.endswith('0002-Add-a-new-file.patch'):
+                self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
+            elif re.search('%s_[^_]*\.bb$' % testrecipe, line):
+                self.assertEqual(line[:3], ' M ', 'Unexpected status in line: %s' % line)
+            else:
+                raise AssertionError('Unexpected modified file in status: %s' % line)
+
+    @testcase(1172)
+    def test_devtool_update_recipe_git(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'mtd-utils'
+        recipefile = get_bb_var('FILE', testrecipe)
+        src_uri = get_bb_var('SRC_URI', testrecipe)
+        self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+        patches = []
+        for entry in src_uri.split():
+            if entry.startswith('file://') and entry.endswith('.patch'):
+                patches.append(entry[7:].split(';')[0])
+        self.assertGreater(len(patches), 0, 'The %s recipe does not appear to contain any patches, so this test will not be effective' % testrecipe)
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # First, modify a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        # (don't bother with cleaning the recipe on teardown, we won't be building it)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        # Check git repo
+        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+        result = runCmd('git status --porcelain', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Add a couple of commits
+        # FIXME: this only tests adding, need to also test update and remove
+        result = runCmd('echo "# Additional line" >> Makefile', cwd=tempdir)
+        result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempdir)
+        result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
+        result = runCmd('git add devtool-new-file', cwd=tempdir)
+        result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
+        self.add_command_to_tearDown('cd %s; rm -rf %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+        result = runCmd('devtool update-recipe -m srcrev %s' % testrecipe)
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertNotEqual(result.output.strip(), "", '%s recipe should be modified' % testrecipe)
+        status = result.output.splitlines()
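+        # In srcrev mode the existing patches should be removed (' D') and the recipe modified (' M');
+        # the for/else falls through to the recipe check when a line matches none of the patches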
+        for line in status:
+            for patch in patches:
+                if line.endswith(patch):
+                    self.assertEqual(line[:3], ' D ', 'Unexpected status in line: %s' % line)
+                    break
+            else:
+                if re.search('%s_[^_]*\.bb$' % testrecipe, line):
+                    self.assertEqual(line[:3], ' M ', 'Unexpected status in line: %s' % line)
+                else:
+                    raise AssertionError('Unexpected modified file in status: %s' % line)
+        result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
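+        # The recipe diff should only add a pinned SRCREV plus a bare SRC_URI and remove the old SRCREV/SRC_URI lines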
+        addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"']
+        srcurilines = src_uri.split()
+        srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
+        srcurilines.append('"')
+        removelines = ['SRCREV = ".*"'] + srcurilines
+        for line in result.output.splitlines():
+            if line.startswith('+++') or line.startswith('---'):
+                continue
+            elif line.startswith('+'):
+                matched = False
+                for item in addlines:
+                    if re.match(item, line[1:].strip()):
+                        matched = True
+                        break
+                self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
+            elif line.startswith('-'):
+                matched = False
+                for item in removelines:
+                    if re.match(item, line[1:].strip()):
+                        matched = True
+                        break
+                self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
+        # Now try with auto mode
+        runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
+        result = runCmd('devtool update-recipe %s' % testrecipe)
+        result = runCmd('git rev-parse --show-toplevel')
+        topleveldir = result.output.strip()
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        status = result.output.splitlines()
+        relpatchpath = os.path.join(os.path.relpath(os.path.dirname(recipefile), topleveldir), testrecipe)
+        expectedstatus = [('M', os.path.relpath(recipefile, topleveldir)),
+                          ('??', '%s/0001-Change-the-Makefile.patch' % relpatchpath),
+                          ('??', '%s/0002-Add-a-new-file.patch' % relpatchpath)]
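+        # Every status line must match one of the expected (status, path) pairs; anything else is a failure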
+        for line in status:
+            statusline = line.split(None, 1)
+            for fstatus, fn in expectedstatus:
+                if fn == statusline[1]:
+                    if fstatus != statusline[0]:
+                        self.fail('Unexpected status in line: %s' % line)
+                    break
+            else:
+                self.fail('Unexpected modified file in line: %s' % line)
+
+    @testcase(1170)
+    def test_devtool_update_recipe_append(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'mdadm'
+        recipefile = get_bb_var('FILE', testrecipe)
+        src_uri = get_bb_var('SRC_URI', testrecipe)
+        self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # First, modify a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        tempsrcdir = os.path.join(tempdir, 'source')
+        templayerdir = os.path.join(tempdir, 'layer')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        # (don't bother with cleaning the recipe on teardown, we won't be building it)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
+        # Check git repo
+        self.assertTrue(os.path.isdir(os.path.join(tempsrcdir, '.git')), 'git repository for external source tree not found')
+        result = runCmd('git status --porcelain', cwd=tempsrcdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempsrcdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Add a commit
+        result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir)
+        result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
+        self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
+        # Create a temporary layer and add it to bblayers.conf
+        self._create_temp_layer(templayerdir, True, 'selftestupdaterecipe')
+        # Create the bbappend
+        result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+        self.assertNotIn('WARNING:', result.output)
+        # Check recipe is still clean
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # Check bbappend was created
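+        # The bbappend should be created under the last two path components of the original recipe directory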
+        splitpath = os.path.dirname(recipefile).split(os.sep)
+        appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+        bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+        patchfile = os.path.join(appenddir, testrecipe, '0001-Add-our-custom-version.patch')
+        self.assertTrue(os.path.exists(patchfile), 'Patch file not created')
+
+        # Check bbappend contents
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
+                         '\n']
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+
+        # Check we can run it again and bbappend isn't modified
+        result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+        # Drop new commit and check patch gets deleted
+        result = runCmd('git reset HEAD^', cwd=tempsrcdir)
+        result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+        self.assertFalse(os.path.exists(patchfile), 'Patch file not deleted')
+        expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines2, f.readlines())
+        # Put commit back and check we can run it if layer isn't in bblayers.conf
+        os.remove(bbappendfile)
+        result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
+        result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
+        result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
+        self.assertIn('WARNING: Specified layer is not currently enabled in bblayers.conf', result.output)
+        self.assertTrue(os.path.exists(patchfile), 'Patch file not created (with disabled layer)')
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+        # Deleting isn't expected to work under these circumstances
+
+    @testcase(1171)
+    def test_devtool_update_recipe_append_git(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        testrecipe = 'mtd-utils'
+        recipefile = get_bb_var('FILE', testrecipe)
+        src_uri = get_bb_var('SRC_URI', testrecipe)
+        self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
+        for entry in src_uri.split():
+            if entry.startswith('git://'):
+                git_uri = entry
+                break
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # First, modify a recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        tempsrcdir = os.path.join(tempdir, 'source')
+        templayerdir = os.path.join(tempdir, 'layer')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        # (don't bother with cleaning the recipe on teardown, we won't be building it)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
+        # Check git repo
+        self.assertTrue(os.path.isdir(os.path.join(tempsrcdir, '.git')), 'git repository for external source tree not found')
+        result = runCmd('git status --porcelain', cwd=tempsrcdir)
+        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=tempsrcdir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        # Add a commit
+        result = runCmd('echo "# Additional line" >> Makefile', cwd=tempsrcdir)
+        result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
+        self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (os.path.dirname(recipefile), testrecipe))
+        # Create a temporary layer
+        os.makedirs(os.path.join(templayerdir, 'conf'))
+        with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
+            f.write('BBPATH .= ":${LAYERDIR}"\n')
+            f.write('BBFILES += "${LAYERDIR}/recipes-*/*/*.bbappend"\n')
+            f.write('BBFILE_COLLECTIONS += "oeselftesttemplayer"\n')
+            f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
+            f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
+            f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
+        self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
+        result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
+        # Create the bbappend
+        result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+        self.assertNotIn('WARNING:', result.output)
+        # Check recipe is still clean
+        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        # Check bbappend was created
+        splitpath = os.path.dirname(recipefile).split(os.sep)
+        appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
+        bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
+        self.assertFalse(os.path.exists(os.path.join(appenddir, testrecipe)), 'Patch directory should not be created')
+
+        # Check bbappend contents
+        result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+        expectedlines = ['SRCREV = "%s"\n' % result.output,
+                         '\n',
+                         'SRC_URI = "%s"\n' % git_uri,
+                         '\n']
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+
+        # Check we can run it again and bbappend isn't modified
+        result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+        # Drop new commit and check SRCREV changes
+        result = runCmd('git reset HEAD^', cwd=tempsrcdir)
+        result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+        self.assertFalse(os.path.exists(os.path.join(appenddir, testrecipe)), 'Patch directory should not be created')
+        result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+        expectedlines = ['SRCREV = "%s"\n' % result.output,
+                         '\n',
+                         'SRC_URI = "%s"\n' % git_uri,
+                         '\n']
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+        # Put commit back and check we can run it if layer isn't in bblayers.conf
+        os.remove(bbappendfile)
+        result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
+        result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
+        result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
+        self.assertIn('WARNING: Specified layer is not currently enabled in bblayers.conf', result.output)
+        self.assertFalse(os.path.exists(os.path.join(appenddir, testrecipe)), 'Patch directory should not be created')
+        result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
+        expectedlines = ['SRCREV = "%s"\n' % result.output,
+                         '\n',
+                         'SRC_URI = "%s"\n' % git_uri,
+                         '\n']
+        with open(bbappendfile, 'r') as f:
+            self.assertEqual(expectedlines, f.readlines())
+        # Deleting isn't expected to work under these circumstances
+
+    @testcase(1163)
+    def test_devtool_extract(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        # Try devtool extract
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool extract remake %s' % tempdir)
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found')
+        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+
+    @testcase(1168)
+    def test_devtool_reset_all(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        testrecipe1 = 'mdadm'
+        testrecipe2 = 'cronie'
+        result = runCmd('devtool modify -x %s %s' % (testrecipe1, os.path.join(tempdir, testrecipe1)))
+        result = runCmd('devtool modify -x %s %s' % (testrecipe2, os.path.join(tempdir, testrecipe2)))
+        result = runCmd('devtool build %s' % testrecipe1)
+        result = runCmd('devtool build %s' % testrecipe2)
+        stampprefix1 = get_bb_var('STAMP', testrecipe1)
+        self.assertTrue(stampprefix1, 'Unable to get STAMP value for recipe %s' % testrecipe1)
+        stampprefix2 = get_bb_var('STAMP', testrecipe2)
+        self.assertTrue(stampprefix2, 'Unable to get STAMP value for recipe %s' % testrecipe2)
+        result = runCmd('devtool reset -a')
+        self.assertIn(testrecipe1, result.output)
+        self.assertIn(testrecipe2, result.output)
+        result = runCmd('devtool status')
+        self.assertNotIn(testrecipe1, result.output)
+        self.assertNotIn(testrecipe2, result.output)
+        matches1 = glob.glob(stampprefix1 + '*')
+        self.assertFalse(matches1, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe1)
+        matches2 = glob.glob(stampprefix2 + '*')
+        self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
+
+    def test_devtool_deploy_target(self):
+        # NOTE: Whilst this test would seemingly be better placed as a runtime test,
+        # unfortunately the runtime tests run under bitbake and you can't run
+        # devtool within bitbake (since devtool needs to run bitbake itself).
+        # Additionally we are testing build-time functionality as well, so
+        # really this has to be done as an oe-selftest test.
+        #
+        # Check preconditions
+        machine = get_bb_var('MACHINE')
+        if not machine.startswith('qemu'):
+            self.skipTest('This test only works with qemu machines')
+        if not os.path.exists('/etc/runqemu-nosudo'):
+            self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+        result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
+        if result.status != 0:
+            result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
+            if result.status != 0:
+                self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
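+        # At least one tap device must already exist for runqemu to attach to; otherwise skip the test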
+        for line in result.output.splitlines():
+            if line.startswith('tap'):
+                break
+        else:
+            self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Definitions
+        testrecipe = 'mdadm'
+        testfile = '/sbin/mdadm'
+        testimage = 'oe-selftest-image'
+        testcommand = '/sbin/mdadm --help'
+        # Build an image to run
+        bitbake("%s qemu-native qemu-helper-native" % testimage)
+        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+        self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
+        self.add_command_to_tearDown('rm -f %s/%s*' % (deploy_dir_image, testimage))
+        # Clean recipe so the first deploy will fail
+        bitbake("%s -c clean" % testrecipe)
+        # Try devtool modify
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        # Test that deploy-target at this point fails (properly)
+        result = runCmd('devtool deploy-target -n %s root@localhost' % testrecipe, ignore_status=True)
+        self.assertNotEqual(result.status, 0, 'devtool deploy-target should have failed, output: %s' % result.output)
+        self.assertNotIn('Traceback', result.output, 'devtool deploy-target should have failed with a proper error, not a traceback; output: %s' % result.output)
+        result = runCmd('devtool build %s' % testrecipe)
+        # First try a dry-run of deploy-target
+        result = runCmd('devtool deploy-target -n %s root@localhost' % testrecipe)
+        self.assertIn('  %s' % testfile, result.output)
+        # Boot the image
+        with runqemu(testimage, self) as qemu:
+            # Now really test deploy-target
+            result = runCmd('devtool deploy-target -c %s root@%s' % (testrecipe, qemu.ip))
+            # Run a test command to see if it was installed properly
+            sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+            result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand))
+            # Check if it deployed all of the files with the right ownership/perms
+            # First look on the host - need to do this under pseudo to get the correct ownership/perms
+            installdir = get_bb_var('D', testrecipe)
+            fakerootenv = get_bb_var('FAKEROOTENV', testrecipe)
+            fakerootcmd = get_bb_var('FAKEROOTCMD', testrecipe)
+            result = runCmd('%s %s find . -type f -exec ls -l {} \;' % (fakerootenv, fakerootcmd), cwd=installdir)
+            filelist1 = self._process_ls_output(result.output)
+
+            # Now look on the target
+            tempdir2 = tempfile.mkdtemp(prefix='devtoolqa')
+            self.track_for_cleanup(tempdir2)
+            tmpfilelist = os.path.join(tempdir2, 'files.txt')
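+            # Write out just the deployed file paths so the same files can be listed with 'ls -l' on the target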
+            with open(tmpfilelist, 'w') as f:
+                for line in filelist1:
+                    splitline = line.split()
+                    f.write(splitline[-1] + '\n')
+            result = runCmd('cat %s | ssh -q %s root@%s \'xargs ls -l\'' % (tmpfilelist, sshargs, qemu.ip))
+            filelist2 = self._process_ls_output(result.output)
+            filelist1.sort(key=lambda item: item.split()[-1])
+            filelist2.sort(key=lambda item: item.split()[-1])
+            self.assertEqual(filelist1, filelist2)
+            # Test undeploy-target
+            result = runCmd('devtool undeploy-target -c %s root@%s' % (testrecipe, qemu.ip))
+            result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
+            self.assertNotEqual(result.status, 0, 'undeploy-target did not remove command as it should have')
+
+    def test_devtool_build_image(self):
+        """Test devtool build-image plugin"""
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        image = 'core-image-minimal'
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        self.add_command_to_tearDown('bitbake -c clean %s' % image)
+        bitbake('%s -c clean' % image)
+        # Add target and native recipes to workspace
+        for recipe in ('mdadm', 'parted-native'):
+            tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+            self.track_for_cleanup(tempdir)
+            self.add_command_to_tearDown('bitbake -c clean %s' % recipe)
+            runCmd('devtool modify %s -x %s' % (recipe, tempdir))
+        # Try to build image
+        result = runCmd('devtool build-image %s' % image)
+        self.assertEqual(result.status, 0, 'devtool build-image failed')
+        # Check if image.bbappend has required content
+        bbappend = os.path.join(workspacedir, 'appends', image+'.bbappend')
+        self.assertTrue(os.path.isfile(bbappend), 'bbappend not created %s' % result.output)
+        # NOTE: native recipe parted-native should not be in IMAGE_INSTALL_append
+        self.assertTrue('IMAGE_INSTALL_append = " mdadm"\n' in open(bbappend).readlines(),
+                        'IMAGE_INSTALL_append = " mdadm" not found in %s' % bbappend)
+
+    def test_devtool_upgrade(self):
+        # Check preconditions
+        workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        # Check parameters
+        result = runCmd('devtool upgrade -h')
+        for param in 'recipename srctree --version -V --branch -b --keep-temp --no-patch'.split():
+            self.assertIn(param, result.output)
+        # For the moment, we are using a real recipe.
+        recipe = 'devtool-upgrade'
+        version = '0.2'
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        # Check that recipe is not already under devtool control
+        result = runCmd('devtool status')
+        self.assertNotIn(recipe, result.output)
+        # Check upgrade. The code does not check whether the new PV is older or newer than the current PV,
+        # so we may actually be downgrading rather than upgrading.
+        result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
+        # Check if srctree at least is populated
+        self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, version))
+        # Check new recipe folder is present
+        self.assertTrue(os.path.exists(os.path.join(workspacedir,'recipes',recipe)), 'Recipe folder should exist')
+        # Check new recipe file is present
+        self.assertTrue(os.path.exists(os.path.join(workspacedir,'recipes',recipe,"%s_%s.bb" % (recipe,version))), 'Recipe file should exist')
+        # Check devtool status and make sure recipe is present
+        result = runCmd('devtool status')
+        self.assertIn(recipe, result.output)
+        self.assertIn(tempdir, result.output)
+        # Check devtool reset recipe
+        result = runCmd('devtool reset %s -n' % recipe)
+        result = runCmd('devtool status')
+        self.assertNotIn(recipe, result.output)
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
diff --git a/meta/lib/oeqa/selftest/imagefeatures.py b/meta/lib/oeqa/selftest/imagefeatures.py
new file mode 100644
index 0000000..fcffc42
--- /dev/null
+++ b/meta/lib/oeqa/selftest/imagefeatures.py
@@ -0,0 +1,168 @@
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu
+from oeqa.utils.decorators import testcase
+from oeqa.utils.sshcontrol import SSHControl
+import os
+import sys
+import logging
+
+class ImageFeatures(oeSelfTest):
+
+    test_user = 'tester'
+    root_user = 'root'
+
+    @testcase(1107)
+    def test_non_root_user_can_connect_via_ssh_without_password(self):
+        """
+        Summary: Check if a non-root user can connect via ssh without a password
+        Expected: 1. Connection to the image via ssh using root user without providing a password should be allowed.
+                  2. Connection to the image via ssh using tester user without providing a password should be allowed.
+        Product: oe-core
+        Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password"\n'
+        features += 'INHERIT += "extrausers"\n'
+        features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
+
+        # Append 'features' to local.conf
+        self.append_config(features)
+
+        # Build a core-image-minimal
+        bitbake('core-image-minimal')
+
+        with runqemu("core-image-minimal", self) as qemu:
+            # Attempt to ssh with each user into qemu with empty password
+            for user in [self.root_user, self.test_user]:
+                ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
+                status, output = ssh.run("true")
+                self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
+
+    @testcase(1115)
+    def test_all_users_can_connect_via_ssh_without_password(self):
+        """
+        Summary:     Check if all users can connect via ssh without password
+        Expected: 1. Connection to the image via ssh using root user without providing a password should NOT be allowed.
+                  2. Connection to the image via ssh using tester user without providing a password should be allowed.
+        Product:     oe-core
+        Author:      Ionut Chisanovici <ionutx.chisanovici@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password"\n'
+        features += 'INHERIT += "extrausers"\n'
+        features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
+
+        # Append 'features' to local.conf
+        self.append_config(features)
+
+        # Build a core-image-minimal
+        bitbake('core-image-minimal')
+
+        with runqemu("core-image-minimal", self) as qemu:
+            # Attempt to ssh with each user into qemu with empty password
+            for user in [self.root_user, self.test_user]:
+                ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
+                status, output = ssh.run("true")
+                if user == 'root':
+                    self.assertNotEqual(status, 0, 'ssh to user root was allowed when it should not have been')
+                else:
+                    self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
+
+
+    @testcase(1114)
+    def test_rpm_version_4_support_on_image(self):
+        """
+        Summary:     Check rpm version 4 support on image
+        Expected:    Rpm version must be 4.x
+        Product:     oe-core
+        Author:      Ionut Chisanovici <ionutx.chisanovici@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        features = 'PREFERRED_VERSION_rpm = "4.%"\n'
+        features += 'PREFERRED_VERSION_rpm-native = "4.%"\n'
+        # Use openssh in IMAGE_INSTALL instead of ssh-server-openssh in EXTRA_IMAGE_FEATURES as a workaround for bug 8047
+        features += 'IMAGE_INSTALL_append = " openssh"\n'
+        features += 'EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password package-management"\n'
+        features += 'RPMROOTFSDEPENDS_remove = "rpmresolve-native:do_populate_sysroot"'
+
+        # Append 'features' to local.conf
+        self.append_config(features)
+
+        # Build a core-image-minimal
+        bitbake('core-image-minimal')
+
+        # Check the native version of rpm is correct
+        native_bindir = get_bb_var('STAGING_BINDIR_NATIVE')
+        result = runCmd(os.path.join(native_bindir, 'rpm') + ' --version')
+        self.assertIn('version 4.', result.output)
+
+        # Check manifest for the rpm package
+        deploydir = get_bb_var('DEPLOY_DIR_IMAGE')
+        imgname = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
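+        # Each manifest line is '<package> <arch> <version>'; find the rpm entry and check its version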
+        with open(os.path.join(deploydir, imgname) + '.manifest', 'r') as f:
+            for line in f:
+                splitline = line.split()
+                if len(splitline) > 2:
+                    rpm_version = splitline[2]
+                    if splitline[0] == 'rpm':
+                        if not rpm_version.startswith('4.'):
+                            self.fail('rpm version %s found in image, expected 4.x' % rpm_version)
+                        break
+            else:
+                self.fail('No rpm package found in image')
+
+        # Now do a couple of runtime tests
+        with runqemu("core-image-minimal", self) as qemu:
+            command = "rpm --version"
+            status, output = qemu.run(command)
+            self.assertEqual(0, status, 'Failed to run command "%s": %s' % (command, output))
+            found_rpm_version = output.strip()
+
+            # Make sure the retrieved rpm version is the expected one
+            if rpm_version not in found_rpm_version:
+                self.fail('RPM version is not {}, found instead {}.'.format(rpm_version, found_rpm_version))
+
+            # Test that the rpm database is there and working
+            command = "rpm -qa"
+            status, output = qemu.run(command)
+            self.assertEqual(0, status, 'Failed to run command "%s": %s' % (command, output))
+            self.assertIn('packagegroup-core-boot', output)
+            self.assertIn('busybox', output)
+
+
+    @testcase(1116)
+    def test_clutter_image_can_be_built(self):
+        """
+        Summary:     Check if clutter image can be built
+        Expected:    1. core-image-clutter can be built
+        Product:     oe-core
+        Author:      Ionut Chisanovici <ionutx.chisanovici@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        # Build a core-image-clutter
+        bitbake('core-image-clutter')
+
+    @testcase(1117)
+    def test_wayland_support_in_image(self):
+        """
+        Summary:     Check Wayland support in image
+        Expected:    1. Wayland image can be built
+                     2. Wayland feature can be installed
+        Product:     oe-core
+        Author:      Ionut Chisanovici <ionutx.chisanovici@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        features = 'DISTRO_FEATURES_append = " wayland"\n'
+        features += 'CORE_IMAGE_EXTRA_INSTALL += "wayland weston"'
+
+        # Append 'features' to local.conf
+        self.append_config(features)
+
+        # Build a core-image-weston
+        bitbake('core-image-weston')
+
diff --git a/meta/lib/oeqa/selftest/layerappend.py b/meta/lib/oeqa/selftest/layerappend.py
new file mode 100644
index 0000000..a82a6c8
--- /dev/null
+++ b/meta/lib/oeqa/selftest/layerappend.py
@@ -0,0 +1,96 @@
+import unittest
+import os
+import logging
+import re
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.selftest.buildhistory import BuildhistoryBase
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+import oeqa.utils.ftools as ftools
+from oeqa.utils.decorators import testcase
+
+class LayerAppendTests(oeSelfTest):
+    layerconf = """
+# We have a conf and classes directory, append to BBPATH
+BBPATH .= ":${LAYERDIR}"
+
+# We have a recipes directory, add to BBFILES
+BBFILES += "${LAYERDIR}/recipes*/*.bb ${LAYERDIR}/recipes*/*.bbappend"
+
+BBFILE_COLLECTIONS += "meta-layerINT"
+BBFILE_PATTERN_meta-layerINT := "^${LAYERDIR}/"
+BBFILE_PRIORITY_meta-layerINT = "6"
+"""
+    recipe = """
+LICENSE="CLOSED"
+INHIBIT_DEFAULT_DEPS = "1"
+
+python do_build() {
+    bb.plain('Building ...')
+}
+addtask build
+"""
+    append = """
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+
+SRC_URI_append = " file://appendtest.txt"
+
+sysroot_stage_all_append() {
+	install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
+}
+
+"""
+
+    append2 = """
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
+
+SRC_URI_append += "file://appendtest.txt"
+"""
+    layerappend = "BBLAYERS += \"COREBASE/meta-layertest0 COREBASE/meta-layertest1 COREBASE/meta-layertest2\""
+
+    def tearDownLocal(self):
+        ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend.replace("COREBASE", self.builddir + "/.."))
+
+    @testcase(1196)
+    def test_layer_appends(self):
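+        # Create three temporary layers: layer 0 provides the layerappendtest
+        # recipe and layers 1 and 2 each bbappend it with their own copy of
+        # appendtest.txt. Build the recipe and check which copy ends up staged
+        # in the sysroot as layer 2's file is removed and then restored.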
+        corebase = get_bb_var("COREBASE")
+        stagingdir = get_bb_var("STAGING_DIR_TARGET")
+        for l in ["0", "1", "2"]:
+            layer = os.path.join(corebase, "meta-layertest" + l)
+            self.assertFalse(os.path.exists(layer))
+            os.mkdir(layer)
+            os.mkdir(layer + "/conf")
+            with open(layer + "/conf/layer.conf", "w") as f:
+                f.write(self.layerconf.replace("INT", l))
+            os.mkdir(layer + "/recipes-test")
+            if l == "0":
+                with open(layer + "/recipes-test/layerappendtest.bb", "w") as f:
+                    f.write(self.recipe)
+            elif l == "1":
+                with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
+                    f.write(self.append)
+                os.mkdir(layer + "/recipes-test/layerappendtest")
+                with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+                    f.write("Layer 1 test")
+            elif l == "2":
+                with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
+                    f.write(self.append2)
+                os.mkdir(layer + "/recipes-test/layerappendtest")
+                with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+                    f.write("Layer 2 test")
+            self.track_for_cleanup(layer)
+        ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend.replace("COREBASE", self.builddir + "/.."))
+        bitbake("layerappendtest")
+        data = ftools.read_file(stagingdir + "/appendtest.txt")
+        self.assertEqual(data, "Layer 2 test")
+        os.remove(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt")
+        bitbake("layerappendtest")
+        data = ftools.read_file(stagingdir + "/appendtest.txt")
+        self.assertEqual(data, "Layer 1 test")
+        with open(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt", "w") as f:
+            f.write("Layer 2 test")
+        bitbake("layerappendtest")
+        data = ftools.read_file(stagingdir + "/appendtest.txt")
+        self.assertEqual(data, "Layer 2 test")
+
+
diff --git a/meta/lib/oeqa/selftest/lic-checksum.py b/meta/lib/oeqa/selftest/lic-checksum.py
new file mode 100644
index 0000000..bd3b9a1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/lic-checksum.py
@@ -0,0 +1,31 @@
+import os
+import tempfile
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import bitbake
+from oeqa.utils import CommandError
+from oeqa.utils.decorators import testcase
+
+class LicenseTests(oeSelfTest):
+
+    # Verify that changing a license file that has an absolute path causes
+    # the license qa to fail due to a mismatched md5sum.
+    @testcase(1197)
+    def test_nonmatching_checksum(self):
+        bitbake_cmd = '-c configure emptytest'
+        error_msg = 'ERROR: emptytest: The new md5 checksum is 8d777f385d3dfec8815d20f7496026dc'
+
+        lic_file, lic_path = tempfile.mkstemp()
+        os.close(lic_file)
+        self.track_for_cleanup(lic_path)
+
+        self.write_recipeinc('emptytest', 'INHIBIT_DEFAULT_DEPS = "1"')
+        self.append_recipeinc('emptytest', 'LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"' % lic_path)
+        result = bitbake(bitbake_cmd)
+
+        with open(lic_path, "w") as f:
+            f.write("data")
+
+        result = bitbake(bitbake_cmd, ignore_status=True)
+        if error_msg not in result.output:
+            raise AssertionError(result.output)
diff --git a/meta/lib/oeqa/selftest/oescripts.py b/meta/lib/oeqa/selftest/oescripts.py
new file mode 100644
index 0000000..31cd508
--- /dev/null
+++ b/meta/lib/oeqa/selftest/oescripts.py
@@ -0,0 +1,54 @@
+import datetime
+import unittest
+import os
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.selftest.buildhistory import BuildhistoryBase
+from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.utils.decorators import testcase
+
+class TestScripts(oeSelfTest):
+
+    @testcase(300)
+    def test_cleanup_workdir(self):
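+        # Build the current and an older version of gzip, run the
+        # cleanup-workdir script, and verify that afterwards the work
+        # directory holds exactly the contents created by the current gzip
+        # build (the old-version workdir and any stale content is removed).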
+        path = os.path.dirname(get_bb_var('WORKDIR', 'gzip'))
+        old_version_recipe = os.path.join(get_bb_var('COREBASE'), 'meta/recipes-extended/gzip/gzip_1.3.12.bb')
+        old_version = '1.3.12'
+        bitbake("-ccleansstate gzip")
+        bitbake("-ccleansstate -b %s" % old_version_recipe)
+        if os.path.exists(get_bb_var('WORKDIR', "-b %s" % old_version_recipe)):
+            shutil.rmtree(get_bb_var('WORKDIR', "-b %s" % old_version_recipe))
+        if os.path.exists(get_bb_var('WORKDIR', 'gzip')):
+            shutil.rmtree(get_bb_var('WORKDIR', 'gzip'))
+
+        if os.path.exists(path):
+            initial_contents = os.listdir(path)
+        else:
+            initial_contents = []
+
+        bitbake('gzip')
+        intermediary_contents = os.listdir(path)
+        bitbake("-b %s" % old_version_recipe)
+        runCmd('cleanup-workdir')
+        remaining_contents = os.listdir(path)
+
+        expected_contents = [x for x in intermediary_contents if x not in initial_contents]
+        remaining_not_expected = [x for x in remaining_contents if x not in expected_contents]
+        self.assertFalse(remaining_not_expected, msg="Not all necessary content has been deleted from %s: %s" % (path, ', '.join(map(str, remaining_not_expected))))
+        expected_not_remaining = [x for x in expected_contents if x not in remaining_contents]
+        self.assertFalse(expected_not_remaining, msg="The script removed extra contents from %s: %s" % (path, ', '.join(map(str, expected_not_remaining))))
+
+class BuildhistoryDiffTests(BuildhistoryBase):
+
+    @testcase(295)
+    def test_buildhistory_diff(self):
+        self.add_command_to_tearDown('cleanup-workdir')
+        target = 'xcursor-transparent-theme'
+        self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
+        self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
+        result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
+        expected_output = 'PR changed from "r1" to "r0"'
+        self.assertTrue(expected_output in result.output, msg="Did not find expected output: %s" % result.output)
diff --git a/meta/lib/oeqa/selftest/pkgdata.py b/meta/lib/oeqa/selftest/pkgdata.py
new file mode 100644
index 0000000..138b03a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/pkgdata.py
@@ -0,0 +1,226 @@
+import unittest
+import os
+import tempfile
+import logging
+import fnmatch
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+
+class OePkgdataUtilTests(oeSelfTest):
+
+    @classmethod
+    def setUpClass(cls):
+        # Ensure we have the right data in pkgdata
+        logger = logging.getLogger("selftest")
+        logger.info('Running bitbake to generate pkgdata')
+        bitbake('glibc busybox zlib bash')
+
+    @testcase(1203)
+    def test_lookup_pkg(self):
+        # Forward tests
+        result = runCmd('oe-pkgdata-util lookup-pkg "glibc busybox"')
+        self.assertEqual(result.output, 'libc6\nbusybox')
+        result = runCmd('oe-pkgdata-util lookup-pkg zlib-dev')
+        self.assertEqual(result.output, 'libz-dev')
+        result = runCmd('oe-pkgdata-util lookup-pkg nonexistentpkg', ignore_status=True)
+        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+        # Reverse tests
+        result = runCmd('oe-pkgdata-util lookup-pkg -r "libc6 busybox"')
+        self.assertEqual(result.output, 'glibc\nbusybox')
+        result = runCmd('oe-pkgdata-util lookup-pkg -r libz-dev')
+        self.assertEqual(result.output, 'zlib-dev')
+        result = runCmd('oe-pkgdata-util lookup-pkg -r nonexistentpkg', ignore_status=True)
+        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+
+    @testcase(1205)
+    def test_read_value(self):
+        result = runCmd('oe-pkgdata-util read-value PN libz1')
+        self.assertEqual(result.output, 'zlib')
+        result = runCmd('oe-pkgdata-util read-value PKGSIZE bash')
+        pkgsize = int(result.output.strip())
+        self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
+
+    @testcase(1198)
+    def test_find_path(self):
+        result = runCmd('oe-pkgdata-util find-path /lib/libc.so.6')
+        self.assertEqual(result.output, 'glibc: /lib/libc.so.6')
+        result = runCmd('oe-pkgdata-util find-path /bin/bash')
+        self.assertEqual(result.output, 'bash: /bin/bash')
+        result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
+        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+        self.assertEqual(result.output, 'ERROR: Unable to find any package producing path /not/exist')
+
+    @testcase(1204)
+    def test_lookup_recipe(self):
+        result = runCmd('oe-pkgdata-util lookup-recipe "libc6-staticdev busybox"')
+        self.assertEqual(result.output, 'glibc\nbusybox')
+        result = runCmd('oe-pkgdata-util lookup-recipe libz-dbg')
+        self.assertEqual(result.output, 'zlib')
+        result = runCmd('oe-pkgdata-util lookup-recipe nonexistentpkg', ignore_status=True)
+        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
+        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
+
+    @testcase(1202)
+    def test_list_pkgs(self):
+        # No arguments
+        result = runCmd('oe-pkgdata-util list-pkgs')
+        pkglist = result.output.split()
+        self.assertIn('glibc-utils', pkglist, "Listed packages: %s" % result.output)
+        self.assertIn('zlib-dev', pkglist, "Listed packages: %s" % result.output)
+        # No pkgspec, runtime
+        result = runCmd('oe-pkgdata-util list-pkgs -r')
+        pkglist = result.output.split()
+        self.assertIn('libc6-utils', pkglist, "Listed packages: %s" % result.output)
+        self.assertIn('libz-dev', pkglist, "Listed packages: %s" % result.output)
+        # With recipe specified
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib')
+        pkglist = sorted(result.output.split())
+        try:
+            pkglist.remove('zlib-ptest') # in case ptest is disabled
+        except ValueError:
+            pass
+        self.assertEqual(pkglist, ['zlib', 'zlib-dbg', 'zlib-dev', 'zlib-doc', 'zlib-staticdev'], "Packages listed after remove: %s" % result.output)
+        # With recipe specified, runtime
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r')
+        pkglist = sorted(result.output.split())
+        try:
+            pkglist.remove('libz-ptest') # in case ptest is disabled
+        except ValueError:
+            pass
+        self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc', 'libz-staticdev', 'libz1'], "Packages listed after remove: %s" % result.output)
+        # With recipe specified and unpackaged
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u')
+        pkglist = sorted(result.output.split())
+        self.assertIn('zlib-locale', pkglist, "Listed packages: %s" % result.output)
+        # With recipe specified and unpackaged, runtime
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u -r')
+        pkglist = sorted(result.output.split())
+        self.assertIn('libz-locale', pkglist, "Listed packages: %s" % result.output)
+        # With recipe specified and pkgspec
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib "*-d*"')
+        pkglist = sorted(result.output.split())
+        self.assertEqual(pkglist, ['zlib-dbg', 'zlib-dev', 'zlib-doc'], "Packages listed: %s" % result.output)
+        # With recipe specified and pkgspec, runtime
+        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r "*-d*"')
+        pkglist = sorted(result.output.split())
+        self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc'], "Packages listed: %s" % result.output)
+
+    @testcase(1201)
+    def test_list_pkg_files(self):
+        def splitoutput(output):
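+            # Parse 'oe-pkgdata-util list-pkg-files' output into a dict of
+            # package name -> list of files; package names end with ':' and
+            # their files follow on tab-indented lines.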
+            files = {}
+            curpkg = None
+            for line in output.splitlines():
+                if line.startswith('\t'):
+                    self.assertTrue(curpkg, 'Unexpected non-package line:\n%s' % line)
+                    files[curpkg].append(line.strip())
+                else:
+                    self.assertTrue(line.rstrip().endswith(':'), 'Invalid package line in output:\n%s' % line)
+                    curpkg = line.split(':')[0]
+                    files[curpkg] = []
+            return files
+        base_libdir = get_bb_var('base_libdir')
+        libdir = get_bb_var('libdir')
+        includedir = get_bb_var('includedir')
+        mandir = get_bb_var('mandir')
+        # Test recipe-space package name
+        result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc')
+        files = splitoutput(result.output)
+        self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+        # Test runtime package name
+        result = runCmd('oe-pkgdata-util list-pkg-files -r libz1 libz-dev')
+        files = splitoutput(result.output)
+        self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertGreater(len(files['libz1']), 1)
+        libspec = os.path.join(base_libdir, 'libz.so.1.*')
+        found = False
+        for fileitem in files['libz1']:
+            if fnmatch.fnmatchcase(fileitem, libspec):
+                found = True
+                break
+        self.assertTrue(found, 'Could not find zlib library file %s in libz1 package file list: %s' % (libspec, files['libz1']))
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+        # Test recipe
+        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib')
+        files = splitoutput(result.output)
+        self.assertIn('zlib-dbg', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-staticdev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertNotIn('zlib-locale', files.keys(), "listed pkgs. files: %s" %result.output)
+        # (ignore ptest, might not be there depending on config)
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+        self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
+        # Test recipe, runtime
+        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r')
+        files = splitoutput(result.output)
+        self.assertIn('libz-dbg', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-doc', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-staticdev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertNotIn('libz-locale', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
+        self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
+        # Test recipe, unpackaged
+        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -u')
+        files = splitoutput(result.output)
+        self.assertIn('zlib-dbg', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-doc', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-staticdev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('zlib-locale', files.keys(), "listed pkgs. files: %s" %result.output) # this is the key one
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
+        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
+        self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
+        # Test recipe, runtime, unpackaged
+        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r -u')
+        files = splitoutput(result.output)
+        self.assertIn('libz-dbg', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-doc', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-dev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-staticdev', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz1', files.keys(), "listed pkgs. files: %s" %result.output)
+        self.assertIn('libz-locale', files.keys(), "listed pkgs. files: %s" %result.output) # this is the key one
+        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
+        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
+        self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
+
+    @testcase(1200)
+    def test_glob(self):
+        tempdir = tempfile.mkdtemp(prefix='pkgdataqa')
+        self.track_for_cleanup(tempdir)
+        pkglistfile = os.path.join(tempdir, 'pkglist')
+        with open(pkglistfile, 'w') as f:
+            f.write('libc6\n')
+            f.write('libz1\n')
+            f.write('busybox\n')
+        result = runCmd('oe-pkgdata-util glob %s "*-dev"' % pkglistfile)
+        desiredresult = ['libc6-dev', 'libz-dev', 'busybox-dev']
+        self.assertEqual(sorted(result.output.split()), sorted(desiredresult))
+        # The following should not error (because when we use this during rootfs construction, sometimes the complementary package won't exist)
+        result = runCmd('oe-pkgdata-util glob %s "*-nonexistent"' % pkglistfile)
+        self.assertEqual(result.output, '')
+        # Test exclude option
+        result = runCmd('oe-pkgdata-util glob %s "*-dev *-dbg" -x "^libz"' % pkglistfile)
+        resultlist = result.output.split()
+        self.assertNotIn('libz-dev', resultlist)
+        self.assertNotIn('libz-dbg', resultlist)
+
+    @testcase(1206)
+    def test_specify_pkgdatadir(self):
+        result = runCmd('oe-pkgdata-util -p %s lookup-pkg glibc' % get_bb_var('PKGDATA_DIR'))
+        self.assertEqual(result.output, 'libc6')
diff --git a/meta/lib/oeqa/selftest/prservice.py b/meta/lib/oeqa/selftest/prservice.py
new file mode 100644
index 0000000..4187fbf
--- /dev/null
+++ b/meta/lib/oeqa/selftest/prservice.py
@@ -0,0 +1,121 @@
+import unittest
+import os
+import logging
+import re
+import shutil
+import datetime
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+
+class BitbakePrTests(oeSelfTest):
+
+    def get_pr_version(self, package_name):
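+        # Read the pkgdata runtime file for the package and return the PR
+        # service suffix parsed from PKGR (e.g. "r0.5" -> 5).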
+        pkgdata_dir = get_bb_var('PKGDATA_DIR')
+        package_data_file = os.path.join(pkgdata_dir, 'runtime', package_name)
+        package_data = ftools.read_file(package_data_file)
+        find_pr = re.search("PKGR: r[0-9]+\.([0-9]+)", package_data)
+        self.assertTrue(find_pr, "No PKG revision found in %s" % package_data_file)
+        return int(find_pr.group(1))
+
+    def get_task_stamp(self, package_name, recipe_task):
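+        # Locate the stamp file written for the given task of the recipe and
+        # return its signature hash; exactly one matching stamp is expected.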
+        stampdata = get_bb_var('STAMP', target=package_name).split('/')
+        prefix = stampdata[-1]
+        package_stamps_path = "/".join(stampdata[:-1])
+        stamps = []
+        for stamp in os.listdir(package_stamps_path):
+            find_stamp = re.match("%s\.%s\.([a-z0-9]{32})" % (prefix, recipe_task), stamp)
+            if find_stamp:
+                stamps.append(find_stamp.group(1))
+        self.assertFalse(len(stamps) == 0, msg="Could not find stamp for task %s for recipe %s" % (recipe_task, package_name))

+        self.assertFalse(len(stamps) > 1, msg="Found multiple %s stamps for the %s recipe in the %s directory." % (recipe_task, package_name, package_stamps_path))
+        return str(stamps[0])
+
+    def increment_package_pr(self, package_name):
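+        # Modify the recipe via its .inc file (embedding the current date) so
+        # that do_package re-runs and the PR service hands out a new revision.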
+        inc_data = "do_package_append() {\nbb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\"\n}" % datetime.datetime.now()
+        self.write_recipeinc(package_name, inc_data)
+        bitbake("-ccleansstate %s" % package_name)
+        res = bitbake(package_name, ignore_status=True)
+        self.delete_recipeinc(package_name)
+        self.assertEqual(res.status, 0, msg=res.output)
+        self.assertTrue("NOTE: Started PRServer with DBfile" in res.output, msg=res.output)
+
+    def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
+        config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
+        self.write_config(config_package_data)
+        config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
+        self.append_config(config_server_data)
+
+    def run_test_pr_service(self, package_name, package_type='rpm', track_task='do_package', pr_socket='localhost:0'):
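+        # Build the package twice with the PR service enabled and check that
+        # the PKGR suffix increases by exactly 1 and the task stamp changes.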
+        self.config_pr_tests(package_name, package_type, pr_socket)
+
+        self.increment_package_pr(package_name)
+        pr_1 = self.get_pr_version(package_name)
+        stamp_1 = self.get_task_stamp(package_name, track_task)
+
+        self.increment_package_pr(package_name)
+        pr_2 = self.get_pr_version(package_name)
+        stamp_2 = self.get_task_stamp(package_name, track_task)
+
+        bitbake("-ccleansstate %s" % package_name)
+        self.assertEqual(pr_2 - pr_1, 1, "PR of %s did not increase by exactly 1 between builds" % package_name)
+        self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
+
+    def run_test_pr_export_import(self, package_name, replace_current_db=True):
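+        # Export the PR service database with bitbake-prserv-tool, optionally
+        # delete the current database, re-import it and check that the PR
+        # numbering carries on from where it left off.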
+        self.config_pr_tests(package_name)
+
+        self.increment_package_pr(package_name)
+        pr_1 = self.get_pr_version(package_name)
+
+        exported_db_path = os.path.join(self.builddir, 'export.inc')
+        export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
+        self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
+
+        if replace_current_db:
+            current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
+            self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path)
+            os.remove(current_db_path)
+
+        import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True)
+        os.remove(exported_db_path)
+        self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
+
+        self.increment_package_pr(package_name)
+        pr_2 = self.get_pr_version(package_name)
+
+        bitbake("-ccleansstate %s" % package_name)
+        self.assertEqual(pr_2 - pr_1, 1, "PR of %s did not increase by exactly 1 between builds" % package_name)
+
+    @testcase(930)
+    def test_import_export_replace_db(self):
+        self.run_test_pr_export_import('m4')
+
+    @testcase(931)
+    def test_import_export_override_db(self):
+        self.run_test_pr_export_import('m4', replace_current_db=False)
+
+    @testcase(932)
+    def test_pr_service_rpm_arch_dep(self):
+        self.run_test_pr_service('m4', 'rpm', 'do_package')
+
+    @testcase(934)
+    def test_pr_service_deb_arch_dep(self):
+        self.run_test_pr_service('m4', 'deb', 'do_package')
+
+    @testcase(933)
+    def test_pr_service_ipk_arch_dep(self):
+        self.run_test_pr_service('m4', 'ipk', 'do_package')
+
+    @testcase(935)
+    def test_pr_service_rpm_arch_indep(self):
+        self.run_test_pr_service('xcursor-transparent-theme', 'rpm', 'do_package')
+
+    @testcase(937)
+    def test_pr_service_deb_arch_indep(self):
+        self.run_test_pr_service('xcursor-transparent-theme', 'deb', 'do_package')
+
+    @testcase(936)
+    def test_pr_service_ipk_arch_indep(self):
+        self.run_test_pr_service('xcursor-transparent-theme', 'ipk', 'do_package')
diff --git a/meta/lib/oeqa/selftest/recipetool.py b/meta/lib/oeqa/selftest/recipetool.py
new file mode 100644
index 0000000..c34ad68
--- /dev/null
+++ b/meta/lib/oeqa/selftest/recipetool.py
@@ -0,0 +1,554 @@
+import os
+import logging
+import tempfile
+import urlparse
+
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
+from oeqa.utils.decorators import testcase
+from oeqa.selftest import devtool
+
+
+templayerdir = None
+
+
+def setUpModule():
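+    # Create a temporary test layer and register it in bblayers.conf so the
+    # recipetool tests have somewhere to write their bbappends.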
+    global templayerdir
+    templayerdir = tempfile.mkdtemp(prefix='recipetoolqa')
+    create_temp_layer(templayerdir, 'selftestrecipetool')
+    runCmd('bitbake-layers add-layer %s' % templayerdir)
+
+
+def tearDownModule():
+    runCmd('bitbake-layers remove-layer %s' % templayerdir, ignore_status=True)
+    runCmd('rm -rf %s' % templayerdir)
+
+
+class RecipetoolBase(devtool.DevtoolBase):
+    def setUpLocal(self):
+        self.templayerdir = templayerdir
+        self.tempdir = tempfile.mkdtemp(prefix='recipetoolqa')
+        self.track_for_cleanup(self.tempdir)
+        self.testfile = os.path.join(self.tempdir, 'testfile')
+        with open(self.testfile, 'w') as f:
+            f.write('Test file\n')
+
+    def tearDownLocal(self):
+        runCmd('rm -rf %s/recipes-*' % self.templayerdir)
+
+    def _try_recipetool_appendcmd(self, cmd, testrecipe, expectedfiles, expectedlines=None):
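+        # Run the given recipetool command and verify that it created a valid
+        # bbappend with the expected contents and copied the expected files
+        # next to it (and nothing else).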
+        result = runCmd(cmd)
+        self.assertNotIn('Traceback', result.output)
+
+        # Check the bbappend was created and applies properly
+        recipefile = get_bb_var('FILE', testrecipe)
+        bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+
+        # Check the bbappend contents
+        if expectedlines is not None:
+            with open(bbappendfile, 'r') as f:
+                self.assertEqual(expectedlines, f.readlines(), "Expected lines are not present in %s" % bbappendfile)
+
+        # Check file was copied
+        filesdir = os.path.join(os.path.dirname(bbappendfile), testrecipe)
+        for expectedfile in expectedfiles:
+            self.assertTrue(os.path.isfile(os.path.join(filesdir, expectedfile)), 'Expected file %s to be copied next to bbappend, but it wasn\'t' % expectedfile)
+
+        # Check no other files created
+        createdfiles = []
+        for root, _, files in os.walk(filesdir):
+            for f in files:
+                createdfiles.append(os.path.relpath(os.path.join(root, f), filesdir))
+        self.assertEqual(sorted(createdfiles), sorted(expectedfiles))
+
+        return bbappendfile, result.output
+
+
+class RecipetoolTests(RecipetoolBase):
+    @classmethod
+    def setUpClass(cls):
+        # Ensure we have the right data in shlibs/pkgdata
+        logger = logging.getLogger("selftest")
+        logger.info('Running bitbake to generate pkgdata')
+        bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
+
+    @classmethod
+    def tearDownClass(cls):
+        # Shouldn't leave any traces of this artificial recipe behind
+        bitbake('-c cleansstate selftest-recipetool-appendfile')
+
+    def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
+        cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
+        return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+    def _try_recipetool_appendfile_fail(self, destfile, newfile, checkerror):
+        cmd = 'recipetool appendfile %s %s %s' % (self.templayerdir, destfile, newfile)
+        result = runCmd(cmd, ignore_status=True)
+        self.assertNotEqual(result.status, 0, 'Command "%s" should have failed but didn\'t' % cmd)
+        self.assertNotIn('Traceback', result.output)
+        for errorstr in checkerror:
+            self.assertIn(errorstr, result.output)
+
+    @testcase(1177)
+    def test_recipetool_appendfile_basic(self):
+        # Basic test
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                        '\n']
+        _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1183)
+    def test_recipetool_appendfile_invalid(self):
+        # Test some commands that should error
+        self._try_recipetool_appendfile_fail('/etc/passwd', self.testfile, ['ERROR: /etc/passwd cannot be handled by this tool', 'useradd', 'extrausers'])
+        self._try_recipetool_appendfile_fail('/etc/timestamp', self.testfile, ['ERROR: /etc/timestamp cannot be handled by this tool'])
+        self._try_recipetool_appendfile_fail('/dev/console', self.testfile, ['ERROR: /dev/console cannot be handled by this tool'])
+
+    @testcase(1176)
+    def test_recipetool_appendfile_alternatives(self):
+        # Now try with a file we know should be an alternative
+        # (this is very much a fake example, but one we know is reliably an alternative)
+        self._try_recipetool_appendfile_fail('/bin/ls', self.testfile, ['ERROR: File /bin/ls is an alternative possibly provided by the following recipes:', 'coreutils', 'busybox'])
+        corebase = get_bb_var('COREBASE')
+        # Need a test file - should be executable
+        testfile2 = os.path.join(corebase, 'oe-init-build-env')
+        testfile2name = os.path.basename(testfile2)
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://%s"\n' % testfile2name,
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${base_bindir}\n',
+                         '    install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name,
+                         '}\n']
+        self._try_recipetool_appendfile('coreutils', '/bin/ls', testfile2, '-r coreutils', expectedlines, [testfile2name])
+        # Now try bbappending the same file again, contents should not change
+        bbappendfile, _ = self._try_recipetool_appendfile('coreutils', '/bin/ls', self.testfile, '-r coreutils', expectedlines, [testfile2name])
+        # But the file copied into the layer should have changed
+        copiedfile = os.path.join(os.path.dirname(bbappendfile), 'coreutils', testfile2name)
+        result = runCmd('diff -q %s %s' % (testfile2, copiedfile), ignore_status=True)
+        self.assertNotEqual(result.status, 0, 'New file should have been copied but was not %s' % result.output)
+
+    @testcase(1178)
+    def test_recipetool_appendfile_binary(self):
+        # Try appending a binary file
+        # /bin/ls can be a symlink to /usr/bin/ls
+        ls = os.path.realpath("/bin/ls")
+        result = runCmd('recipetool appendfile %s /bin/ls %s -r coreutils' % (self.templayerdir, ls))
+        self.assertIn('WARNING: ', result.output)
+        self.assertIn('is a binary', result.output)
+
+    @testcase(1173)
+    def test_recipetool_appendfile_add(self):
+        corebase = get_bb_var('COREBASE')
+        # Try arbitrary file add to a recipe
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+                         '}\n']
+        self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
+        # Try adding another file, this time where the source file is executable
+        # (so we're testing that, plus modifying an existing bbappend)
+        testfile2 = os.path.join(corebase, 'oe-init-build-env')
+        testfile2name = os.path.basename(testfile2)
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile \\\n',
+                         '            file://%s \\\n' % testfile2name,
+                         '            "\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+                         '    install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
+                         '}\n']
+        self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
+
+    @testcase(1174)
+    def test_recipetool_appendfile_add_bindir(self):
+        # Try arbitrary file add to a recipe, this time to a location where it should be installed as executable
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${bindir}\n',
+                         '    install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1175)
+    def test_recipetool_appendfile_add_machine(self):
+        # Try arbitrary file add to a recipe, this time with a machine-specific override
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
+                         '\n',
+                         'SRC_URI_append_mymachine = " file://testfile"\n',
+                         '\n',
+                         'do_install_append_mymachine() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1184)
+    def test_recipetool_appendfile_orig(self):
+        # A file that's in SRC_URI and in do_install with the same name
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1191)
+    def test_recipetool_appendfile_todir(self):
+        # A file that's in SRC_URI and in do_install with destination directory rather than file
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1187)
+    def test_recipetool_appendfile_renamed(self):
+        # A file that's in SRC_URI with a different name to the destination file
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1190)
+    def test_recipetool_appendfile_subdir(self):
+        # A file that's in SRC_URI in a subdir
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1189)
+    def test_recipetool_appendfile_src_glob(self):
+        # A file that's in SRC_URI as a glob
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-src-globfile\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-src-globfile', self.testfile, '', expectedlines, ['testfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1181)
+    def test_recipetool_appendfile_inst_glob(self):
+        # A file that's in do_install as a glob
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1182)
+    def test_recipetool_appendfile_inst_todir_glob(self):
+        # A file that's in do_install as a glob with destination as a directory
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1185)
+    def test_recipetool_appendfile_patch(self):
+        # A file that's added by a patch in SRC_URI
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${sysconfdir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
+        for line in output.splitlines():
+            if line.startswith('WARNING: '):
+                self.assertIn('add-file.patch', line, 'Unexpected warning found in output:\n%s' % line)
+                break
+        else:
+            self.fail('Patch warning not found in output:\n%s' % output)
+
+    @testcase(1188)
+    def test_recipetool_appendfile_script(self):
+        # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1180)
+    def test_recipetool_appendfile_inst_func(self):
+        # A file that's installed from a function called by do_install
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
+        self.assertNotIn('WARNING: ', output)
+
+    @testcase(1186)
+    def test_recipetool_appendfile_postinstall(self):
+        # A file that's created by a postinstall script (and explicitly mentioned in it)
+        # First try without specifying recipe
+        self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
+        # Now specify recipe
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n',
+                         'SRC_URI += "file://testfile"\n',
+                         '\n',
+                         'do_install_append() {\n',
+                         '    install -d ${D}${datadir}\n',
+                         '    install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
+                         '}\n']
+        _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
+
+    @testcase(1179)
+    def test_recipetool_appendfile_extlayer(self):
+        # Try creating a bbappend in a layer that's not in bblayers.conf and has a different structure
+        exttemplayerdir = os.path.join(self.tempdir, 'extlayer')
+        self._create_temp_layer(exttemplayerdir, False, 'oeselftestextlayer', recipepathspec='metadata/recipes/recipes-*/*')
+        result = runCmd('recipetool appendfile %s /usr/share/selftest-replaceme-orig %s' % (exttemplayerdir, self.testfile))
+        self.assertNotIn('Traceback', result.output)
+        createdfiles = []
+        for root, _, files in os.walk(exttemplayerdir):
+            for f in files:
+                createdfiles.append(os.path.relpath(os.path.join(root, f), exttemplayerdir))
+        createdfiles.remove('conf/layer.conf')
+        expectedfiles = ['metadata/recipes/recipes-test/selftest-recipetool-appendfile/selftest-recipetool-appendfile.bbappend',
+                         'metadata/recipes/recipes-test/selftest-recipetool-appendfile/selftest-recipetool-appendfile/selftest-replaceme-orig']
+        self.assertEqual(sorted(createdfiles), sorted(expectedfiles))
+
+    @testcase(1192)
+    def test_recipetool_appendfile_wildcard(self):
+
+        def try_appendfile_wc(options):
+            result = runCmd('recipetool appendfile %s /etc/profile %s %s' % (self.templayerdir, self.testfile, options))
+            self.assertNotIn('Traceback', result.output)
+            bbappendfile = None
+            for root, _, files in os.walk(self.templayerdir):
+                for f in files:
+                    if f.endswith('.bbappend'):
+                        bbappendfile = f
+                        break
+            if not bbappendfile:
+                self.fail('No bbappend file created')
+            runCmd('rm -rf %s/recipes-*' % self.templayerdir)
+            return bbappendfile
+
+        # Check without wildcard option
+        recipefn = os.path.basename(get_bb_var('FILE', 'base-files'))
+        filename = try_appendfile_wc('')
+        self.assertEqual(filename, recipefn.replace('.bb', '.bbappend'))
+        # Now check with wildcard option
+        filename = try_appendfile_wc('-w')
+        self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
+
+    @testcase(1193)
+    def test_recipetool_create(self):
+        # Try adding a recipe
+        tempsrc = os.path.join(self.tempdir, 'srctree')
+        os.makedirs(tempsrc)
+        recipefile = os.path.join(self.tempdir, 'logrotate_3.8.7.bb')
+        srcuri = 'https://fedorahosted.org/releases/l/o/logrotate/logrotate-3.8.7.tar.gz'
+        result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
+        self.assertTrue(os.path.isfile(recipefile))
+        checkvars = {}
+        checkvars['LICENSE'] = 'GPLv2'
+        checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=18810669f13b87348459e611d31ab760'
+        checkvars['SRC_URI'] = 'https://fedorahosted.org/releases/l/o/logrotate/logrotate-${PV}.tar.gz'
+        checkvars['SRC_URI[md5sum]'] = '99e08503ef24c3e2e3ff74cc5f3be213'
+        checkvars['SRC_URI[sha256sum]'] = 'f6ba691f40e30e640efa2752c1f9499a3f9738257660994de70a45fe00d12b64'
+        self._test_recipe_contents(recipefile, checkvars, [])
+
+    @testcase(1194)
+    def test_recipetool_create_git(self):
+        # Ensure we have the right data in shlibs/pkgdata
+        bitbake('libpng pango libx11 libxext jpeg')
+        # Try adding a recipe
+        tempsrc = os.path.join(self.tempdir, 'srctree')
+        os.makedirs(tempsrc)
+        recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
+        srcuri = 'git://git.yoctoproject.org/libmatchbox'
+        result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
+        self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+        checkvars = {}
+        checkvars['LICENSE'] = 'LGPLv2.1'
+        checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
+        checkvars['S'] = '${WORKDIR}/git'
+        checkvars['PV'] = '1.0+git${SRCPV}'
+        checkvars['SRC_URI'] = srcuri
+        checkvars['DEPENDS'] = 'libpng pango libx11 libxext jpeg'
+        inherits = ['autotools', 'pkgconfig']
+        self._test_recipe_contents(recipefile, checkvars, inherits)
+
+
+class RecipetoolAppendsrcBase(RecipetoolBase):
+    def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
+        cmd = 'recipetool appendsrcfile %s %s %s %s %s' % (options, self.templayerdir, testrecipe, newfile, destfile)
+        return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+    def _try_recipetool_appendsrcfiles(self, testrecipe, newfiles, expectedlines=None, expectedfiles=None, destdir=None, options=''):
+
+        if destdir:
+            options += ' -D %s' % destdir
+
+        if expectedfiles is None:
+            expectedfiles = [os.path.basename(f) for f in newfiles]
+
+        cmd = 'recipetool appendsrcfiles %s %s %s %s' % (options, self.templayerdir, testrecipe, ' '.join(newfiles))
+        return self._try_recipetool_appendcmd(cmd, testrecipe, expectedfiles, expectedlines)
+
+    def _try_recipetool_appendsrcfile_fail(self, testrecipe, newfile, destfile, checkerror):
+        cmd = 'recipetool appendsrcfile %s %s %s %s' % (self.templayerdir, testrecipe, newfile, destfile or '')
+        result = runCmd(cmd, ignore_status=True)
+        self.assertNotEqual(result.status, 0, 'Command "%s" should have failed but didn\'t' % cmd)
+        self.assertNotIn('Traceback', result.output)
+        for errorstr in checkerror:
+            self.assertIn(errorstr, result.output)
+
+    @staticmethod
+    def _get_first_file_uri(recipe):
+        '''Return the first file:// in SRC_URI for the specified recipe.'''
+        src_uri = get_bb_var('SRC_URI', recipe).split()
+        for uri in src_uri:
+            p = urlparse.urlparse(uri)
+            if p.scheme == 'file':
+                return p.netloc + p.path
+
+    def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''):
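+        # Work out the destination path and the expected SRC_URI entry
+        # (including any subdir= parameter) for the appended source file, then
+        # run 'recipetool appendsrcfile' and check the resulting bbappend.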
+        if newfile is None:
+            newfile = self.testfile
+
+        if srcdir:
+            if destdir:
+                expected_subdir = os.path.join(srcdir, destdir)
+            else:
+                expected_subdir = srcdir
+        else:
+            options += " -W"
+            expected_subdir = destdir
+
+        if filename:
+            if destdir:
+                destpath = os.path.join(destdir, filename)
+            else:
+                destpath = filename
+        else:
+            filename = os.path.basename(newfile)
+            if destdir:
+                destpath = destdir + os.sep
+            else:
+                destpath = '.' + os.sep
+
+        expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n',
+                         '\n']
+        if has_src_uri:
+            uri = 'file://%s' % filename
+            if expected_subdir:
+                uri += ';subdir=%s' % expected_subdir
+            expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri,
+                                  '\n']
+
+        return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
+
+    def _test_appendsrcfiles(self, testrecipe, newfiles, expectedfiles=None, destdir=None, options=''):
+        if expectedfiles is None:
+            expectedfiles = [os.path.basename(n) for n in newfiles]
+
+        self._try_recipetool_appendsrcfiles(testrecipe, newfiles, expectedfiles=expectedfiles, destdir=destdir, options=options)
+
+        src_uri = get_bb_var('SRC_URI', testrecipe).split()
+        for f in expectedfiles:
+            if destdir:
+                self.assertIn('file://%s;subdir=%s' % (f, destdir), src_uri)
+            else:
+                self.assertIn('file://%s' % f, src_uri)
+
+        recipefile = get_bb_var('FILE', testrecipe)
+        bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+        filesdir = os.path.join(os.path.dirname(bbappendfile), testrecipe)
+        filesextrapaths = get_bb_var('FILESEXTRAPATHS', testrecipe).split(':')
+        self.assertIn(filesdir, filesextrapaths)
+
+
+class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
+    def test_recipetool_appendsrcfile_basic(self):
+        self._test_appendsrcfile('base-files', 'a-file')
+
+    def test_recipetool_appendsrcfile_basic_wildcard(self):
+        testrecipe = 'base-files'
+        self._test_appendsrcfile(testrecipe, 'a-file', options='-w')
+        recipefile = get_bb_var('FILE', testrecipe)
+        bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
+        self.assertEqual(os.path.basename(bbappendfile), '%s_%%.bbappend' % testrecipe)
+
+    def test_recipetool_appendsrcfile_subdir_basic(self):
+        self._test_appendsrcfile('base-files', 'a-file', 'tmp')
+
+    def test_recipetool_appendsrcfile_subdir_basic_dirdest(self):
+        self._test_appendsrcfile('base-files', destdir='tmp')
+
+    def test_recipetool_appendsrcfile_srcdir_basic(self):
+        testrecipe = 'bash'
+        srcdir = get_bb_var('S', testrecipe)
+        workdir = get_bb_var('WORKDIR', testrecipe)
+        subdir = os.path.relpath(srcdir, workdir)
+        self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
+
+    def test_recipetool_appendsrcfile_existing_in_src_uri(self):
+        testrecipe = 'base-files'
+        filepath = self._get_first_file_uri(testrecipe)
+        self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
+        self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
+
+    def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
+        testrecipe = 'base-files'
+        subdir = 'tmp'
+        filepath = self._get_first_file_uri(testrecipe)
+        self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
+
+        output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
+        self.assertTrue(any('with different parameters' in l for l in output))
+
+    def test_recipetool_appendsrcfile_replace_file_srcdir(self):
+        testrecipe = 'bash'
+        filepath = 'Makefile.in'
+        srcdir = get_bb_var('S', testrecipe)
+        workdir = get_bb_var('WORKDIR', testrecipe)
+        subdir = os.path.relpath(srcdir, workdir)
+
+        self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir)
+        bitbake('%s:do_unpack' % testrecipe)
+        self.assertEqual(open(self.testfile, 'r').read(), open(os.path.join(srcdir, filepath), 'r').read())
+
+    def test_recipetool_appendsrcfiles_basic(self, destdir=None):
+        newfiles = [self.testfile]
+        for i in range(1, 5):
+            testfile = os.path.join(self.tempdir, 'testfile%d' % i)
+            with open(testfile, 'w') as f:
+                f.write('Test file %d\n' % i)
+            newfiles.append(testfile)
+        self._test_appendsrcfiles('gcc', newfiles, destdir=destdir, options='-W')
+
+    def test_recipetool_appendsrcfiles_basic_subdir(self):
+        self.test_recipetool_appendsrcfiles_basic(destdir='testdir')
diff --git a/meta/lib/oeqa/selftest/sstate.py b/meta/lib/oeqa/selftest/sstate.py
new file mode 100644
index 0000000..5989724
--- /dev/null
+++ b/meta/lib/oeqa/selftest/sstate.py
@@ -0,0 +1,53 @@
+import datetime
+import unittest
+import os
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
+
+
+class SStateBase(oeSelfTest):
+
+    def setUpLocal(self):
+        self.temp_sstate_location = None
+        self.sstate_path = get_bb_var('SSTATE_DIR')
+        self.distro = get_bb_var('NATIVELSBSTRING')
+        self.distro_specific_sstate = os.path.join(self.sstate_path, self.distro)
+
+    # Creates a special sstate configuration with the option to add sstate mirrors
+    def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
+        self.temp_sstate_location = temp_sstate_location
+
+        if self.temp_sstate_location:
+            temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
+            config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
+            self.append_config(config_temp_sstate)
+            self.track_for_cleanup(temp_sstate_path)
+        self.sstate_path = get_bb_var('SSTATE_DIR')
+        self.distro = get_bb_var('NATIVELSBSTRING')
+        self.distro_specific_sstate = os.path.join(self.sstate_path, self.distro)
+
+        if add_local_mirrors:
+            config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
+            self.append_config(config_set_sstate_if_not_set)
+            for local_mirror in add_local_mirrors:
+                self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
+                config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
+                self.append_config(config_sstate_mirror)
+
+    # Returns a list of sstate file names matching filename_regex
+    def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
+        result = []
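+        # Distro-specific sstate lives under SSTATE_DIR/<NATIVELSBSTRING>/<xx>/ while
+        # distro-nonspecific sstate lives directly under SSTATE_DIR/<xx>/, where <xx>
+        # is the two-character hash prefix, so match the walked root accordingly.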
+        for root, dirs, files in os.walk(self.sstate_path):
+            if distro_specific and re.search("%s/[a-z0-9]{2}$" % self.distro, root):
+                for f in files:
+                    if re.search(filename_regex, f):
+                        result.append(f)
+            if distro_nonspecific and re.search("%s/[a-z0-9]{2}$" % self.sstate_path, root):
+                for f in files:
+                    if re.search(filename_regex, f):
+                        result.append(f)
+        return result
diff --git a/meta/lib/oeqa/selftest/sstatetests.py b/meta/lib/oeqa/selftest/sstatetests.py
new file mode 100644
index 0000000..6906b21
--- /dev/null
+++ b/meta/lib/oeqa/selftest/sstatetests.py
@@ -0,0 +1,308 @@
+import datetime
+import unittest
+import os
+import re
+import shutil
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer
+from oeqa.selftest.sstate import SStateBase
+from oeqa.utils.decorators import testcase
+
+class SStateTests(SStateBase):
+
+    # Test sstate file creation and location
+    def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
+        self.config_sstate(temp_sstate_location)
+
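+        # With a fresh temporary SSTATE_DIR there is no shared state to remove yet,
+        # so only clean the work directories; otherwise clear any existing sstate first.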
+        if self.temp_sstate_location:
+            bitbake(['-cclean'] + targets)
+        else:
+            bitbake(['-ccleansstate'] + targets)
+
+        bitbake(targets)
+        file_tracker = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
+        if should_pass:
+            self.assertTrue(file_tracker, msg="Could not find sstate files for: %s" % ', '.join(map(str, targets)))
+        else:
+            self.assertFalse(file_tracker, msg="Found sstate files in the wrong place for: %s" % ', '.join(map(str, targets)))
+
+    @testcase(975)
+    def test_sstate_creation_distro_specific_pass(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+    @testcase(975)
+    def test_sstate_creation_distro_specific_fail(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
+
+    @testcase(976)
+    def test_sstate_creation_distro_nonspecific_pass(self):
+        self.run_test_sstate_creation(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+    @testcase(976)
+    def test_sstate_creation_distro_nonspecific_fail(self):
+        self.run_test_sstate_creation(['glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+
+
+    # Test the sstate file deletion part of the do_cleansstate task
+    def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
+        self.config_sstate(temp_sstate_location)
+
+        bitbake(['-ccleansstate'] + targets)
+
+        bitbake(targets)
+        tgz_created = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
+        self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s" % ', '.join(map(str, targets)))
+
+        siginfo_created = self.search_sstate('|'.join(map(str, [s + '.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
+        self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s" % ', '.join(map(str, targets)))
+
+        bitbake(['-ccleansstate'] + targets)
+        tgz_removed = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific)
+        self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s" % ', '.join(map(str, targets)))
+
+    @testcase(977)
+    def test_cleansstate_task_distro_specific_nonspecific(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_cleansstate_task(['binutils-cross-' + targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+
+    @testcase(977)
+    def test_cleansstate_task_distro_nonspecific(self):
+        self.run_test_cleansstate_task(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+
+    @testcase(977)
+    def test_cleansstate_task_distro_specific(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_cleansstate_task(['binutils-cross-'+ targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+
+
+    # Test rebuilding of distro-specific sstate files
+    def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
+        self.config_sstate(temp_sstate_location)
+
+        bitbake(['-ccleansstate'] + targets)
+
+        bitbake(targets)
+        self.assertTrue(self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True) == [], msg="Found distro non-specific sstate for: %s" % ', '.join(map(str, targets)))
+        file_tracker_1 = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+        self.assertTrue(len(file_tracker_1) >= len(targets), msg="Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
+
+        self.track_for_cleanup(self.distro_specific_sstate + "_old")
+        shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
+        shutil.rmtree(self.distro_specific_sstate)
+
+        bitbake(['-cclean'] + targets)
+        bitbake(targets)
+        file_tracker_2 = self.search_sstate('|'.join(map(str, [s + '.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False)
+        self.assertTrue(len(file_tracker_2) >= len(targets), msg="Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
+
+        not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
+        self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
+
+        created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
+        self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
+
+    @testcase(175)
+    def test_rebuild_distro_specific_sstate_cross_native_targets(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch, 'binutils-native'], temp_sstate_location=True)
+
+    @testcase(175)
+    def test_rebuild_distro_specific_sstate_cross_target(self):
+        targetarch = get_bb_var('TUNE_ARCH')
+        self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch], temp_sstate_location=True)
+
+    @testcase(175)
+    def test_rebuild_distro_specific_sstate_native_target(self):
+        self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
+
+
+    # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list.
+    # global_config elements (such as changing the value of MACHINE) are expected not to generate any sstate files that would be removed by sstate-cache-management.sh.
+    def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
+        self.assertTrue(global_config)
+        self.assertTrue(target_config)
+        self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
+        self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
+
+        # If buildhistory is enabled, we need to disable version-going-backwards QA checks for this test. It may report errors otherwise.
+        if ('buildhistory' in get_bb_var('USER_CLASSES')) or ('buildhistory' in get_bb_var('INHERIT')):
+            remove_errors_config = 'ERROR_QA_remove = "version-going-backwards"'
+            self.append_config(remove_errors_config)
+
+        # For now this only checks whether random sstate tasks are handled correctly as a group.
+        # In the future we should add control over which tasks we check.
+
+        sstate_archs_list = []
+        expected_remaining_sstate = []
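+        # Build each configuration in turn. The sstate file list is snapshotted before
+        # and after every build whose target_config value matches the last entry, so
+        # expected_remaining_sstate collects the files those builds newly created
+        # (minus anything matching ignore_patterns).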
+        for idx in range(len(target_config)):
+            self.append_config(global_config[idx])
+            self.append_recipeinc(target, target_config[idx])
+            sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
+            if sstate_arch not in sstate_archs_list:
+                sstate_archs_list.append(sstate_arch)
+            if target_config[idx] == target_config[-1]:
+                target_sstate_before_build = self.search_sstate(target + '.*?\.tgz$')
+            bitbake("-cclean %s" % target)
+            result = bitbake(target, ignore_status=True)
+            if target_config[idx] == target_config[-1]:
+                target_sstate_after_build = self.search_sstate(target + '.*?\.tgz$')
+                expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
+            self.remove_config(global_config[idx])
+            self.remove_recipeinc(target, target_config[idx])
+            self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
+
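+        # Run sstate-cache-management.sh over the temporary cache; --remove-duplicated
+        # is expected to keep only the newest sstate archives, which are then compared
+        # against expected_remaining_sstate.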
+        runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list))))
+        actual_remaining_sstate = [x for x in self.search_sstate(target + '.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)]
+
+        actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
+        self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
+        expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
+        self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" % ', '.join(map(str, expected_not_actual)))
+
+    @testcase(973)
+    def test_sstate_cache_management_script_using_pr_1(self):
+        global_config = []
+        target_config = []
+        global_config.append('')
+        target_config.append('PR = "0"')
+        self.run_test_sstate_cache_management_script('m4', global_config,  target_config, ignore_patterns=['populate_lic'])
+
+    @testcase(978)
+    def test_sstate_cache_management_script_using_pr_2(self):
+        global_config = []
+        target_config = []
+        global_config.append('')
+        target_config.append('PR = "0"')
+        global_config.append('')
+        target_config.append('PR = "1"')
+        self.run_test_sstate_cache_management_script('m4', global_config,  target_config, ignore_patterns=['populate_lic'])
+
+    @testcase(979)
+    def test_sstate_cache_management_script_using_pr_3(self):
+        global_config = []
+        target_config = []
+        global_config.append('MACHINE = "qemux86-64"')
+        target_config.append('PR = "0"')
+        global_config.append(global_config[0])
+        target_config.append('PR = "1"')
+        global_config.append('MACHINE = "qemux86"')
+        target_config.append('PR = "1"')
+        self.run_test_sstate_cache_management_script('m4', global_config,  target_config, ignore_patterns=['populate_lic'])
+
+    @testcase(974)
+    def test_sstate_cache_management_script_using_machine(self):
+        global_config = []
+        target_config = []
+        global_config.append('MACHINE = "qemux86-64"')
+        target_config.append('')
+        global_config.append('MACHINE = "qemux86"')
+        target_config.append('')
+        self.run_test_sstate_cache_management_script('m4', global_config,  target_config, ignore_patterns=['populate_lic'])
+
+    @testcase(1270)
+    def test_sstate_32_64_same_hash(self):
+        """
+        The sstate checksums for both native and target should not vary whether
+        they're built on a 32-bit or 64-bit system. Rather than requiring two
+        different build machines and running builds, override the variables that
+        would normally be derived from uname() and check using bitbake -S.
+        """
+
+        topdir = get_bb_var('TOPDIR')
+        targetvendor = get_bb_var('TARGET_VENDOR')
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+BUILD_ARCH = \"x86_64\"
+BUILD_OS = \"linux\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash")
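+        # 'bitbake -S none' writes out the task signature data under ${TMPDIR}/stamps
+        # without executing any tasks, so the two configurations can be compared cheaply.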
+        bitbake("core-image-sato -S none")
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+BUILD_ARCH = \"i686\"
+BUILD_OS = \"linux\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash2")
+        bitbake("core-image-sato -S none")
+
+        def get_files(d):
+            f = []
+            for root, dirs, files in os.walk(d):
+                f.extend(os.path.join(root, name) for name in files)
+            return f
+        files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/")
+        files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/")
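+        # Normalise the second run's paths: rename the TMPDIR component and replace the
+        # i686 build-host strings with x86_64 so identical signatures give identical lists.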
+        files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash").replace("i686-linux", "x86_64-linux").replace("i686" + targetvendor + "-linux", "x86_64" + targetvendor + "-linux") for x in files2]
+        self.assertItemsEqual(files1, files2)
+
+
+    @testcase(1271)
+    def test_sstate_nativelsbstring_same_hash(self):
+        """
+        The sstate checksums should be independent of whichever NATIVELSBSTRING is
+        detected. Rather than requiring two different build machines and running
+        builds, override the variable manually and check using bitbake -S.
+        """
+
+        topdir = get_bb_var('TOPDIR')
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+NATIVELSBSTRING = \"DistroA\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash")
+        bitbake("core-image-sato -S none")
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+NATIVELSBSTRING = \"DistroB\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash2")
+        bitbake("core-image-sato -S none")
+
+        def get_files(d):
+            f = []
+            for root, dirs, files in os.walk(d):
+                f.extend(os.path.join(root, name) for name in files)
+            return f
+        files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/")
+        files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/")
+        files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+        self.assertItemsEqual(files1, files2)
+
+    def test_sstate_allarch_samesigs(self):
+        """
+        The sstate checksums of allarch packages should be independent of whichever
+        MACHINE is set. Check this using bitbake -S.
+        """
+
+        topdir = get_bb_var('TOPDIR')
+        targetos = get_bb_var('TARGET_OS')
+        targetvendor = get_bb_var('TARGET_VENDOR')
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+MACHINE = \"qemux86\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash")
+        bitbake("world -S none")
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+MACHINE = \"qemuarm\"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash2")
+        bitbake("world -S none")
+
+        def get_files(d):
+            f = []
+            for root, dirs, files in os.walk(d):
+                for name in files:
+                    if "do_build" not in name:
+                        f.append(os.path.join(root, name))
+            return f
+        files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/all" + targetvendor + "-" + targetos)
+        files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/all" + targetvendor + "-" + targetos)
+        files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+        self.maxDiff = None
+        self.assertItemsEqual(files1, files2)
diff --git a/meta/lib/oeqa/selftest/wic.py b/meta/lib/oeqa/selftest/wic.py
new file mode 100644
index 0000000..3dc54a4
--- /dev/null
+++ b/meta/lib/oeqa/selftest/wic.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (c) 2015, Intel Corporation.
+# All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# AUTHORS
+# Ed Bartosh <ed.bartosh@linux.intel.com>
+
+"""Test cases for wic."""
+
+import os
+import sys
+
+from glob import glob
+from shutil import rmtree
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+
+class Wic(oeSelfTest):
+    """Wic test class."""
+
+    resultdir = "/var/tmp/wic/build/"
+
+    @classmethod
+    def setUpClass(cls):
+        """Build wic runtime dependencies."""
+        bitbake('syslinux syslinux-native parted-native gptfdisk-native '
+                'dosfstools-native mtools-native')
+        Wic.image_is_ready = False
+
+    def setUp(self):
+        """This code is executed before each test method."""
+        if not Wic.image_is_ready:
+            # build core-image-minimal with required features
+            features = 'IMAGE_FSTYPES += " hddimg"\nMACHINE_FEATURES_append = " efi"\n'
+            self.append_config(features)
+            bitbake('core-image-minimal')
+            # set this class variable to avoid building the image multiple times
+            Wic.image_is_ready = True
+
+        rmtree(self.resultdir, ignore_errors=True)
+
+    def test01_help(self):
+        """Test wic --help"""
+        self.assertEqual(0, runCmd('wic --help').status)
+
+    def test02_createhelp(self):
+        """Test wic create --help"""
+        self.assertEqual(0, runCmd('wic create --help').status)
+
+    def test03_listhelp(self):
+        """Test wic list --help"""
+        self.assertEqual(0, runCmd('wic list --help').status)
+
+    def test04_build_image_name(self):
+        """Test wic create directdisk --image-name core-image-minimal"""
+        self.assertEqual(0, runCmd("wic create directdisk "
+                                   "--image-name core-image-minimal").status)
+        self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+
+    def test05_build_artifacts(self):
+        """Test wic create directdisk providing all artifacts."""
+        vars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \
+                        for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE',
+                                    'STAGING_DIR_NATIVE', 'IMAGE_ROOTFS'))
+        status = runCmd("wic create directdisk "
+                        "-b %(staging_datadir)s "
+                        "-k %(deploy_dir_image)s "
+                        "-n %(staging_dir_native)s "
+                        "-r %(image_rootfs)s" % vars).status
+        self.assertEqual(0, status)
+        self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+
+    def test06_gpt_image(self):
+        """Test creation of core-image-minimal with gpt table and UUID boot"""
+        self.assertEqual(0, runCmd("wic create directdisk-gpt "
+                                   "--image-name core-image-minimal").status)
+        self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
+
+    def test07_unsupported_subcommand(self):
+        """Test unsupported subcommand"""
+        self.assertEqual(1, runCmd('wic unsupported',
+                         ignore_status=True).status)
+
+    def test08_no_command(self):
+        """Test wic without command"""
+        self.assertEqual(1, runCmd('wic', ignore_status=True).status)
+
+    def test09_help_kickstart(self):
+        """Test wic help overview"""
+        self.assertEqual(0, runCmd('wic help overview').status)
+
+    def test10_help_plugins(self):
+        """Test wic help plugins"""
+        self.assertEqual(0, runCmd('wic help plugins').status)
+
+    def test11_help_kickstart(self):
+        """Test wic help kickstart"""
+        self.assertEqual(0, runCmd('wic help kickstart').status)
+
+    def test12_compress_gzip(self):
+        """Test compressing an image with gzip"""
+        self.assertEqual(0, runCmd("wic create directdisk "
+                                   "--image-name core-image-minimal "
+                                   "-c gzip").status)
+        self.assertEqual(1, len(glob(self.resultdir + \
+                                         "directdisk-*.direct.gz")))
+
+    def test13_compress_bzip2(self):
+        """Test compressing an image with bzip2"""
+        self.assertEqual(0, runCmd("wic create directdisk "
+                                   "--image-name core-image-minimal "
+                                   "-c bzip2").status)
+        self.assertEqual(1, len(glob(self.resultdir + \
+                                         "directdisk-*.direct.bz2")))
+
+    def test14_compress_xz(self):
+        """Test compressing an image with xz"""
+        self.assertEqual(0, runCmd("wic create directdisk "
+                                   "--image-name core-image-minimal "
+                                   "-c xz").status)
+        self.assertEqual(1, len(glob(self.resultdir + \
+                                         "directdisk-*.direct.xz")))
+
+    def test15_wrong_compressor(self):
+        """Test how wic breaks if wrong compressor is provided"""
+        self.assertEqual(2, runCmd("wic create directdisk "
+                                   "--image-name core-image-minimal "
+                                   "-c wrong", ignore_status=True).status)
+
+    def test16_rootfs_indirect_recipes(self):
+        """Test usage of rootfs plugin with rootfs recipes"""
+        wks = "directdisk-multi-rootfs"
+        self.assertEqual(0, runCmd("wic create %s "
+                                   "--image-name core-image-minimal "
+                                   "--rootfs rootfs1=core-image-minimal "
+                                   "--rootfs rootfs2=core-image-minimal" \
+                                   % wks).status)
+        self.assertEqual(1, len(glob(self.resultdir + "%s*.direct" % wks)))
+
+    def test17_rootfs_artifacts(self):
+        """Test usage of rootfs plugin with rootfs paths"""
+        vars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \
+                        for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE',
+                                    'STAGING_DIR_NATIVE', 'IMAGE_ROOTFS'))
+        vars['wks'] = "directdisk-multi-rootfs"
+        status = runCmd("wic create %(wks)s "
+                        "-b %(staging_datadir)s "
+                        "-k %(deploy_dir_image)s "
+                        "-n %(staging_dir_native)s "
+                        "--rootfs-dir rootfs1=%(image_rootfs)s "
+                        "--rootfs-dir rootfs2=%(image_rootfs)s" \
+                        % vars).status
+        self.assertEqual(0, status)
+        self.assertEqual(1, len(glob(self.resultdir + \
+                                     "%(wks)s-*.direct" % vars)))
+
+    def test18_iso_image(self):
+        """Test creation of hybrid iso imagewith legacy and EFI boot"""
+        self.assertEqual(0, runCmd("wic create mkhybridiso "
+                                   "--image-name core-image-minimal").status)
+        self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct")))
+        self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso")))
+
+    def test19_image_env(self):
+        """Test generation of <image>.env files."""
+        image = 'core-image-minimal'
+        stdir = get_bb_var('STAGING_DIR_TARGET', image)
+        imgdatadir = os.path.join(stdir, 'imgdata')
+
+        basename = get_bb_var('IMAGE_BASENAME', image)
+        self.assertEqual(basename, image)
+        path = os.path.join(imgdatadir, basename) + '.env'
+        self.assertTrue(os.path.isfile(path))
+
+        wicvars = set(get_bb_var('WICVARS', image).split())
+        # filter out optional variables
+        wicvars = wicvars.difference(('HDDDIR', 'IMAGE_BOOT_FILES',
+                                      'INITRD', 'ISODIR'))
+        with open(path) as envfile:
+            content = dict(line.split("=", 1) for line in envfile)
+            # test that variables used by wic are present in the .env file
+            for var in wicvars:
+                self.assertTrue(var in content, "%s is not in .env file" % var)
+                self.assertTrue(content[var])
+
+    def test20_wic_image_type(self):
+        """Test building wic images by bitbake"""
+        self.assertEqual(0, bitbake('wic-image-minimal').status)
+
+        deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
+        machine = get_bb_var('MACHINE')
+        prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
+        # check that the resulting image and manifest symlinks
+        # point to existing files
+        for suffix in ('wic.bz2', 'manifest'):
+            path = prefix + suffix
+            self.assertTrue(os.path.islink(path))
+            self.assertTrue(os.path.isfile(os.path.realpath(path)))
+
+    def test21_qemux86_directdisk(self):
+        """Test creation of qemux-86-directdisk image"""
+        image = "qemux86-directdisk"
+        self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
+                                   % image).status)
+        self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image)))
+
+    def test22_mkgummidisk(self):
+        """Test creation of mkgummidisk image"""
+        image = "mkgummidisk"
+        self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
+                                   % image).status)
+        self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image)))
+
+    def test23_mkefidisk(self):
+        """Test creation of mkefidisk image"""
+        image = "mkefidisk"
+        self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
+                                   % image).status)
+        self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image)))
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
new file mode 100644
index 0000000..edc0d01
--- /dev/null
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -0,0 +1,240 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# This module is used by testimage.bbclass for setting up and controlling a target machine.
+
+import os
+import shutil
+import subprocess
+import bb
+import traceback
+import sys
+import logging
+from oeqa.utils.sshcontrol import SSHControl
+from oeqa.utils.qemurunner import QemuRunner
+from oeqa.utils.qemutinyrunner import QemuTinyRunner
+from oeqa.utils.dump import TargetDumper
+from oeqa.controllers.testtargetloader import TestTargetLoader
+from abc import ABCMeta, abstractmethod
+
+def get_target_controller(d):
+    testtarget = d.getVar("TEST_TARGET", True)
+    # old, simple names
+    if testtarget == "qemu":
+        return QemuTarget(d)
+    elif testtarget == "simpleremote":
+        return SimpleRemoteTarget(d)
+    else:
+        # use the class name
+        try:
+            # is it a core class defined here?
+            controller = getattr(sys.modules[__name__], testtarget)
+        except AttributeError:
+            # nope, perhaps a layer defined one
+            try:
+                bbpath = d.getVar("BBPATH", True).split(':')
+                testtargetloader = TestTargetLoader()
+                controller = testtargetloader.get_controller_module(testtarget, bbpath)
+            except ImportError as e:
+                bb.fatal("Failed to import {0} from available controller modules:\n{1}".format(testtarget,traceback.format_exc()))
+            except AttributeError as e:
+                bb.fatal("Invalid TEST_TARGET - " + str(e))
+        return controller(d)
+
+
+class BaseTarget(object):
+
+    __metaclass__ = ABCMeta
+
+    supported_image_fstypes = []
+
+    def __init__(self, d):
+        self.connection = None
+        self.ip = None
+        self.server_ip = None
+        self.datetime = d.getVar('DATETIME', True)
+        self.testdir = d.getVar("TEST_LOG_DIR", True)
+        self.pn = d.getVar("PN", True)
+
+    @abstractmethod
+    def deploy(self):
+
+        self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime)
+        sshloglink = os.path.join(self.testdir, "ssh_target_log")
+        if os.path.islink(sshloglink):
+            os.unlink(sshloglink)
+        os.symlink(self.sshlog, sshloglink)
+        bb.note("SSH log file: %s" %  self.sshlog)
+
+    @abstractmethod
+    def start(self, params=None):
+        pass
+
+    @abstractmethod
+    def stop(self):
+        pass
+
+    @classmethod
+    def get_extra_files(self):
+        return None
+
+    @classmethod
+    def match_image_fstype(self, d, image_fstypes=None):
+        if not image_fstypes:
+            image_fstypes = d.getVar('IMAGE_FSTYPES', True).split(' ')
+        possible_image_fstypes = [fstype for fstype in self.supported_image_fstypes if fstype in image_fstypes]
+        if possible_image_fstypes:
+            return possible_image_fstypes[0]
+        else:
+            return None
+
+    def get_image_fstype(self, d):
+        image_fstype = self.match_image_fstype(d)
+        if image_fstype:
+            return image_fstype
+        else:
+            bb.fatal("IMAGE_FSTYPES should contain a Target Controller supported image fstype: %s " % ', '.join(map(str, self.supported_image_fstypes)))
+
+    def restart(self, params=None):
+        self.stop()
+        self.start(params)
+
+    def run(self, cmd, timeout=None):
+        return self.connection.run(cmd, timeout)
+
+    def copy_to(self, localpath, remotepath):
+        return self.connection.copy_to(localpath, remotepath)
+
+    def copy_from(self, remotepath, localpath):
+        return self.connection.copy_from(remotepath, localpath)
+
+
+
+class QemuTarget(BaseTarget):
+
+    supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz']
+
+    def __init__(self, d):
+
+        super(QemuTarget, self).__init__(d)
+
+        self.image_fstype = self.get_image_fstype(d)
+        self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
+        self.origrootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True),  d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype)
+        self.rootfs = os.path.join(self.testdir, d.getVar("IMAGE_LINK_NAME", True) + '-testimage.' + self.image_fstype)
+        self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
+        dump_target_cmds = d.getVar("testimage_dump_target", True)
+        dump_host_cmds = d.getVar("testimage_dump_host", True)
+        dump_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+
+        # Log QemuRunner log output to a file
+        import oe.path
+        bb.utils.mkdirhier(self.testdir)
+        self.qemurunnerlog = os.path.join(self.testdir, 'qemurunner_log.%s' % self.datetime)
+        logger = logging.getLogger('BitBake.QemuRunner')
+        loggerhandler = logging.FileHandler(self.qemurunnerlog)
+        loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
+        logger.addHandler(loggerhandler)
+        oe.path.symlink(os.path.basename(self.qemurunnerlog), os.path.join(self.testdir, 'qemurunner_log'), force=True)
+
+        if d.getVar("DISTRO", True) == "poky-tiny":
+            self.runner = QemuTinyRunner(machine=d.getVar("MACHINE", True),
+                            rootfs=self.rootfs,
+                            tmpdir = d.getVar("TMPDIR", True),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            logfile = self.qemulog,
+                            kernel = self.kernel,
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)))
+        else:
+            self.runner = QemuRunner(machine=d.getVar("MACHINE", True),
+                            rootfs=self.rootfs,
+                            tmpdir = d.getVar("TMPDIR", True),
+                            deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True),
+                            display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True),
+                            logfile = self.qemulog,
+                            boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)),
+                            dump_dir = dump_dir,
+                            dump_host_cmds = d.getVar("testimage_dump_host", True))
+
+        self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
+
+    def deploy(self):
+        try:
+            bb.utils.mkdirhier(self.testdir)
+            shutil.copyfile(self.origrootfs, self.rootfs)
+        except Exception as e:
+            bb.fatal("Error copying rootfs: %s" % e)
+
+        qemuloglink = os.path.join(self.testdir, "qemu_boot_log")
+        if os.path.islink(qemuloglink):
+            os.unlink(qemuloglink)
+        os.symlink(self.qemulog, qemuloglink)
+
+        bb.note("rootfs file: %s" %  self.rootfs)
+        bb.note("Qemu log file: %s" % self.qemulog)
+        super(QemuTarget, self).deploy()
+
+    def start(self, params=None):
+        if self.runner.start(params):
+            self.ip = self.runner.ip
+            self.server_ip = self.runner.server_ip
+            self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
+        else:
+            self.stop()
+            if os.path.exists(self.qemulog):
+                with open(self.qemulog, 'r') as f:
+                    bb.error("Qemu log output from %s:\n%s" % (self.qemulog, f.read()))
+            raise bb.build.FuncFailed("%s - FAILED to start qemu - check the task log and the boot log" % self.pn)
+
+    def check(self):
+        return self.runner.is_alive()
+
+    def stop(self):
+        self.runner.stop()
+        self.connection = None
+        self.ip = None
+        self.server_ip = None
+
+    def restart(self, params=None):
+        if self.runner.restart(params):
+            self.ip = self.runner.ip
+            self.server_ip = self.runner.server_ip
+            self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
+        else:
+            raise bb.build.FuncFailed("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
+
+    def run_serial(self, command):
+        return self.runner.run_serial(command)
+
+
+class SimpleRemoteTarget(BaseTarget):
+
+    def __init__(self, d):
+        super(SimpleRemoteTarget, self).__init__(d)
+        addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
+        self.ip = addr.split(":")[0]
+        try:
+            self.port = addr.split(":")[1]
+        except IndexError:
+            self.port = None
+        bb.note("Target IP: %s" % self.ip)
+        self.server_ip = d.getVar("TEST_SERVER_IP", True)
+        if not self.server_ip:
+            try:
+                self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1]
+            except Exception as e:
+                bb.fatal("Failed to determine the host IP address (alternatively you can set TEST_SERVER_IP with the IP address of this machine): %s" % e)
+        bb.note("Server IP: %s" % self.server_ip)
+
+    def deploy(self):
+        super(SimpleRemoteTarget, self).deploy()
+
+    def start(self, params=None):
+        self.connection = SSHControl(self.ip, logfile=self.sshlog, port=self.port)
+
+    def stop(self):
+        self.connection = None
+        self.ip = None
+        self.server_ip = None
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
new file mode 100644
index 0000000..2260046
--- /dev/null
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -0,0 +1,15 @@
+# Enable other layers to have modules in the same named directory
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
+
+
+# Borrowed from CalledProcessError
+
+class CommandError(Exception):
+    def __init__(self, retcode, cmd, output = None):
+        self.retcode = retcode
+        self.cmd = cmd
+        self.output = output
+    def __str__(self):
+        return "Command '%s' returned non-zero exit status %d with output: %s" % (self.cmd, self.retcode, self.output)
+
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
new file mode 100644
index 0000000..08e2cbb
--- /dev/null
+++ b/meta/lib/oeqa/utils/commands.py
@@ -0,0 +1,224 @@
+# Copyright (c) 2013-2014 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# DESCRIPTION
+# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest
+# It provides a class and methods for running commands on the host in a convenient way for tests.
+
+
+
+import os
+import sys
+import signal
+import subprocess
+import threading
+import logging
+from oeqa.utils import CommandError
+from oeqa.utils import ftools
+import re
+import contextlib
+
+class Command(object):
+    def __init__(self, command, bg=False, timeout=None, data=None, **options):
+
+        self.defaultopts = {
+            "stdout": subprocess.PIPE,
+            "stderr": subprocess.STDOUT,
+            "stdin": None,
+            "shell": False,
+            "bufsize": -1,
+        }
+
+        self.cmd = command
+        self.bg = bg
+        self.timeout = timeout
+        self.data = data
+
+        self.options = dict(self.defaultopts)
+        if isinstance(self.cmd, basestring):
+            self.options["shell"] = True
+        if self.data:
+            self.options['stdin'] = subprocess.PIPE
+        self.options.update(options)
+
+        self.status = None
+        self.output = None
+        self.error = None
+        self.thread = None
+
+        self.log = logging.getLogger("utils.commands")
+
+    def run(self):
+        self.process = subprocess.Popen(self.cmd, **self.options)
+
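+        # communicate() runs on a separate thread so that run() can enforce
+        # self.timeout on foreground commands via Thread.join().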
+        def commThread():
+            self.output, self.error = self.process.communicate(self.data)
+
+        self.thread = threading.Thread(target=commThread)
+        self.thread.start()
+
+        self.log.debug("Running command '%s'" % self.cmd)
+
+        if not self.bg:
+            self.thread.join(self.timeout)
+            self.stop()
+
+    def stop(self):
+        if self.thread.isAlive():
+            self.process.terminate()
+            # let's give it more time to terminate gracefully before killing it
+            self.thread.join(5)
+            if self.thread.isAlive():
+                self.process.kill()
+                self.thread.join()
+
+        self.output = self.output.rstrip()
+        self.status = self.process.poll()
+
+        self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status))
+        # Logging the complete output would be impractical: bitbake -e output is
+        # huge and would make the log file useless, so only log the last lines
+        # of a failed command.
+        if self.status:
+            lout = "\n".join(self.output.splitlines()[-20:])
+            self.log.debug("Last 20 lines:\n%s" % lout)
+
+
+class Result(object):
+    pass
+
+
+def runCmd(command, ignore_status=False, timeout=None, assert_error=True, **options):
+    result = Result()
+
+    cmd = Command(command, timeout=timeout, **options)
+    cmd.run()
+
+    result.command = command
+    result.status = cmd.status
+    result.output = cmd.output
+    result.pid = cmd.process.pid
+
+    if result.status and not ignore_status:
+        if assert_error:
+            raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, result.output))
+        else:
+            raise CommandError(result.status, command, result.output)
+
+    return result
+
+
+def bitbake(command, ignore_status=False, timeout=None, postconfig=None, **options):
+
+    if postconfig:
+        postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
+        ftools.write_file(postconfig_file, postconfig)
+        extra_args = "-R %s" % postconfig_file
+    else:
+        extra_args = ""
+
+    if isinstance(command, basestring):
+        cmd = "bitbake " + extra_args + " " + command
+    else:
+        cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]]
+
+    try:
+        return runCmd(cmd, ignore_status, timeout, **options)
+    finally:
+        if postconfig:
+            os.remove(postconfig_file)
+
+
+def get_bb_env(target=None, postconfig=None):
+    if target:
+        return bitbake("-e %s" % target, postconfig=postconfig).output
+    else:
+        return bitbake("-e", postconfig=postconfig).output
+
+def get_bb_var(var, target=None, postconfig=None):
+    val = None
+    bbenv = get_bb_env(target, postconfig=postconfig)
+    lastline = None
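+    # 'bitbake -e' prints variables as 'VAR="value"' or 'export VAR="value"'; unexported
+    # variables appear as 'unset VAR' with the value on the preceding comment line.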
+    for line in bbenv.splitlines():
+        if re.search("^(export )?%s=" % var, line):
+            val = line.split('=', 1)[1]
+            val = val.strip('\"')
+            break
+        elif re.match("unset %s$" % var, line):
+            # Handle [unexport] variables
+            if lastline.startswith('#   "'):
+                val = lastline.split('\"')[1]
+                break
+        lastline = line
+    return val
+
+def get_test_layer():
+    layers = get_bb_var("BBLAYERS").split()
+    testlayer = None
+    for l in layers:
+        if '~' in l:
+            l = os.path.expanduser(l)
+        if "/meta-selftest" in l and os.path.isdir(l):
+            testlayer = l
+            break
+    return testlayer
+
+def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
+    os.makedirs(os.path.join(templayerdir, 'conf'))
+    with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
+        f.write('BBPATH .= ":${LAYERDIR}"\n')
+        f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
+        f.write('            ${LAYERDIR}/%s/*.bbappend"\n' % recipepathspec)
+        f.write('BBFILE_COLLECTIONS += "%s"\n' % templayername)
+        f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
+        f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
+        f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
+
+
+@contextlib.contextmanager
+def runqemu(pn, test):
+
+    import bb.tinfoil
+    import bb.build
+
+    tinfoil = bb.tinfoil.Tinfoil()
+    tinfoil.prepare(False)
+    try:
+        tinfoil.logger.setLevel(logging.WARNING)
+        import oeqa.targetcontrol
+        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
+        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
+        import oe.recipeutils
+        recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn)
+        recipedata = oe.recipeutils.parse_recipe(recipefile, [], tinfoil.config_data)
+
+        # The QemuRunner log is saved out, but we need to ensure it is at the right
+        # log level (and then ensure that since it's a child of the BitBake logger,
+        # we disable propagation so we don't then see the log events on the console)
+        logger = logging.getLogger('BitBake.QemuRunner')
+        logger.setLevel(logging.DEBUG)
+        logger.propagate = False
+        logdir = recipedata.getVar("TEST_LOG_DIR", True)
+
+        qemu = oeqa.targetcontrol.QemuTarget(recipedata)
+    finally:
+        # We need to shut down tinfoil early here in case we actually want
+        # to run tinfoil-using utilities with the running QEMU instance.
+        # Luckily QemuTarget doesn't need it after the constructor.
+        tinfoil.shutdown()
+
+    try:
+        qemu.deploy()
+        try:
+            qemu.start()
+        except bb.build.FuncFailed:
+            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)
+
+        yield qemu
+
+    finally:
+        try:
+            qemu.stop()
+        except:
+            pass
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
new file mode 100644
index 0000000..162a88f
--- /dev/null
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -0,0 +1,222 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# Some custom decorators that can be used by unittests
+# Most useful is skipUnlessPassed which can be used for
+# creating dependencies between two test methods.
+
+import os
+import logging
+import sys
+import unittest
+import threading
+import signal
+from functools import wraps
+
+# get the "result" object from one of the upper frames, provided that one of those frames is a unittest.case frame
+class getResults(object):
+    def __init__(self):
+        # dynamically determine the unittest.case frame and use it to read the result lists
+        ident = threading.current_thread().ident
+        upperf = sys._current_frames()[ident]
+        while (upperf.f_globals['__name__'] != 'unittest.case'):
+            upperf = upperf.f_back
+
+        def handleList(items):
+            ret = []
+            # items is a list of tuples, (test, failure) or (_ErrorHolder(), Exception())
+            for i in items:
+                s = i[0].id()
+                #Handle the _ErrorHolder objects from skipModule failures
+                if "setUpModule (" in s:
+                    ret.append(s.replace("setUpModule (", "").replace(")",""))
+                else:
+                    ret.append(s)
+            return ret
+        self.faillist = handleList(upperf.f_locals['result'].failures)
+        self.errorlist = handleList(upperf.f_locals['result'].errors)
+        self.skiplist = handleList(upperf.f_locals['result'].skipped)
+
+    def getFailList(self):
+        return self.faillist
+
+    def getErrorList(self):
+        return self.errorlist
+
+    def getSkipList(self):
+        return self.skiplist
+
+class skipIfFailure(object):
+
+    def __init__(self,testcase):
+        self.testcase = testcase
+
+    def __call__(self,f):
+        def wrapped_f(*args):
+            res = getResults()
+            if self.testcase in (res.getFailList() or res.getErrorList()):
+                raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
+            return f(*args)
+        wrapped_f.__name__ = f.__name__
+        return wrapped_f
+
+class skipIfSkipped(object):
+
+    def __init__(self,testcase):
+        self.testcase = testcase
+
+    def __call__(self,f):
+        def wrapped_f(*args):
+            res = getResults()
+            if self.testcase in res.getSkipList():
+                raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
+            return f(*args)
+        wrapped_f.__name__ = f.__name__
+        return wrapped_f
+
+class skipUnlessPassed(object):
+
+    def __init__(self,testcase):
+        self.testcase = testcase
+
+    def __call__(self,f):
+        def wrapped_f(*args):
+            res = getResults()
+            if self.testcase in res.getSkipList() or \
+                    self.testcase in res.getFailList() or \
+                    self.testcase in res.getErrorList():
+                raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
+            return f(*args)
+        wrapped_f.__name__ = f.__name__
+        wrapped_f._depends_on = self.testcase
+        return wrapped_f
+
+class testcase(object):
+
+    def __init__(self, test_case):
+        self.test_case = test_case
+
+    def __call__(self, func):
+        def wrapped_f(*args):
+            return func(*args)
+        wrapped_f.test_case = self.test_case
+        wrapped_f.__name__ = func.__name__
+        return wrapped_f
+
+class NoParsingFilter(logging.Filter):
+    def filter(self, record):
+        return record.levelno == 100
+
+def LogResults(original_class):
+    orig_method = original_class.run
+
+    #rewrite the run method of unittest.TestCase to add testcase logging
+    def run(self, result, *args, **kws):
+        orig_method(self, result, *args, **kws)
+        passed = True
+        testMethod = getattr(self, self._testMethodName)
+        # if the test case is decorated then use its number, else use its name
+        try:
+            test_case = testMethod.test_case
+        except AttributeError:
+            test_case = self._testMethodName
+
+        class_name = str(testMethod.im_class).split("'")[1]
+
+        #create custom logging level for filtering.
+        custom_log_level = 100
+        logging.addLevelName(custom_log_level, 'RESULTS')
+        caller = os.path.basename(sys.argv[0])
+
+        def results(self, message, *args, **kws):
+            if self.isEnabledFor(custom_log_level):
+                self.log(custom_log_level, message, *args, **kws)
+        logging.Logger.results = results
+
+        logging.basicConfig(filename=os.path.join(os.getcwd(),'results-'+caller+'.log'),
+                            filemode='w',
+                            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                            datefmt='%H:%M:%S',
+                            level=custom_log_level)
+        for handler in logging.root.handlers:
+            handler.addFilter(NoParsingFilter())
+        local_log = logging.getLogger(caller)
+
+        #check status of tests and record it
+
+        for (name, msg) in result.errors:
+            if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]):
+                local_log.results("Testcase "+str(test_case)+": ERROR")
+                local_log.results("Testcase "+str(test_case)+":\n"+msg)
+                passed = False
+        for (name, msg) in result.failures:
+            if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]):
+                local_log.results("Testcase "+str(test_case)+": FAILED")
+                local_log.results("Testcase "+str(test_case)+":\n"+msg)
+                passed = False
+        for (name, msg) in result.skipped:
+            if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]):
+                local_log.results("Testcase "+str(test_case)+": SKIPPED")
+                passed = False
+        if passed:
+            local_log.results("Testcase "+str(test_case)+": PASSED")
+
+    original_class.run = run
+    return original_class
+
+class TimeOut(BaseException):
+    pass
+
+def timeout(seconds):
+    def decorator(fn):
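+        # signal.alarm() is only available on Unix-like platforms; elsewhere the
+        # function is returned undecorated and no timeout is enforced.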
+        if hasattr(signal, 'alarm'):
+            @wraps(fn)
+            def wrapped_f(*args, **kw):
+                current_frame = sys._getframe()
+                def raiseTimeOut(signal, frame):
+                    if frame is not current_frame:
+                        raise TimeOut('%s seconds' % seconds)
+                prev_handler = signal.signal(signal.SIGALRM, raiseTimeOut)
+                try:
+                    signal.alarm(seconds)
+                    return fn(*args, **kw)
+                finally:
+                    signal.alarm(0)
+                    signal.signal(signal.SIGALRM, prev_handler)
+            return wrapped_f
+        else:
+            return fn
+    return decorator
+
+__tag_prefix = "tag__"
+def tag(*args, **kwargs):
+    """Decorator that adds attributes to classes or functions
+    for use with the Attribute (-a) plugin.
+    """
+    def wrap_ob(ob):
+        for name in args:
+            setattr(ob, __tag_prefix + name, True)
+        for name, value in kwargs.iteritems():
+            setattr(ob, __tag_prefix + name, value)
+        return ob
+    return wrap_ob
+
+def gettag(obj, key, default=None):
+    key = __tag_prefix + key
+    if not isinstance(obj, unittest.TestCase):
+        return getattr(obj, key, default)
+    tc_method = getattr(obj, obj._testMethodName)
+    ret = getattr(tc_method, key, getattr(obj, key, default))
+    return ret
+
+def getAllTags(obj):
+    def __gettags(o):
+        r = {k[len(__tag_prefix):]:getattr(o,k) for k in dir(o) if k.startswith(__tag_prefix)}
+        return r
+    if not isinstance(obj, unittest.TestCase):
+        return __gettags(obj)
+    tc_method = getattr(obj, obj._testMethodName)
+    ret = __gettags(obj)
+    ret.update(__gettags(tc_method))
+    return ret
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
new file mode 100644
index 0000000..4ae871c
--- /dev/null
+++ b/meta/lib/oeqa/utils/dump.py
@@ -0,0 +1,87 @@
+import os
+import sys
+import errno
+import datetime
+import itertools
+from commands import runCmd
+
+def get_host_dumper(d):
+    cmds = d.getVar("testimage_dump_host", True)
+    parent_dir = d.getVar("TESTIMAGE_DUMP_DIR", True)
+    return HostDumper(cmds, parent_dir)
+
+
+class BaseDumper(object):
+    """ Base class to dump commands from host/target """
+
+    def __init__(self, cmds, parent_dir):
+        self.cmds = []
+        self.parent_dir = parent_dir
+        if not cmds:
+            return
+        for cmd in cmds.split('\n'):
+            cmd = cmd.lstrip()
+            if not cmd or cmd[0] == '#':
+                continue
+            self.cmds.append(cmd)
+
+    def create_dir(self, dir_suffix):
+        dump_subdir = ("%s_%s" % (
+                datetime.datetime.now().strftime('%Y%m%d%H%M'),
+                dir_suffix))
+        dump_dir = os.path.join(self.parent_dir, dump_subdir)
+        try:
+            os.makedirs(dump_dir)
+        except OSError as err:
+            if err.errno != errno.EEXIST:
+                raise err
+        self.dump_dir = dump_dir
+
+    def _write_dump(self, command, output):
+        if isinstance(self, HostDumper):
+            prefix = "host"
+        elif isinstance(self, TargetDumper):
+            prefix = "target"
+        else:
+            prefix = "unknown"
+        for i in itertools.count():
+            filename = "%s_%02d_%s" % (prefix, i, command)
+            fullname = os.path.join(self.dump_dir, filename)
+            if not os.path.exists(fullname):
+                break
+        with open(fullname, 'w') as dump_file:
+            dump_file.write(output)
+
+
+class HostDumper(BaseDumper):
+    """ Class to get dumps from the host running the tests """
+
+    def __init__(self, cmds, parent_dir):
+        super(HostDumper, self).__init__(cmds, parent_dir)
+
+    def dump_host(self, dump_dir=""):
+        if dump_dir:
+            self.dump_dir = dump_dir
+        for cmd in self.cmds:
+            result = runCmd(cmd, ignore_status=True)
+            self._write_dump(cmd.split()[0], result.output)
+
+
+class TargetDumper(BaseDumper):
+    """ Class to get dumps from target, it only works with QemuRunner """
+
+    def __init__(self, cmds, parent_dir, qemurunner):
+        super(TargetDumper, self).__init__(cmds, parent_dir)
+        self.runner = qemurunner
+
+    def dump_target(self, dump_dir=""):
+        if dump_dir:
+            self.dump_dir = dump_dir
+        for cmd in self.cmds:
+            # Continue with the testing even if serial commands fail
+            try:
+                (status, output) = self.runner.run_serial(cmd)
+                self._write_dump(cmd.split()[0], output)
+            except:
+                print("Tried to dump info from target but "
+                        "serial console failed")
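+
+
+# A minimal usage sketch (commands and directory are hypothetical): collect the
+# output of a few host commands into a timestamped dump directory.
+if __name__ == "__main__":
+    dumper = HostDumper("top -bn1\nfree\ndf -h", "/tmp/oeqa-host-dumps")
+    dumper.create_dir("demo")
+    dumper.dump_host()
+    print("dumps written to %s" % dumper.dump_dir)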
diff --git a/meta/lib/oeqa/utils/ftools.py b/meta/lib/oeqa/utils/ftools.py
new file mode 100644
index 0000000..64ebe3d
--- /dev/null
+++ b/meta/lib/oeqa/utils/ftools.py
@@ -0,0 +1,27 @@
+import os
+import re
+
+def write_file(path, data):
+    wdata = data.rstrip() + "\n"
+    with open(path, "w") as f:
+        f.write(wdata)
+
+def append_file(path, data):
+    wdata = data.rstrip() + "\n"
+    with open(path, "a") as f:
+            f.write(wdata)
+
+def read_file(path):
+    data = None
+    with open(path) as f:
+        data = f.read()
+    return data
+
+def remove_from_file(path, data):
+    lines = read_file(path).splitlines()
+    rmdata = data.strip().splitlines()
+    for l in rmdata:
+        for c in range(0, lines.count(l)):
+            i = lines.index(l)
+            del(lines[i])
+    write_file(path, "\n".join(lines))
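+
+
+# A minimal usage sketch (the file path and contents are hypothetical): write a
+# config fragment, append a line to it, then remove that line again.
+if __name__ == "__main__":
+    path = "/tmp/ftools-demo.inc"
+    write_file(path, 'IMAGE_INSTALL_append = " dropbear"')
+    append_file(path, 'TEST_SUITES = "ping ssh"')
+    remove_from_file(path, 'TEST_SUITES = "ping ssh"')
+    print(read_file(path))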
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
new file mode 100644
index 0000000..76518d8
--- /dev/null
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -0,0 +1,35 @@
+import SimpleHTTPServer
+import multiprocessing
+import os
+
+class HTTPServer(SimpleHTTPServer.BaseHTTPServer.HTTPServer):
+
+    def server_start(self, root_dir):
+        import signal
+        signal.signal(signal.SIGTERM, signal.SIG_DFL)
+        os.chdir(root_dir)
+        self.serve_forever()
+
+class HTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+
+    def log_message(self, format_str, *args):
+        pass
+
+class HTTPService(object):
+
+    def __init__(self, root_dir, host=''):
+        self.root_dir = root_dir
+        self.host = host
+        self.port = 0
+
+    def start(self):
+        self.server = HTTPServer((self.host, self.port), HTTPRequestHandler)
+        if self.port == 0:
+            self.port = self.server.server_port
+        self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir])
+        self.process.start()
+
+    def stop(self):
+        self.server.server_close()
+        self.process.terminate()
+        self.process.join()
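+
+
+# A minimal usage sketch (the served directory is hypothetical): serve a
+# directory on a random free port, fetch the listing, then stop the server.
+if __name__ == "__main__":
+    import urllib2
+    service = HTTPService("/tmp")
+    service.start()
+    print(urllib2.urlopen("http://127.0.0.1:%d/" % service.port).read())
+    service.stop()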
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
new file mode 100644
index 0000000..87b5035
--- /dev/null
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import re
+import ftools
+
+
+# A parser that can be used to identify whether a line is a test result or a section statement.
+class Lparser(object):
+
+    def __init__(self, test_0_pass_regex, test_0_fail_regex, section_0_begin_regex=None, section_0_end_regex=None, **kwargs):
+        # Initialize the arguments dictionary
+        if kwargs:
+            self.args = kwargs
+        else:
+            self.args = {}
+
+        # Add the default args to the dictionary
+        self.args['test_0_pass_regex'] = test_0_pass_regex
+        self.args['test_0_fail_regex'] = test_0_fail_regex
+        if section_0_begin_regex:
+            self.args['section_0_begin_regex'] = section_0_begin_regex
+        if section_0_end_regex:
+            self.args['section_0_end_regex'] = section_0_end_regex
+
+        self.test_possible_status = ['pass', 'fail', 'error']
+        self.section_possible_status = ['begin', 'end']
+
+        self.initialized = False
+
+
+    # Initialize the parser with the current configuration
+    def init(self):
+
+        # extra arguments can be added by the user to define new test and section categories. They must follow a pre-defined pattern: <type>_<category_name>_<status>_regex
+        self.test_argument_pattern = "^test_(.+?)_(%s)_regex" % '|'.join(map(str, self.test_possible_status))
+        self.section_argument_pattern = "^section_(.+?)_(%s)_regex" % '|'.join(map(str, self.section_possible_status))
+
+        # Initialize the test and section regex dictionaries
+        self.test_regex = {}
+        self.section_regex = {}
+
+        for arg, value in self.args.items():
+            if not value:
+                raise Exception('The value of the provided argument %s is %s. It should have a valid value.' % (arg, value))
+            is_test = re.search(self.test_argument_pattern, arg)
+            is_section = re.search(self.section_argument_pattern, arg)
+            if is_test:
+                if not is_test.group(1) in self.test_regex:
+                    self.test_regex[is_test.group(1)] = {}
+                self.test_regex[is_test.group(1)][is_test.group(2)] = re.compile(value)
+            elif is_section:
+                if not is_section.group(1) in self.section_regex:
+                    self.section_regex[is_section.group(1)] = {}
+                self.section_regex[is_section.group(1)][is_section.group(2)] = re.compile(value)
+            else:
+                # TODO: Make these call a traceback instead of a simple exception..
+                raise Exception("The provided argument name does not correspond to any valid type. Please give one of the following types:\nfor tests: %s\nfor sections: %s" % (self.test_argument_pattern, self.section_argument_pattern))
+
+        self.initialized = True
+
+    # Parse a line and return a tuple containing the type of result (test/section) and its category, status and name
+    def parse_line(self, line):
+        if not self.initialized:
+            raise Exception("The parser is not initialized..")
+
+        for test_category, test_status_list in self.test_regex.items():
+            for test_status, status_regex in test_status_list.items():
+                test_name = status_regex.search(line)
+                if test_name:
+                    return ['test', test_category, test_status, test_name.group(1)]
+
+        for section_category, section_status_list in self.section_regex.items():
+            for section_status, status_regex in section_status_list.items():
+                section_name = status_regex.search(line)
+                if section_name:
+                    return ['section', section_category, section_status, section_name.group(1)]
+        return None
+
+
+class Result(object):
+
+    def __init__(self):
+        self.result_dict = {}
+
+    def store(self, section, test, status):
+        if not section in self.result_dict:
+            self.result_dict[section] = []
+
+        self.result_dict[section].append((test, status))
+
+    # Sort the tests in each section by test name (the first element of the tuple). This is helpful when diffing results with git, since the entries are always in the same order.
+    def sort_tests(self):
+        for package in self.result_dict:
+            sorted_results = sorted(self.result_dict[package], key=lambda tup: tup[0])
+            self.result_dict[package] = sorted_results
+
+    # Log the results as files. The file name is the section name and the contents are the tests in that section.
+    def log_as_files(self, target_dir, test_status):
+        if not isinstance(test_status, list):
+            raise Exception("test_status should be a list. Got " + str(test_status) + " instead.")
+        if not os.path.exists(target_dir):
+            raise Exception("Target directory does not exist: %s" % target_dir)
+        status_regex = re.compile('|'.join(map(str, test_status)))
+
+        for section, test_results in self.result_dict.items():
+            prefix = '.'.join(test_status) + '.'
+            if section != '':
+                prefix += section
+            section_file = os.path.join(target_dir, prefix)
+            # purge the file contents if it exists
+            open(section_file, 'w').close()
+            for test_result in test_results:
+                (test_name, status) = test_result
+                # we log only the tests with status in the test_status list
+                match_status = status_regex.search(status)
+                if match_status:
+                    ftools.append_file(section_file, status + ": " + test_name)
+
+    # Not yet implemented!
+    def log_to_lava(self):
+        pass
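+
+
+# A minimal usage sketch (regexes and log lines are hypothetical), following the
+# <type>_<category>_<status>_regex naming convention described above: parse a
+# small ptest-style log and write the failed tests out per section.
+if __name__ == "__main__":
+    parser = Lparser(test_0_pass_regex="^PASS: (.+)",
+                     test_0_fail_regex="^FAIL: (.+)",
+                     section_0_begin_regex="^BEGIN: (.+)",
+                     section_0_end_regex="^END: (.+)")
+    parser.init()
+    result = Result()
+
+    section = ''
+    for line in ["BEGIN: busybox", "PASS: cat", "FAIL: mount", "END: busybox"]:
+        parsed = parser.parse_line(line)
+        if not parsed:
+            continue
+        line_type, category, status, name = parsed
+        if line_type == 'section':
+            section = name if status == 'begin' else ''
+        else:
+            result.store(section, name, status)
+
+    result.sort_tests()
+    result.log_as_files("/tmp", test_status=['fail'])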
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
new file mode 100644
index 0000000..d32c9db
--- /dev/null
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -0,0 +1,519 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# This module provides a class for starting qemu images using runqemu.
+# It's used by testimage.bbclass.
+
+import subprocess
+import os
+import sys
+import time
+import signal
+import re
+import socket
+import select
+import errno
+import threading
+from oeqa.utils.dump import HostDumper
+
+import logging
+logger = logging.getLogger("BitBake.QemuRunner")
+
+class QemuRunner:
+
+    def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds):
+
+        # Popen object for runqemu
+        self.runqemu = None
+        # pid of the qemu process that runqemu will start
+        self.qemupid = None
+        # target ip - from the command line
+        self.ip = None
+        # host ip - where qemu is running
+        self.server_ip = None
+
+        self.machine = machine
+        self.rootfs = rootfs
+        self.display = display
+        self.tmpdir = tmpdir
+        self.deploy_dir_image = deploy_dir_image
+        self.logfile = logfile
+        self.boottime = boottime
+        self.logged = False
+        self.thread = None
+
+        self.runqemutime = 60
+        self.host_dumper = HostDumper(dump_host_cmds, dump_dir)
+
+    def create_socket(self):
+        try:
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.setblocking(0)
+            sock.bind(("127.0.0.1",0))
+            sock.listen(2)
+            port = sock.getsockname()[1]
+            logger.info("Created listening socket for qemu serial console on: 127.0.0.1:%s" % port)
+            return (sock, port)
+
+        except socket.error:
+            sock.close()
+            raise
+
+    def log(self, msg):
+        if self.logfile:
+            with open(self.logfile, "a") as f:
+                f.write("%s" % msg)
+
+    def getOutput(self, o):
+        import fcntl
+        fl = fcntl.fcntl(o, fcntl.F_GETFL)
+        fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+        return os.read(o.fileno(), 1000000)
+
+
+    def handleSIGCHLD(self, signum, frame):
+        if self.runqemu and self.runqemu.poll():
+            if self.runqemu.returncode:
+                logger.info('runqemu exited with code %d' % self.runqemu.returncode)
+                logger.info("Output from runqemu:\n%s" % self.getOutput(self.runqemu.stdout))
+                self.stop()
+                self._dump_host()
+                raise SystemExit
+
+    def start(self, qemuparams = None):
+        if self.display:
+            os.environ["DISPLAY"] = self.display
+        else:
+            logger.error("To start qemu I need a X desktop, please set DISPLAY correctly (e.g. DISPLAY=:1)")
+            return False
+        if not os.path.exists(self.rootfs):
+            logger.error("Invalid rootfs %s" % self.rootfs)
+            return False
+        if not os.path.exists(self.tmpdir):
+            logger.error("Invalid TMPDIR path %s" % self.tmpdir)
+            return False
+        else:
+            os.environ["OE_TMPDIR"] = self.tmpdir
+        if not os.path.exists(self.deploy_dir_image):
+            logger.error("Invalid DEPLOY_DIR_IMAGE path %s" % self.deploy_dir_image)
+            return False
+        else:
+            os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
+
+        try:
+            threadsock, threadport = self.create_socket()
+            self.server_socket, self.serverport = self.create_socket()
+        except socket.error, msg:
+            logger.error("Failed to create listening socket: %s" % msg[1])
+            return False
+
+        # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
+        # badly with screensavers.
+        os.environ["QEMU_DONT_GRAB"] = "1"
+        self.qemuparams = 'bootparams="console=tty1 console=ttyS0,115200n8" qemuparams="-serial tcp:127.0.0.1:{}"'.format(threadport)
+        if qemuparams:
+            self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"'
+
+        self.origchldhandler = signal.getsignal(signal.SIGCHLD)
+        signal.signal(signal.SIGCHLD, self.handleSIGCHLD)
+
+        launch_cmd = 'runqemu tcpserial=%s %s %s %s' % (self.serverport, self.machine, self.rootfs, self.qemuparams)
+        # FIXME: We pass in stdin=subprocess.PIPE here to work around stty
+        # blocking at the end of the runqemu script when using this within
+        # oe-selftest (this makes stty error out immediately). There ought
+        # to be a proper fix but this will suffice for now.
+        self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp)
+        output = self.runqemu.stdout
+
+        #
+        # We need the preexec_fn above so that all runqemu processes can easily be killed 
+        # (by killing their process group). This presents a problem if this controlling
+        # process itself is killed however since those processes don't notice the death 
+        # of the parent and merrily continue on.
+        #
+        # Rather than hack runqemu to deal with this, we add something here instead. 
+        # Basically we fork off another process which holds an open pipe to the parent
+        # and also is setpgrp. If/when the pipe sees EOF from the parent dying, it kills
+        # the process group. This is like prctl's PDEATHSIG but for a process group
+        # rather than a single process.
+        #
+        r, w = os.pipe()
+        self.monitorpid = os.fork()
+        if self.monitorpid:
+            os.close(r)
+            self.monitorpipe = os.fdopen(w, "w")
+        else:
+            # child process
+            os.setpgrp()
+            os.close(w)
+            r = os.fdopen(r)
+            x = r.read()
+            os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
+            sys.exit(0)
+
+        logger.info("runqemu started, pid is %s" % self.runqemu.pid)
+        logger.info("waiting at most %s seconds for qemu pid" % self.runqemutime)
+        endtime = time.time() + self.runqemutime
+        while not self.is_alive() and time.time() < endtime:
+            if self.runqemu.poll():
+                if self.runqemu.returncode:
+                    # No point waiting any longer
+                    logger.info('runqemu exited with code %d' % self.runqemu.returncode)
+                    self._dump_host()
+                    self.stop()
+                    logger.info("Output from runqemu:\n%s" % self.getOutput(output))
+                    return False
+            time.sleep(1)
+
+        if self.is_alive():
+            logger.info("qemu started - qemu procces pid is %s" % self.qemupid)
+            cmdline = ''
+            with open('/proc/%s/cmdline' % self.qemupid) as p:
+                cmdline = p.read()
+            try:
+                ips = re.findall("((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
+                if not ips or len(ips) != 3:
+                    raise ValueError
+                else:
+                    self.ip = ips[0]
+                    self.server_ip = ips[1]
+            except (IndexError, ValueError):
+                logger.info("Couldn't get ip from qemu process arguments! Here is the qemu command line used:\n%s\nand output from runqemu:\n%s" % (cmdline, self.getOutput(output)))
+                self._dump_host()
+                self.stop()
+                return False
+            logger.info("qemu cmdline used:\n{}".format(cmdline))
+            logger.info("Target IP: %s" % self.ip)
+            logger.info("Server IP: %s" % self.server_ip)
+
+            logger.info("Starting logging thread")
+            self.thread = LoggingThread(self.log, threadsock, logger)
+            self.thread.start()
+            if not self.thread.connection_established.wait(self.boottime):
+                logger.error("Didn't receive a console connection from qemu. "
+                             "Here is the qemu command line used:\n%s\nand "
+                             "output from runqemu:\n%s" % (cmdline,
+                                                           self.getOutput(output)))
+                self.stop_thread()
+                return False
+
+            logger.info("Waiting at most %d seconds for login banner" % self.boottime)
+            endtime = time.time() + self.boottime
+            socklist = [self.server_socket]
+            reachedlogin = False
+            stopread = False
+            qemusock = None
+            bootlog = ''
+            while time.time() < endtime and not stopread:
+                sread, swrite, serror = select.select(socklist, [], [], 5)
+                for sock in sread:
+                    if sock is self.server_socket:
+                        qemusock, addr = self.server_socket.accept()
+                        qemusock.setblocking(0)
+                        socklist.append(qemusock)
+                        socklist.remove(self.server_socket)
+                        logger.info("Connection from %s:%s" % addr)
+                    else:
+                        data = sock.recv(1024)
+                        if data:
+                            bootlog += data
+                            if re.search(".* login:", bootlog):
+                                self.server_socket = qemusock
+                                stopread = True
+                                reachedlogin = True
+                                logger.info("Reached login banner")
+                        else:
+                            socklist.remove(sock)
+                            sock.close()
+                            stopread = True
+
+            if not reachedlogin:
+                logger.info("Target didn't reached login boot in %d seconds" % self.boottime)
+                lines = "\n".join(bootlog.splitlines()[-25:])
+                logger.info("Last 25 lines of text:\n%s" % lines)
+                logger.info("Check full boot log: %s" % self.logfile)
+                self._dump_host()
+                self.stop()
+                return False
+
+            # Even if we are not able to log in, the tests can continue
+            try:
+                (status, output) = self.run_serial("root\n", raw=True)
+                if re.search("root@[a-zA-Z0-9\-]+:~#", output):
+                    self.logged = True
+                    logger.info("Logged as root in serial console")
+                else:
+                    logger.info("Couldn't login into serial console"
+                            " as root using blank password")
+            except:
+                logger.info("Serial console failed while trying to login")
+
+        else:
+            logger.info("Qemu pid didn't appeared in %s seconds" % self.runqemutime)
+            self._dump_host()
+            self.stop()
+            logger.info("Output from runqemu:\n%s" % self.getOutput(output))
+            return False
+
+        return self.is_alive()
+
+    def stop(self):
+        self.stop_thread()
+        if self.runqemu:
+            signal.signal(signal.SIGCHLD, self.origchldhandler)
+            os.kill(self.monitorpid, signal.SIGKILL)
+            logger.info("Sending SIGTERM to runqemu")
+            try:
+                os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
+            except OSError as e:
+                if e.errno != errno.ESRCH:
+                    raise
+            endtime = time.time() + self.runqemutime
+            while self.runqemu.poll() is None and time.time() < endtime:
+                time.sleep(1)
+            if self.runqemu.poll() is None:
+                logger.info("Sending SIGKILL to runqemu")
+                os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
+            self.runqemu = None
+        if hasattr(self, 'server_socket') and self.server_socket:
+            self.server_socket.close()
+            self.server_socket = None
+        self.qemupid = None
+        self.ip = None
+        signal.signal(signal.SIGCHLD, self.origchldhandler)
+
+    def stop_thread(self):
+        if self.thread and self.thread.is_alive():
+            self.thread.stop()
+            self.thread.join()
+
+    def restart(self, qemuparams = None):
+        logger.info("Restarting qemu process")
+        if self.runqemu.poll() is None:
+            self.stop()
+        if self.start(qemuparams):
+            return True
+        return False
+
+    def is_alive(self):
+        if not self.runqemu:
+            return False
+        qemu_child = self.find_child(str(self.runqemu.pid))
+        if qemu_child:
+            self.qemupid = qemu_child[0]
+            if os.path.exists("/proc/" + str(self.qemupid)):
+                return True
+        return False
+
+    def find_child(self,parent_pid):
+        #
+        # Walk the process tree from the specified process looking for a qemu-system. Return its [pid, cmd]
+        #
+        ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0]
+        processes = ps.split('\n')
+        nfields = len(processes[0].split()) - 1
+        pids = {}
+        commands = {}
+        for row in processes[1:]:
+            data = row.split(None, nfields)
+            if len(data) != 3:
+                continue
+            if data[1] not in pids:
+                pids[data[1]] = []
+
+            pids[data[1]].append(data[0])
+            commands[data[0]] = data[2]
+
+        if parent_pid not in pids:
+            return []
+
+        parents = []
+        newparents = pids[parent_pid]
+        while newparents:
+            next = []
+            for p in newparents:
+                if p in pids:
+                    for n in pids[p]:
+                        if n not in parents and n not in next:
+                            next.append(n)
+                if p not in parents:
+                    parents.append(p)
+            newparents = next
+        #print "Children matching %s:" % str(parents)
+        for p in parents:
+            # Need to be careful here since runqemu-internal runs "ldd qemu-system-xxxx"
+            # Also, old versions of ldd (2.11) run "LD_XXXX qemu-system-xxxx"
+            basecmd = commands[p].split()[0]
+            basecmd = os.path.basename(basecmd)
+            if "qemu-system" in basecmd and "-serial tcp" in commands[p]:
+                return [int(p),commands[p]]
+
+    def run_serial(self, command, raw=False):
+        # We assume target system have echo to get command status
+        if not raw:
+            command = "%s; echo $?\n" % command
+        self.server_socket.sendall(command)
+        data = ''
+        status = 0
+        stopread = False
+        endtime = time.time()+5
+        while time.time()<endtime and not stopread:
+            sread, _, _ = select.select([self.server_socket],[],[],5)
+            for sock in sread:
+                answer = sock.recv(1024)
+                if answer:
+                    data += answer
+                    # Search the prompt to stop
+                    if re.search("[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#", data):
+                        stopread = True
+                        break
+                else:
+                    raise Exception("No data on serial console socket")
+        if data:
+            if raw:
+                status = 1
+            else:
+                # Remove first line (command line) and last line (prompt)
+                data = data[data.find('$?\r\n')+4:data.rfind('\r\n')]
+                index = data.rfind('\r\n')
+                if index == -1:
+                    status_cmd = data
+                    data = ""
+                else:
+                    status_cmd = data[index+2:]
+                    data = data[:index]
+                if (status_cmd == "0"):
+                    status = 1
+        return (status, str(data))
+
+
+    def _dump_host(self):
+        self.host_dumper.create_dir("qemu")
+        logger.warn("Qemu ended unexpectedly, dump data from host"
+                " is in %s" % self.host_dumper.dump_dir)
+        self.host_dumper.dump_host()
+
+# This class is for reading data from a socket and passing it to logfunc
+# to be processed. It's completely event driven and has a straightforward
+# event loop. The mechanism for stopping the thread is a simple pipe which
+# will wake up the poll and allow for tearing everything down.
+class LoggingThread(threading.Thread):
+    def __init__(self, logfunc, sock, logger):
+        self.connection_established = threading.Event()
+        self.serversock = sock
+        self.logfunc = logfunc
+        self.logger = logger
+        self.readsock = None
+        self.running = False
+
+        self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL
+        self.readevents = select.POLLIN | select.POLLPRI
+
+        threading.Thread.__init__(self, target=self.threadtarget)
+
+    def threadtarget(self):
+        try:
+            self.eventloop()
+        finally:
+            self.teardown()
+
+    def run(self):
+        self.logger.info("Starting logging thread")
+        self.readpipe, self.writepipe = os.pipe()
+        threading.Thread.run(self)
+
+    def stop(self):
+        self.logger.info("Stopping logging thread")
+        if self.running:
+            os.write(self.writepipe, "stop")
+
+    def teardown(self):
+        self.logger.info("Tearing down logging thread")
+        self.close_socket(self.serversock)
+
+        if self.readsock is not None:
+            self.close_socket(self.readsock)
+
+        self.close_ignore_error(self.readpipe)
+        self.close_ignore_error(self.writepipe)
+        self.running = False
+
+    def eventloop(self):
+        poll = select.poll()
+        eventmask = self.errorevents | self.readevents
+        poll.register(self.serversock.fileno())
+        poll.register(self.readpipe, eventmask)
+
+        breakout = False
+        self.running = True
+        self.logger.info("Starting thread event loop")
+        while not breakout:
+            events = poll.poll()
+            for event in events:
+                # An error occurred, bail out
+                if event[1] & self.errorevents:
+                    raise Exception(self.stringify_event(event[1]))
+
+                # Event to stop the thread
+                if self.readpipe == event[0]:
+                    self.logger.info("Stop event received")
+                    breakout = True
+                    break
+
+                # A connection request was received
+                elif self.serversock.fileno() == event[0]:
+                    self.logger.info("Connection request received")
+                    self.readsock, _ = self.serversock.accept()
+                    self.readsock.setblocking(0)
+                    poll.unregister(self.serversock.fileno())
+                    poll.register(self.readsock.fileno())
+
+                    self.logger.info("Setting connection established event")
+                    self.connection_established.set()
+
+                # Actual data to be logged
+                elif self.readsock.fileno() == event[0]:
+                    data = self.recv(1024)
+                    self.logfunc(data)
+
+    # Since the socket is non-blocking make sure to honor EAGAIN
+    # and EWOULDBLOCK.
+    def recv(self, count):
+        try:
+            data = self.readsock.recv(count)
+        except socket.error as e:
+            if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK:
+                return ''
+            else:
+                raise
+
+        if data is None:
+            raise Exception("No data on read ready socket")
+        elif not data:
+            # This actually means an orderly shutdown
+            # happened. But for this code it counts as an
+            # error since the connection shouldn't go away
+            # until qemu exits.
+            raise Exception("Console connection closed unexpectedly")
+
+        return data
+
+    def stringify_event(self, event):
+        val = ''
+        if select.POLLERR == event:
+            val = 'POLLERR'
+        elif select.POLLHUP == event:
+            val = 'POLLHUP'
+        elif select.POLLNVAL == event:
+            val = 'POLLNVAL'
+        return val
+
+    def close_socket(self, sock):
+        sock.shutdown(socket.SHUT_RDWR)
+        sock.close()
+
+    def close_ignore_error(self, fd):
+        try:
+            os.close(fd)
+        except OSError:
+            pass
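+
+
+# A minimal usage sketch (all paths and parameters below are hypothetical; in
+# practice testimage.bbclass builds these values from the bitbake datastore):
+# boot an image with runqemu, run a command over the serial console, shut down.
+if __name__ == "__main__":
+    runner = QemuRunner(machine="qemux86",
+                        rootfs="/path/to/deploy/images/qemux86/core-image-minimal-qemux86.ext4",
+                        display=":0",
+                        tmpdir="/path/to/build/tmp",
+                        deploy_dir_image="/path/to/deploy/images/qemux86",
+                        logfile="/tmp/qemu_boot_log",
+                        boottime=500,
+                        dump_dir="/tmp/qemu_dumps",
+                        dump_host_cmds="")
+    if runner.start():
+        status, output = runner.run_serial("uname -a")
+        print(output)
+        runner.stop()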
diff --git a/meta/lib/oeqa/utils/qemutinyrunner.py b/meta/lib/oeqa/utils/qemutinyrunner.py
new file mode 100644
index 0000000..4f95101
--- /dev/null
+++ b/meta/lib/oeqa/utils/qemutinyrunner.py
@@ -0,0 +1,170 @@
+# Copyright (C) 2015 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# This module provides a class for starting qemu images of poky tiny.
+# It's used by testimage.bbclass.
+
+import subprocess
+import os
+import time
+import signal
+import re
+import socket
+import select
+import bb
+from qemurunner import QemuRunner
+
+class QemuTinyRunner(QemuRunner):
+
+    def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime):
+
+        # Popen object for runqemu
+        self.runqemu = None
+        # pid of the qemu process that runqemu will start
+        self.qemupid = None
+        # target ip - from the command line
+        self.ip = None
+        # host ip - where qemu is running
+        self.server_ip = None
+
+        self.machine = machine
+        self.rootfs = rootfs
+        self.display = display
+        self.tmpdir = tmpdir
+        self.deploy_dir_image = deploy_dir_image
+        self.logfile = logfile
+        self.boottime = boottime
+
+        self.runqemutime = 60
+        self.socketfile = "console.sock"
+        self.server_socket = None
+        self.kernel = kernel
+
+
+    def create_socket(self):
+        tries = 3
+        while tries > 0:
+            try:
+                self.server_socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+                self.server_socket.connect(self.socketfile)
+                bb.note("Connected to qemu serial console socket.")
+                return
+            except socket.error, msg:
+                self.server_socket.close()
+                tries -= 1
+        bb.fatal("Failed to connect to qemu serial console socket.")
+
+    def log(self, msg):
+        if self.logfile:
+            with open(self.logfile, "a") as f:
+                f.write("%s" % msg)
+
+    def start(self, qemuparams = None):
+
+        if self.display:
+            os.environ["DISPLAY"] = self.display
+        else:
+            bb.error("To start qemu I need a X desktop, please set DISPLAY correctly (e.g. DISPLAY=:1)")
+            return False
+        if not os.path.exists(self.rootfs):
+            bb.error("Invalid rootfs %s" % self.rootfs)
+            return False
+        if not os.path.exists(self.tmpdir):
+            bb.error("Invalid TMPDIR path %s" % self.tmpdir)
+            return False
+        else:
+            os.environ["OE_TMPDIR"] = self.tmpdir
+        if not os.path.exists(self.deploy_dir_image):
+            bb.error("Invalid DEPLOY_DIR_IMAGE path %s" % self.deploy_dir_image)
+            return False
+        else:
+            os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
+
+        # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
+        # badly with screensavers.
+        os.environ["QEMU_DONT_GRAB"] = "1"
+        self.qemuparams = '--append "root=/dev/ram0 console=ttyS0" -nographic -serial unix:%s,server,nowait' % self.socketfile
+
+        launch_cmd = 'qemu-system-i386 -kernel %s -initrd %s %s' % (self.kernel, self.rootfs, self.qemuparams)
+        self.runqemu = subprocess.Popen(launch_cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT,preexec_fn=os.setpgrp)
+
+        bb.note("runqemu started, pid is %s" % self.runqemu.pid)
+        bb.note("waiting at most %s seconds for qemu pid" % self.runqemutime)
+        endtime = time.time() + self.runqemutime
+        while not self.is_alive() and time.time() < endtime:
+            time.sleep(1)
+
+        if self.is_alive():
+            bb.note("qemu started - qemu procces pid is %s" % self.qemupid)
+            self.create_socket()
+        else:
+            bb.note("Qemu pid didn't appeared in %s seconds" % self.runqemutime)
+            output = self.runqemu.stdout
+            self.stop()
+            bb.note("Output from runqemu:\n%s" % output.read())
+            return False
+
+        return self.is_alive()
+
+    def run_serial(self, command):
+        self.server_socket.sendall(command+'\n')
+        data = ''
+        status = 0
+        stopread = False
+        endtime = time.time()+5
+        while time.time()<endtime and not stopread:
+                sread, _, _ = select.select([self.server_socket],[],[],5)
+                for sock in sread:
+                        answer = sock.recv(1024)
+                        if answer:
+                                data += answer
+                        else:
+                                sock.close()
+                                stopread = True
+        if not data:
+            status = 1
+        return (status, str(data))
+
+    def find_child(self,parent_pid):
+        #
+        # Walk the process tree from the specified process looking for a qemu-system. Return its [pid, cmd]
+        #
+        ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,command'], stdout=subprocess.PIPE).communicate()[0]
+        processes = ps.split('\n')
+        nfields = len(processes[0].split()) - 1
+        pids = {}
+        commands = {}
+        for row in processes[1:]:
+            data = row.split(None, nfields)
+            if len(data) != 3:
+                continue
+            if data[1] not in pids:
+                pids[data[1]] = []
+
+            pids[data[1]].append(data[0])
+            commands[data[0]] = data[2]
+
+        if parent_pid not in pids:
+            return []
+
+        parents = []
+        newparents = pids[parent_pid]
+        while newparents:
+            next = []
+            for p in newparents:
+                if p in pids:
+                    for n in pids[p]:
+                        if n not in parents and n not in next:
+                            next.append(n)
+                if p not in parents:
+                    parents.append(p)
+            newparents = next
+        #print "Children matching %s:" % str(parents)
+        for p in parents:
+            # Need to be careful here since runqemu-internal runs "ldd qemu-system-xxxx"
+            # Also, old versions of ldd (2.11) run "LD_XXXX qemu-system-xxxx"
+            basecmd = commands[p].split()[0]
+            basecmd = os.path.basename(basecmd)
+            if "qemu-system" in basecmd and "-serial unix" in commands[p]:
+                return [int(p),commands[p]]
\ No newline at end of file
diff --git a/meta/lib/oeqa/utils/sshcontrol.py b/meta/lib/oeqa/utils/sshcontrol.py
new file mode 100644
index 0000000..00f5051
--- /dev/null
+++ b/meta/lib/oeqa/utils/sshcontrol.py
@@ -0,0 +1,154 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# Provides a class for setting up ssh connections,
+# running commands and copying files to/from a target.
+# It's used by testimage.bbclass and tests in lib/oeqa/runtime.
+
+import subprocess
+import time
+import os
+import select
+
+
+class SSHProcess(object):
+    def __init__(self, **options):
+
+        self.defaultopts = {
+            "stdout": subprocess.PIPE,
+            "stderr": subprocess.STDOUT,
+            "stdin": None,
+            "shell": False,
+            "bufsize": -1,
+            "preexec_fn": os.setsid,
+        }
+        self.options = dict(self.defaultopts)
+        self.options.update(options)
+        self.status = None
+        self.output = None
+        self.process = None
+        self.starttime = None
+        self.logfile = None
+
+        # Unset DISPLAY which means we won't trigger SSH_ASKPASS
+        env = os.environ.copy()
+        if "DISPLAY" in env:
+            del env['DISPLAY']
+        self.options['env'] = env
+
+    def log(self, msg):
+        if self.logfile:
+            with open(self.logfile, "a") as f:
+                f.write("%s" % msg)
+
+    def _run(self, command, timeout=None, logfile=None):
+        self.logfile = logfile
+        self.starttime = time.time()
+        output = ''
+        self.process = subprocess.Popen(command, **self.options)
+        if timeout:
+            endtime = self.starttime + timeout
+            eof = False
+            while time.time() < endtime and not eof:
+                if select.select([self.process.stdout], [], [], 5)[0] != []:
+                    data = os.read(self.process.stdout.fileno(), 1024)
+                    if not data:
+                        self.process.stdout.close()
+                        eof = True
+                    else:
+                        output += data
+                        self.log(data)
+                        endtime = time.time() + timeout
+
+
+            # process hasn't returned yet
+            if not eof:
+                self.process.terminate()
+                time.sleep(5)
+                try:
+                    self.process.kill()
+                except OSError:
+                    pass
+                lastline = "\nProcess killed - no output for %d seconds. Total running time: %d seconds." % (timeout, time.time() - self.starttime)
+                self.log(lastline)
+                output += lastline
+        else:
+            output = self.process.communicate()[0]
+            self.log(output.rstrip())
+
+        self.status = self.process.wait()
+        self.output = output.rstrip()
+
+    def run(self, command, timeout=None, logfile=None):
+        try:
+            self._run(command, timeout, logfile)
+        except:
+            # Need to guard against a SystemExit or other exception occurring whilst running
+            # and ensure we don't leave a process behind.
+            if self.process.poll() is None:
+                self.process.kill()
+                self.status = self.process.wait()
+            raise
+        return (self.status, self.output)
+
+class SSHControl(object):
+    def __init__(self, ip, logfile=None, timeout=300, user='root', port=None):
+        self.ip = ip
+        self.defaulttimeout = timeout
+        self.ignore_status = True
+        self.logfile = logfile
+        self.user = user
+        self.ssh_options = [
+                '-o', 'UserKnownHostsFile=/dev/null',
+                '-o', 'StrictHostKeyChecking=no',
+                '-o', 'LogLevel=ERROR'
+                ]
+        self.ssh = ['ssh', '-l', self.user ] + self.ssh_options
+        self.scp = ['scp'] + self.ssh_options
+        if port:
+            self.ssh = self.ssh + [ '-p', port ]
+            self.scp = self.scp + [ '-P', port ]
+
+    def log(self, msg):
+        if self.logfile:
+            with open(self.logfile, "a") as f:
+                f.write("%s\n" % msg)
+
+    def _internal_run(self, command, timeout=None, ignore_status = True):
+        self.log("[Running]$ %s" % " ".join(command))
+
+        proc = SSHProcess()
+        status, output = proc.run(command, timeout, logfile=self.logfile)
+
+        self.log("[Command returned '%d' after %.2f seconds]" % (status, time.time() - proc.starttime))
+
+        if status and not ignore_status:
+            raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, status, output))
+
+        return (status, output)
+
+    def run(self, command, timeout=None):
+        """
+        command - ssh command to run
+        timeout=<val> - kill command if there is no output after <val> seconds
+        timeout=None - kill command if there is no output after the default timeout (set in the constructor)
+        timeout=0 - no timeout, let command run until it returns
+        """
+
+        # We need to source /etc/profile for a proper PATH on the target
+        command = self.ssh + [self.ip, ' . /etc/profile; ' + command]
+
+        if timeout is None:
+            return self._internal_run(command, self.defaulttimeout, self.ignore_status)
+        if timeout == 0:
+            return self._internal_run(command, None, self.ignore_status)
+        return self._internal_run(command, timeout, self.ignore_status)
+
+    def copy_to(self, localpath, remotepath):
+        command = self.scp + [localpath, '%s@%s:%s' % (self.user, self.ip, remotepath)]
+        return self._internal_run(command, ignore_status=False)
+
+    def copy_from(self, remotepath, localpath):
+        command = self.scp + ['%s@%s:%s' % (self.user, self.ip, remotepath), localpath]
+        return self._internal_run(command, ignore_status=False)
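+
+
+# A minimal usage sketch (the target IP and log path are hypothetical): run a
+# command on the target over ssh and copy a file back to the host.
+if __name__ == "__main__":
+    ssh = SSHControl("192.168.7.2", logfile="/tmp/ssh_target_log")
+    status, output = ssh.run("uname -a")
+    print("exit status %d, output: %s" % (status, output))
+    ssh.copy_from("/etc/os-release", "/tmp/target-os-release")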
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
new file mode 100644
index 0000000..f850d78
--- /dev/null
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -0,0 +1,137 @@
+# Copyright (C) 2013 Intel Corporation
+#
+# Released under the MIT license (see COPYING.MIT)
+
+# Provides a class for automating build tests for projects
+
+import os
+import re
+import bb.utils
+import subprocess
+from abc import ABCMeta, abstractmethod
+
+class BuildProject():
+
+    __metaclass__ = ABCMeta
+
+    def __init__(self, d, uri, foldername=None, tmpdir="/tmp/"):
+        self.d = d
+        self.uri = uri
+        self.archive = os.path.basename(uri)
+        self.localarchive = os.path.join(tmpdir,self.archive)
+        self.fname = re.sub(r'\.tar\.bz2$|\.tar\.gz$', '', self.archive)
+        if foldername:
+            self.fname = foldername
+
+    # Download self.archive to self.localarchive
+    def _download_archive(self):
+
+        dl_dir = self.d.getVar("DL_DIR", True)
+        if dl_dir and os.path.exists(os.path.join(dl_dir, self.archive)):
+            bb.utils.copyfile(os.path.join(dl_dir, self.archive), self.localarchive)
+            return
+
+        exportvars = ['HTTP_PROXY', 'http_proxy',
+                      'HTTPS_PROXY', 'https_proxy',
+                      'FTP_PROXY', 'ftp_proxy',
+                      'FTPS_PROXY', 'ftps_proxy',
+                      'NO_PROXY', 'no_proxy',
+                      'ALL_PROXY', 'all_proxy',
+                      'SOCKS5_USER', 'SOCKS5_PASSWD']
+
+        cmd = ''
+        for var in exportvars:
+            val = self.d.getVar(var, True)
+            if val:
+                cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
+
+        cmd = cmd + "wget -O %s %s" % (self.localarchive, self.uri)
+        subprocess.check_call(cmd, shell=True)
+
+    # This method should provide a way to run a command in the desired environment.
+    @abstractmethod
+    def _run(self, cmd):
+        pass
+
+    # The timeout parameter of target.run is set to 0 to make the ssh command
+    # run with no timeout.
+    def run_configure(self, configure_args='', extra_cmds=''):
+        return self._run('cd %s; %s ./configure %s' % (self.targetdir, extra_cmds, configure_args))
+
+    def run_make(self, make_args=''):
+        return self._run('cd %s; make %s' % (self.targetdir, make_args))
+
+    def run_install(self, install_args=''):
+        return self._run('cd %s; make install %s' % (self.targetdir, install_args))
+
+    def clean(self):
+        self._run('rm -rf %s' % self.targetdir)
+        subprocess.call('rm -f %s' % self.localarchive, shell=True)
+
+class TargetBuildProject(BuildProject):
+
+    def __init__(self, target, d, uri, foldername=None):
+        self.target = target
+        self.targetdir = "~/"
+        BuildProject.__init__(self, d, uri, foldername, tmpdir="/tmp")
+
+    def download_archive(self):
+
+        self._download_archive()
+
+        (status, output) = self.target.copy_to(self.localarchive, self.targetdir)
+        if status != 0:
+            raise Exception("Failed to copy archive to target, output: %s" % output)
+
+        (status, output) = self.target.run('tar xf %s%s -C %s' % (self.targetdir, self.archive, self.targetdir))
+        if status != 0:
+            raise Exception("Failed to extract archive, output: %s" % output)
+
+        #Change targetdir to project folder
+        self.targetdir = self.targetdir + self.fname
+
+    # The timeout parameter of target.run is set to 0 to make the ssh command
+    # run with no timeout.
+    def _run(self, cmd):
+        return self.target.run(cmd, 0)[0]
+
+
+class SDKBuildProject(BuildProject):
+
+    def __init__(self, testpath, sdkenv, d, uri, foldername=None):
+        self.sdkenv = sdkenv
+        self.testdir = testpath
+        self.targetdir = testpath
+        bb.utils.mkdirhier(testpath)
+        self.datetime = d.getVar('DATETIME', True)
+        self.testlogdir = d.getVar("TEST_LOG_DIR", True)
+        bb.utils.mkdirhier(self.testlogdir)
+        self.logfile = os.path.join(self.testlogdir, "sdk_target_log.%s" % self.datetime)
+        BuildProject.__init__(self, d, uri, foldername, tmpdir=testpath)
+
+    def download_archive(self):
+
+        self._download_archive()
+
+        cmd = 'tar xf %s%s -C %s' % (self.targetdir, self.archive, self.targetdir)
+        subprocess.check_call(cmd, shell=True)
+
+        #Change targetdir to project folder
+        self.targetdir = self.targetdir + self.fname
+
+    def run_configure(self, configure_args=''):
+        return super(SDKBuildProject, self).run_configure(configure_args=(configure_args or '$CONFIGURE_FLAGS'), extra_cmds=' gnu-configize; ')
+
+    def run_install(self, install_args=''):
+        return super(SDKBuildProject, self).run_install(install_args=(install_args or "DESTDIR=%s/../install" % self.targetdir))
+
+    def log(self, msg):
+        if self.logfile:
+            with open(self.logfile, "a") as f:
+                f.write("%s\n" % msg)
+
+    def _run(self, cmd):
+        self.log("Running . %s; " % self.sdkenv + cmd)
+        return subprocess.call(". %s; " % self.sdkenv + cmd, shell=True)
+
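+
+
+# A minimal usage sketch (the datastore 'd', URI and paths are hypothetical; in
+# practice the SDK tests provide these): download a source archive, then
+# configure, build and install it inside the SDK environment.
+def _example_sdk_build(d):
+    project = SDKBuildProject("/tmp/sdk-test/cvs/",
+                              "/path/to/environment-setup-core2-64-poky-linux",
+                              d, "http://example.com/cvs-1.12.13.tar.bz2")
+    project.download_archive()
+    assert project.run_configure() == 0
+    assert project.run_make() == 0
+    assert project.run_install() == 0
+    project.clean()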