Merge commit 'f1e5d6968976c2341c6d554bfcc8895f1b33c26b' from yocto-2.0.1
diff --git a/yocto-poky/bitbake/bin/bitbake b/yocto-poky/bitbake/bin/bitbake
index e3d138b..981b2c0 100755
--- a/yocto-poky/bitbake/bin/bitbake
+++ b/yocto-poky/bitbake/bin/bitbake
@@ -35,7 +35,7 @@
 from bb import cookerdata
 from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
-__version__ = "1.27.1"
+__version__ = "1.28.0"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
diff --git a/yocto-poky/bitbake/bin/bitbake-worker b/yocto-poky/bitbake/bin/bitbake-worker
index af17b87..3390f63 100755
--- a/yocto-poky/bitbake/bin/bitbake-worker
+++ b/yocto-poky/bitbake/bin/bitbake-worker
@@ -10,6 +10,7 @@
 import select
 import errno
 import signal
+from multiprocessing import Lock
 
 # Users shouldn't be running this code directly
 if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -44,6 +45,9 @@
 
 worker_pipe = sys.stdout.fileno()
 bb.utils.nonblockingfd(worker_pipe)
+# Need to guard against multiprocessing being used in child processes
+# and multiple processes trying to write to the parent at the same time
+worker_pipe_lock = None
 
 handler = bb.event.LogHandler()
 logger.addHandler(handler)
@@ -85,10 +89,13 @@
 
 def worker_child_fire(event, d):
     global worker_pipe
+    global worker_pipe_lock
 
     data = "<event>" + pickle.dumps(event) + "</event>"
     try:
+        worker_pipe_lock.acquire()
         worker_pipe.write(data)
+        worker_pipe_lock.release()
     except IOError:
         sigterm_handler(None, None)
         raise
@@ -157,6 +164,7 @@
     if pid == 0:
         def child():
             global worker_pipe
+            global worker_pipe_lock
             pipein.close()
 
             signal.signal(signal.SIGTERM, sigterm_handler)
@@ -169,6 +177,7 @@
             bb.event.worker_pid = os.getpid()
             bb.event.worker_fire = worker_child_fire
             worker_pipe = pipeout
+            worker_pipe_lock = Lock()
 
             # Make the child the process group leader and ensure no
             # child process will be controlled by the current terminal
diff --git a/yocto-poky/bitbake/bin/toaster b/yocto-poky/bitbake/bin/toaster
index 411ce2c..2c3432c 100755
--- a/yocto-poky/bitbake/bin/toaster
+++ b/yocto-poky/bitbake/bin/toaster
@@ -54,35 +54,55 @@
     fi
 
     retval=0
-    if [ "$TOASTER_MANAGED" '=' '1' ]; then
-        python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
-    else
-        python $BBBASEDIR/lib/toaster/manage.py syncdb --noinput || retval=1
-    fi
+    # you can always add a superuser later via
+    # python bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
+    python $BBBASEDIR/lib/toaster/manage.py syncdb --noinput || retval=1
+
     python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
+
     if [ $retval -eq 1 ]; then
-        echo "Failed db sync, stopping system start" 1>&2
-    elif [ $retval -eq 2 ]; then
-        printf "\nError on migration, trying to recover... \n"
+        echo "Failed db sync, aborting system start" 1>&2
+        return $retval
+    fi
+
+    python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
+
+    if [ $retval -eq 1 ]; then
+        printf "\nError on orm migration, rolling back...\n"
         python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
-        retval=0
-        python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
+        return $retval
     fi
+
+    python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
+
+    if [ $retval -eq 1 ]; then
+        printf "\nError on bldcontrol migration, rolling back...\n"
+        python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol 0001_initial --fake
+        return $retval
+    fi
+
     if [ "$TOASTER_MANAGED" = '1' ]; then
-        python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
-        python $BBBASEDIR/lib/toaster/manage.py checksettings  --traceback || retval=1
+        python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
     fi
-    if [ $retval -eq 0 ]; then
-        echo "Starting webserver..."
-        python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
-        sleep 1
-        if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
-            retval=1
-            rm "${BUILDDIR}/.toastermain.pid"
-        else
-            echo "Webserver address:  http://0.0.0.0:$WEB_PORT/"
-        fi
+
+    if [ $retval -eq 1 ]; then
+        printf "\nError while checking settings; aborting\n"
+        return $retval
     fi
+
+    echo "Starting webserver..."
+
+    python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
+
+    sleep 1
+
+    if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
+        retval=1
+        rm "${BUILDDIR}/.toastermain.pid"
+    else
+        echo "Webserver address:  http://0.0.0.0:$WEB_PORT/"
+    fi
+
     return $retval
 }
 
@@ -156,6 +176,8 @@
     TOASTER=$0
 fi
 
+[ `basename \"$0\"` = `basename \"${TOASTER}\"` ] && TOASTER_MANAGED=1
+
 BBBASEDIR=`dirname $TOASTER`/..
 
 RUNNING=0
@@ -163,7 +185,34 @@
 NOTOASTERUI=0
 WEBSERVER=1
 TOASTER_BRBE=""
-WEB_PORT="8000"
+if [ "$WEB_PORT" = "" ]; then
+    WEB_PORT="8000"
+fi
+# this is the configuration file we are using for toaster
+# note default is assuming yocto. Override this if you are
+# running in a pure OE environment and use the toasterconf.json
+# in meta/conf/toasterconf.json
+# note: for future there are a number of relative path assumptions
+# in the local layers that currently prevent using an arbitrary
+# toasterconf.json
+if [ "$TOASTER_CONF" = "" ]; then
+    TOASTER_CONF="$(dirname $TOASTER)/../../meta-yocto/conf/toasterconf.json"
+    export TOASTER_CONF=$(python -c "import os; print os.path.realpath('$TOASTER_CONF')")
+fi
+if [ ! -f $TOASTER_CONF ]; then
+    echo "$TOASTER_CONF configuration file not found. set TOASTER_CONF to specify a path"
+    [ "$TOASTER_MANAGED" = '1' ] && exit 1 || return 1
+fi
+# this defines the dir toaster will use for
+# 1) clones of layers (in _toaster_clones )
+# 2) the build dir (in build)
+# 3) the sqlite db if that is being used.
+# 4) pid's we need to clean up on exit/shutdown
+# note: for future. in order to make this an arbitrary directory, we need to
+# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
+export TOASTER_DIR=`pwd`
+
+
 NOBROWSER=0
 
 for param in $*; do
@@ -185,9 +234,7 @@
     esac
 done
 
-[ -n "${BASH_SOURCE}" ] && SRCFILE=${BASH_SOURCE} || SRCFILE=$_
-
-if [ `basename \"$0\"` = `basename \"${SRCFILE}\"` ]; then
+if [ "$TOASTER_MANAGED" = '1' ]; then
     # We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
     # Start just the web server, point the web browser to the interface, and start any Django services.
 
@@ -197,7 +244,7 @@
     fi
 
     if [ -n "$BUILDDIR" ]; then
-        printf "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2
+        printf "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window.\n" 1>&2
         exit 1
     fi
 
@@ -234,7 +281,6 @@
             do_cleanup
         fi
     }
-    TOASTER_MANAGED=1
     export TOASTER_MANAGED=1
     if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
         echo "Failed to start the web server, stopping" 1>&2
diff --git a/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml b/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
index 05e1b95..e5aeffc 100644
--- a/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
+++ b/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml
@@ -856,6 +856,56 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
+            <glossdef>
+                <para>
+                    Allows adjustment of a task's Input/Output priority.
+                    During Autobuilder testing, random failures can occur
+                    for tasks due to I/O starvation.
+                    These failures occur during various QEMU runtime timeouts.
+                    You can use the <filename>BB_TASK_IONICE_LEVEL</filename>
+                    variable to adjust the I/O priority of these tasks.
+                    <note>
+                        This variable works similarly to the
+                        <link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
+                        variable except with a task's I/O priorities.
+                    </note>
+                </para>
+
+                <para>
+                    Set the variable as follows:
+                    <literallayout class='monospaced'>
+     BB_TASK_IONICE_LEVEL = "<replaceable>class</replaceable>.<replaceable>prio</replaceable>"
+                    </literallayout>
+                    For <replaceable>class</replaceable>, the default value is
+                    "2", which is a best effort.
+                    You can use "1" for realtime and "3" for idle.
+                    If you want to use realtime, you must have superuser
+                    privileges.
+                </para>
+
+                <para>
+                    For <replaceable>prio</replaceable>, you can use any
+                    value from "0", which is the highest priority, to "7",
+                    which is the lowest.
+                    The default value is "4".
+                    You do not need any special privileges to use this range
+                    of priority values.
+                    <note>
+                        In order for your I/O priority settings to take effect,
+                        you need the Completely Fair Queuing (CFQ) Scheduler
+                        selected for the backing block device.
+                        To select the scheduler, use the following command form
+                        where <replaceable>device</replaceable> is the device
+                        (e.g. sda, sdb, and so forth):
+                        <literallayout class='monospaced'>
+      $ sudo sh -c "echo cfq > /sys/block/<replaceable>device</replaceable>/queue/scheduler"
+                        </literallayout>
+                    </note>
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
             <glossdef>
                 <para>
@@ -1173,6 +1223,15 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
+            <glossdef>
+                <para>
+                    Allows you to use a configuration file to add to the list
+                    of command-line target recipes you want to build.
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
             <glossdef>
                 <para>
diff --git a/yocto-poky/bitbake/lib/bb/__init__.py b/yocto-poky/bitbake/lib/bb/__init__.py
index 1f7946e..ac62d26 100644
--- a/yocto-poky/bitbake/lib/bb/__init__.py
+++ b/yocto-poky/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.27.1"
+__version__ = "1.28.0"
 
 import sys
 if sys.version_info < (2, 7, 3):
diff --git a/yocto-poky/bitbake/lib/bb/build.py b/yocto-poky/bitbake/lib/bb/build.py
index 948c395..22428a6 100644
--- a/yocto-poky/bitbake/lib/bb/build.py
+++ b/yocto-poky/bitbake/lib/bb/build.py
@@ -413,6 +413,13 @@
         nice = int(nice) - curnice
         newnice = os.nice(nice)
         logger.debug(1, "Renice to %s " % newnice)
+    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
+    if ionice:
+        try:
+            cls, prio = ionice.split(".", 1)
+            bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
+        except:
+            bb.warn("Invalid ionice level %s" % ionice)
 
     bb.utils.mkdirhier(tempdir)
 
diff --git a/yocto-poky/bitbake/lib/bb/command.py b/yocto-poky/bitbake/lib/bb/command.py
index 398c1d6..74106d1 100644
--- a/yocto-poky/bitbake/lib/bb/command.py
+++ b/yocto-poky/bitbake/lib/bb/command.py
@@ -181,6 +181,16 @@
         value = str(params[1])
         command.cooker.data.setVar(varname, value)
 
+    def getSetVariable(self, command, params):
+        """
+        Read the value of a variable from data and set it into the datastore
+        which effectively expands and locks the value.
+        """
+        varname = params[0]
+        result = self.getVariable(command, params)
+        command.cooker.data.setVar(varname, result)
+        return result
+
     def setConfig(self, command, params):
         """
         Set the value of variable in configuration
diff --git a/yocto-poky/bitbake/lib/bb/cooker.py b/yocto-poky/bitbake/lib/bb/cooker.py
index a0d7d59..4df8881 100644
--- a/yocto-poky/bitbake/lib/bb/cooker.py
+++ b/yocto-poky/bitbake/lib/bb/cooker.py
@@ -255,6 +255,11 @@
         self.state = state.initial
         self.caches_array = []
 
+        # Need to preserve BB_CONSOLELOG over resets
+        consolelog = None
+        if hasattr(self, "data"):
+            consolelog = self.data.getVar("BB_CONSOLELOG", True)
+
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.enableDataTracking()
 
@@ -281,6 +286,8 @@
         self.data = self.databuilder.data
         self.data_hash = self.databuilder.data_hash
 
+        if consolelog:
+            self.data.setVar("BB_CONSOLELOG", consolelog)
 
         # we log all events to a file if so directed
         if self.configuration.writeeventlog:
@@ -531,6 +538,11 @@
         for o in options:
             if o in ['prefile', 'postfile']:
                 clean = False
+                server_val = getattr(self.configuration, "%s_server" % o)
+                if not options[o] and server_val:
+                    # restore value provided on server start
+                    setattr(self.configuration, o, server_val)
+                    continue
             setattr(self.configuration, o, options[o])
         for k in bb.utils.approved_variables():
             if k in environment and k not in self.configuration.env:
@@ -1391,10 +1403,28 @@
         build.reset_cache()
         self.buildSetVars()
 
+        # If we are told to do the None task then query the default task
+        if (task == None):
+            task = self.configuration.cmd
+
+        if not task.startswith("do_"):
+            task = "do_%s" % task
+
         taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
 
         buildname = self.data.getVar("BUILDNAME", False)
-        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)
+
+        # make targets to always look as <target>:do_<task>
+        ntargets = []
+        for target in fulltargetlist:
+            if ":" in target:
+                if ":do_" not in target:
+                    target = "%s:do_%s" % tuple(target.split(":", 1))
+            else:
+                target = "%s:%s" % (target, task)
+            ntargets.append(target)
+
+        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
 
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
         if 'universe' in targets:
diff --git a/yocto-poky/bitbake/lib/bb/cookerdata.py b/yocto-poky/bitbake/lib/bb/cookerdata.py
index f19c283..671c0cb 100644
--- a/yocto-poky/bitbake/lib/bb/cookerdata.py
+++ b/yocto-poky/bitbake/lib/bb/cookerdata.py
@@ -63,9 +63,9 @@
             raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
 
         if not self.options.pkgs_to_build:
-            bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
+            bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
             if error:
-                raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
+                raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
             if bbpkgs:
                 self.options.pkgs_to_build.extend(bbpkgs.split())
 
@@ -129,6 +129,8 @@
         self.extra_assume_provided = []
         self.prefile = []
         self.postfile = []
+        self.prefile_server = []
+        self.postfile_server = []
         self.debug = 0
         self.cmd = None
         self.abort = True
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index 288a1c8..a9c044b 100644
--- a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -955,7 +955,7 @@
                 origud.method.download(origud, ld)
                 if hasattr(origud.method,"build_mirror_data"):
                     origud.method.build_mirror_data(origud, ld)
-            return ud.localpath
+            return origud.localpath
         # Otherwise the result is a local file:// and we symlink to it
         if not os.path.exists(origud.localpath):
             if os.path.islink(origud.localpath):
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/yocto-poky/bitbake/lib/bb/fetch2/hg.py
index bbb4ed9..3b743ff 100644
--- a/yocto-poky/bitbake/lib/bb/fetch2/hg.py
+++ b/yocto-poky/bitbake/lib/bb/fetch2/hg.py
@@ -28,6 +28,7 @@
 import sys
 import logging
 import bb
+import errno
 from bb import data
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/yocto-poky/bitbake/lib/bb/fetch2/svn.py
index 1733c2b..8a29193 100644
--- a/yocto-poky/bitbake/lib/bb/fetch2/svn.py
+++ b/yocto-poky/bitbake/lib/bb/fetch2/svn.py
@@ -54,6 +54,11 @@
 
         ud.module = ud.parm["module"]
 
+        if not "path_spec" in ud.parm:
+            ud.path_spec = ud.module
+        else:
+            ud.path_spec = ud.parm["path_spec"]
+
         # Create paths to svn checkouts
         relpath = self._strip_leading_slashes(ud.path)
         ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
@@ -102,7 +107,7 @@
 
             if command == "fetch":
                 transportuser = ud.parm.get("transportuser", "")
-                svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
+                svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
             elif command == "update":
                 svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
             else:
@@ -149,7 +154,7 @@
 
         os.chdir(ud.pkgdir)
         # tar them up to a defined filename
-        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
 
     def clean(self, ud, d):
         """ Clean SVN specific files and dirs """
diff --git a/yocto-poky/bitbake/lib/bb/main.py b/yocto-poky/bitbake/lib/bb/main.py
index 8762f72..c8530fc 100755
--- a/yocto-poky/bitbake/lib/bb/main.py
+++ b/yocto-poky/bitbake/lib/bb/main.py
@@ -383,6 +383,13 @@
         # Collect the feature set for the UI
         featureset = getattr(ui_module, "featureSet", [])
 
+    if configParams.server_only:
+        for param in ('prefile', 'postfile'):
+            value = getattr(configParams, param)
+            if value:
+                setattr(configuration, "%s_server" % param, value)
+                param = "%s_server" % param
+
     if not configParams.remote_server:
         # we start a server with a given configuration
         server = start_server(servermodule, configParams, configuration, featureset)
diff --git a/yocto-poky/bitbake/lib/bb/runqueue.py b/yocto-poky/bitbake/lib/bb/runqueue.py
index 2b71eed..878028a 100644
--- a/yocto-poky/bitbake/lib/bb/runqueue.py
+++ b/yocto-poky/bitbake/lib/bb/runqueue.py
@@ -797,6 +797,15 @@
                         st = "do_%s" % st
                     invalidate_task(fn, st, True)
 
+        # Create and print to the logs a virtual/xxxx -> PN (fn) table
+        virtmap = taskData.get_providermap()
+        virtpnmap = {}
+        for v in virtmap:
+            virtpnmap[v] = self.dataCache.pkg_fn[virtmap[v]]
+            bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
+        if hasattr(bb.parse.siggen, "tasks_resolved"):
+            bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCache)
+
         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(range(len(self.runq_fnid)))
diff --git a/yocto-poky/bitbake/lib/bb/siggen.py b/yocto-poky/bitbake/lib/bb/siggen.py
index 2985272..0352e45 100644
--- a/yocto-poky/bitbake/lib/bb/siggen.py
+++ b/yocto-poky/bitbake/lib/bb/siggen.py
@@ -80,6 +80,7 @@
         self.taskdeps = {}
         self.runtaskdeps = {}
         self.file_checksum_values = {}
+        self.taints = {}
         self.gendeps = {}
         self.lookupcache = {}
         self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
@@ -199,11 +200,14 @@
         if 'nostamp' in taskdep and task in taskdep['nostamp']:
             # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
             import uuid
-            data = data + str(uuid.uuid4())
+            taint = str(uuid.uuid4())
+            data = data + taint
+            self.taints[k] = "nostamp:" + taint
 
         taint = self.read_taint(fn, task, dataCache.stamp[fn])
         if taint:
             data = data + taint
+            self.taints[k] = taint
             logger.warn("%s is tainted from a forced run" % k)
 
         h = hashlib.md5(data).hexdigest()
@@ -247,6 +251,10 @@
         if taint:
             data['taint'] = taint
 
+        if runtime and k in self.taints:
+            if 'nostamp:' in self.taints[k]:
+                data['taint'] = self.taints[k]
+
         fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
         try:
             with os.fdopen(fd, "wb") as stream:
diff --git a/yocto-poky/bitbake/lib/bb/taskdata.py b/yocto-poky/bitbake/lib/bb/taskdata.py
index 5fab704..4d12b33 100644
--- a/yocto-poky/bitbake/lib/bb/taskdata.py
+++ b/yocto-poky/bitbake/lib/bb/taskdata.py
@@ -612,6 +612,18 @@
                 break
         # self.dump_data()
 
+    def get_providermap(self):
+        virts = []
+        virtmap = {}
+
+        for name in self.build_names_index:
+            if name.startswith("virtual/"):
+                virts.append(name)
+        for v in virts:
+            if self.have_build_target(v):
+                virtmap[v] = self.fn_index[self.get_provider(v)[0]]
+        return virtmap
+
     def dump_data(self):
         """
         Dump some debug information on the internal data structures
diff --git a/yocto-poky/bitbake/lib/bb/tests/utils.py b/yocto-poky/bitbake/lib/bb/tests/utils.py
index 9171509..a035ccf 100644
--- a/yocto-poky/bitbake/lib/bb/tests/utils.py
+++ b/yocto-poky/bitbake/lib/bb/tests/utils.py
@@ -376,3 +376,206 @@
         (updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
         self.assertTrue(updated, 'List should be updated but isn\'t')
         self.assertEqual(newlines, newfile5.splitlines(True))
+
+
+class EditBbLayersConf(unittest.TestCase):
+
+    def _test_bblayers_edit(self, before, after, add, remove, notadded, notremoved):
+        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
+            tf.write(before)
+            tf.close()
+            try:
+                actual_notadded, actual_notremoved = bb.utils.edit_bblayers_conf(tf.name, add, remove)
+                with open(tf.name) as f:
+                    actual_after = f.readlines()
+                self.assertEqual(after.splitlines(True), actual_after)
+                self.assertEqual(notadded, actual_notadded)
+                self.assertEqual(notremoved, actual_notremoved)
+            finally:
+                os.remove(tf.name)
+
+
+    def test_bblayers_remove(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/subpath/layer3 \
+  /home/user/path/layer4 \
+  "
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/subpath/layer3 \
+  /home/user/path/layer4 \
+  "
+"""
+        self._test_bblayers_edit(before, after,
+                                 None,
+                                 '/home/user/path/layer2',
+                                 [],
+                                 [])
+
+
+    def test_bblayers_add(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/subpath/layer3 \
+  /home/user/path/layer4 \
+  "
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/subpath/layer3 \
+  /home/user/path/layer4 \
+  /other/path/to/layer5 \
+  "
+"""
+        self._test_bblayers_edit(before, after,
+                                 '/other/path/to/layer5/',
+                                 None,
+                                 [],
+                                 [])
+
+
+    def test_bblayers_add_remove(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/subpath/layer3 \
+  /home/user/path/layer4 \
+  "
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/layer4 \
+  /other/path/to/layer5 \
+  "
+"""
+        self._test_bblayers_edit(before, after,
+                                 ['/other/path/to/layer5', '/home/user/path/layer2/'], '/home/user/path/subpath/layer3/',
+                                 ['/home/user/path/layer2'],
+                                 [])
+
+
+    def test_bblayers_add_remove_home(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  ~/path/layer1 \
+  ~/path/layer2 \
+  ~/otherpath/layer3 \
+  ~/path/layer4 \
+  "
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS = " \
+  ~/path/layer2 \
+  ~/path/layer4 \
+  ~/path2/layer5 \
+  "
+"""
+        self._test_bblayers_edit(before, after,
+                                 [os.environ['HOME'] + '/path/layer4', '~/path2/layer5'],
+                                 [os.environ['HOME'] + '/otherpath/layer3', '~/path/layer1', '~/path/notinlist'],
+                                 [os.environ['HOME'] + '/path/layer4'],
+                                 ['~/path/notinlist'])
+
+
+    def test_bblayers_add_remove_plusequals(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS += " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  "
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS += " \
+  /home/user/path/layer2 \
+  /home/user/path/layer3 \
+  "
+"""
+        self._test_bblayers_edit(before, after,
+                                 '/home/user/path/layer3',
+                                 '/home/user/path/layer1',
+                                 [],
+                                 [])
+
+
+    def test_bblayers_add_remove_plusequals2(self):
+        before = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS += " \
+  /home/user/path/layer1 \
+  /home/user/path/layer2 \
+  /home/user/path/layer3 \
+  "
+BBLAYERS += "/home/user/path/layer4"
+BBLAYERS += "/home/user/path/layer5"
+"""
+        after = r"""
+# A comment
+
+BBPATH = "${TOPDIR}"
+BBFILES ?= ""
+BBLAYERS += " \
+  /home/user/path/layer2 \
+  /home/user/path/layer3 \
+  "
+BBLAYERS += "/home/user/path/layer5"
+BBLAYERS += "/home/user/otherpath/layer6"
+"""
+        self._test_bblayers_edit(before, after,
+                                 ['/home/user/otherpath/layer6', '/home/user/path/layer3'], ['/home/user/path/layer1', '/home/user/path/layer4', '/home/user/path/layer7'],
+                                 ['/home/user/path/layer3'],
+                                 ['/home/user/path/layer7'])
diff --git a/yocto-poky/bitbake/lib/bb/tinfoil.py b/yocto-poky/bitbake/lib/bb/tinfoil.py
index 1ea46d8..7aa653f 100644
--- a/yocto-poky/bitbake/lib/bb/tinfoil.py
+++ b/yocto-poky/bitbake/lib/bb/tinfoil.py
@@ -36,13 +36,13 @@
 
         # Set up logging
         self.logger = logging.getLogger('BitBake')
-        console = logging.StreamHandler(output)
-        bb.msg.addDefaultlogFilter(console)
+        self._log_hdlr = logging.StreamHandler(output)
+        bb.msg.addDefaultlogFilter(self._log_hdlr)
         format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
         if output.isatty():
             format.enable_color()
-        console.setFormatter(format)
-        self.logger.addHandler(console)
+        self._log_hdlr.setFormatter(format)
+        self.logger.addHandler(self._log_hdlr)
 
         self.config = CookerConfiguration()
         configparams = TinfoilConfigParameters(parse_only=True)
@@ -88,6 +88,7 @@
         self.cooker.shutdown(force=True)
         self.cooker.post_serve()
         self.cooker.unlockBitbake()
+        self.logger.removeHandler(self._log_hdlr)
 
 class TinfoilConfigParameters(ConfigParameters):
 
diff --git a/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py b/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
index 6e313fe..78f1e92 100644
--- a/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py
@@ -66,6 +66,7 @@
 
     def __init__(self):
         self.layer_version_objects = []
+        self.layer_version_built = []
         self.task_objects = {}
         self.recipe_objects = {}
 
@@ -94,8 +95,8 @@
 
         created = False
         if not key in vars(self)[dictname].keys():
-            vars(self)[dictname][key] = clazz.objects.create(**kwargs)
-            created = True
+            vars(self)[dictname][key], created = \
+                clazz.objects.get_or_create(**kwargs)
 
         return (vars(self)[dictname][key], created)
 
@@ -161,8 +162,6 @@
             build.bitbake_version=build_info['bitbake_version']
             build.save()
 
-            Target.objects.filter(build = build).delete()
-
         else:
             build = Build.objects.create(
                                     project = prj,
@@ -183,18 +182,26 @@
 
         return build
 
-    def create_target_objects(self, target_info):
-        assert 'build' in target_info
-        assert 'targets' in target_info
-
-        targets = []
-        for tgt_name in target_info['targets']:
-            tgt_object = Target.objects.create( build = target_info['build'],
-                                    target = tgt_name,
-                                    is_image = False,
-                                    )
-            targets.append(tgt_object)
-        return targets
+    @staticmethod
+    def get_or_create_targets(target_info):
+        result = []
+        for target in target_info['targets']:
+            task = ''
+            if ':' in target:
+                target, task = target.split(':', 1)
+            if task.startswith('do_'):
+                task = task[3:]
+            if task == 'build':
+                task = ''
+            obj, created = Target.objects.get_or_create(build=target_info['build'],
+                                                        target=target)
+            if created:
+                obj.is_image = False
+                if task:
+                    obj.task = task
+                obj.save()
+            result.append(obj)
+        return result
 
     def update_build_object(self, build, errors, warnings, taskfailures):
         assert isinstance(build,Build)
@@ -269,23 +276,66 @@
 
         assert not recipe_information['file_path'].startswith("/")      # we should have layer-relative paths at all times
 
-        recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
+
+        def update_recipe_obj(recipe_object):
+            object_changed = False
+            for v in vars(recipe_object):
+                if v in recipe_information.keys():
+                    object_changed = True
+                    vars(recipe_object)[v] = recipe_information[v]
+
+            if object_changed:
+                recipe_object.save()
+
+        recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                      file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])
-        if created and must_exist:
-            raise NotExisting("Recipe object created when expected to exist", recipe_information)
 
-        object_changed = False
-        for v in vars(recipe_object):
-            if v in recipe_information.keys():
-                object_changed = True
-                vars(recipe_object)[v] = recipe_information[v]
+        update_recipe_obj(recipe)
 
-        if object_changed:
-            recipe_object.save()
+        built_recipe = None
+        # Create a copy of the recipe for historical purposes and update it
+        for built_layer in self.layer_version_built:
+            if built_layer.layer == recipe_information['layer_version'].layer:
+                built_recipe, c = self._cached_get_or_create(Recipe,
+                        layer_version=built_layer,
+                        file_path=recipe_information['file_path'],
+                        pathflags = recipe_information['pathflags'])
+                update_recipe_obj(built_recipe)
+                break
 
-        return recipe_object
+
+        # If we're in analysis mode then we are wholly responsible for the data
+        # and therefore we return the 'real' recipe rather than the build
+        # history copy of the recipe.
+        if  recipe_information['layer_version'].build is not None and \
+            recipe_information['layer_version'].build.project == \
+                Project.objects.get_default_project():
+            return recipe
+
+        return built_recipe
 
     def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
+        if isinstance(layer_obj, Layer_Version):
+            # We already found our layer version for this build so just
+            # update it with the new build information
+            logger.debug("We found our layer from toaster")
+            layer_obj.local_path = layer_version_information['local_path']
+            layer_obj.save()
+            self.layer_version_objects.append(layer_obj)
+
+            # create a new copy of this layer version as a snapshot for
+            # historical purposes
+            layer_copy, c = Layer_Version.objects.get_or_create(build=build_obj,
+                            layer=layer_obj.layer,
+                            commit=layer_version_information['commit'],
+                            local_path = layer_version_information['local_path'],
+                            )
+            logger.info("created new historical layer version %d", layer_copy.pk)
+
+            self.layer_version_built.append(layer_copy)
+
+            return layer_obj
+
         assert isinstance(build_obj, Build)
         assert isinstance(layer_obj, Layer)
         assert 'branch' in layer_version_information
@@ -293,14 +343,20 @@
         assert 'priority' in layer_version_information
         assert 'local_path' in layer_version_information
 
+        # If we're doing a command line build then associate this new layer with the
+        # project to avoid it 'contaminating' toaster data
+        project = None
+        if build_obj.project == Project.objects.get_default_project():
+            project = build_obj.project
+
         layer_version_object, _ = Layer_Version.objects.get_or_create(
-                                    build = build_obj,
-                                    layer = layer_obj,
-                                    branch = layer_version_information['branch'],
-                                    commit = layer_version_information['commit'],
-                                    priority = layer_version_information['priority'],
-                                    local_path = layer_version_information['local_path'],
-                                    )
+                                  build = build_obj,
+                                  layer = layer_obj,
+                                  branch = layer_version_information['branch'],
+                                  commit = layer_version_information['commit'],
+                                  priority = layer_version_information['priority'],
+                                  local_path = layer_version_information['local_path'],
+                                  project=project)
 
         self.layer_version_objects.append(layer_version_object)
 
@@ -335,8 +391,15 @@
                     localdirname = os.path.join(bc.be.sourcedir, localdirname)
                 #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
                 if localdirname.startswith(layer_information['local_path']):
+                  # If the build request came from toaster this field
+                  # should contain the information from the layer_version
+                  # That created this build request.
+                    if brl.layer_version:
+                        return brl.layer_version
+
                     # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                     #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
+
                     for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                         if pl.layercommit.layer.vcs_url == brl.giturl :
                             layer = pl.layercommit.layer
@@ -353,26 +416,29 @@
         files = filedata['files']
         syms = filedata['syms']
 
-        # we insert directories, ordered by name depth
+        # always create the root directory as a special case;
+        # note that this is never displayed, so the owner, group,
+        # size, permission are irrelevant
+        tf_obj = Target_File.objects.create(target = target_obj,
+                                            path = '/',
+                                            size = 0,
+                                            owner = '',
+                                            group = '',
+                                            permission = '',
+                                            inodetype = Target_File.ITYPE_DIRECTORY)
+        tf_obj.save()
+
+        # insert directories, ordered by name depth
         for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
             (user, group, size) = d[1:4]
             permission = d[0][1:]
             path = d[4].lstrip(".")
+
+            # we already created the root directory, so ignore any
+            # entry for it
             if len(path) == 0:
-                # we create the root directory as a special case
-                path = "/"
-                tf_obj = Target_File.objects.create(
-                        target = target_obj,
-                        path = path,
-                        size = size,
-                        inodetype = Target_File.ITYPE_DIRECTORY,
-                        permission = permission,
-                        owner = user,
-                        group = group,
-                        )
-                tf_obj.directory = tf_obj
-                tf_obj.save()
                 continue
+
             parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
             if len(parent_path) == 0:
                 parent_path = "/"
@@ -461,6 +527,12 @@
         errormsg = ""
         for p in packagedict:
             searchname = p
+            if p not in pkgpnmap:
+                logger.warning("Image packages list contains %s, but is"
+                               " missing from all packages list where the"
+                               " metadata comes from. Skipping...", p)
+                continue
+
             if 'OPKGN' in pkgpnmap[p].keys():
                 searchname = pkgpnmap[p]['OPKGN']
 
@@ -504,13 +576,20 @@
                 elif deptype == 'recommends':
                     tdeptype = Package_Dependency.TYPE_TRECOMMENDS
 
-                packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
-                                        depends_on = packagedict[px]['object'],
-                                        dep_type = tdeptype,
-                                        target = target_obj))
+                try:
+                    packagedeps_objs.append(Package_Dependency(
+                        package = packagedict[p]['object'],
+                        depends_on = packagedict[px]['object'],
+                        dep_type = tdeptype,
+                        target = target_obj))
+                except KeyError as e:
+                    logger.warn("Could not add dependency to the package %s "
+                                "because %s is an unknown package", p, px)
 
         if len(packagedeps_objs) > 0:
             Package_Dependency.objects.bulk_create(packagedeps_objs)
+        else:
+            logger.info("No package dependencies created")
 
         if len(errormsg) > 0:
             logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
@@ -686,6 +765,7 @@
     def __init__(self, server, has_build_history = False):
         self.internal_state = {}
         self.internal_state['taskdata'] = {}
+        self.internal_state['targets'] = []
         self.task_order = 0
         self.autocommit_step = 1
         self.server = server
@@ -704,7 +784,7 @@
     ## methods to convert event/external info into objects that the ORM layer uses
 
 
-    def _get_build_information(self, consolelogfile):
+    def _get_build_information(self, build_log_path):
         build_info = {}
         # Generate an identifier for each new build
 
@@ -713,7 +793,7 @@
         build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
         build_info['started_on'] = timezone.now()
         build_info['completed_on'] = timezone.now()
-        build_info['cooker_log_path'] = consolelogfile
+        build_info['cooker_log_path'] = build_log_path
         build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
         build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
 
@@ -764,8 +844,15 @@
                 if not localdirname.startswith("/"):
                     localdirname = os.path.join(bc.be.sourcedir, localdirname)
                 if path.startswith(localdirname):
+                    # If the build request came from toaster this field
+                    # should contain the information from the layer_version
+                    # that created this build request.
+                    if brl.layer_version:
+                        return brl.layer_version
+
                     #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
                     # we matched the BRLayer, but we need the layer_version that generated this br
+
                     for lvo in self.orm_wrapper.layer_version_objects:
                         if brl.name == lvo.layer.name:
                             return lvo
@@ -774,7 +861,7 @@
         logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
 
         #mockup the new layer
-        unknown_layer, _ = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", layer_index_url="")
+        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
         unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
 
         # append it so we don't run into this error again and again
@@ -847,9 +934,9 @@
                 logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)
 
 
-    def store_started_build(self, event, consolelogfile):
+    def store_started_build(self, event, build_log_path):
         assert '_pkgs' in vars(event)
-        build_information = self._get_build_information(consolelogfile)
+        build_information = self._get_build_information(build_log_path)
 
         build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)
 
@@ -869,7 +956,7 @@
         target_information['targets'] = event._pkgs
         target_information['build'] = build_obj
 
-        self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
+        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)
 
         # Save build configuration
         data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
@@ -996,7 +1083,7 @@
             task_information['disk_io'] = taskstats['disk_io']
             if 'elapsed_time' in taskstats:
                 task_information['elapsed_time'] = taskstats['elapsed_time']
-            self.orm_wrapper.get_update_task_object(task_information, True)  # must exist
+            self.orm_wrapper.get_update_task_object(task_information)
 
     def update_and_store_task(self, event):
         assert 'taskfile' in vars(event)
@@ -1097,15 +1184,22 @@
         # for all image targets
         for target in self.internal_state['targets']:
             if target.is_image:
+                pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
+                imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
+                filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
+
                 try:
-                    pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
-                    imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
                     self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
-                    filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
+                except KeyError as e:
+                    logger.warn("KeyError in save_target_package_information: "
+                                "%s", e)
+
+                try:
                     self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
-                except KeyError:
-                    # we must have not got the data for this image, nothing to save
-                    pass
+                except KeyError as e:
+                    logger.warn("KeyError in save_target_file_information: "
+                                "%s", e)
+
 
 
 
@@ -1306,7 +1400,9 @@
 
         log_information = {}
         log_information['build'] = self.internal_state['build']
-        if event.levelno == formatter.ERROR:
+        if event.levelno == formatter.CRITICAL:
+            log_information['level'] = LogMessage.CRITICAL
+        elif event.levelno == formatter.ERROR:
             log_information['level'] = LogMessage.ERROR
         elif event.levelno == formatter.WARNING:
             log_information['level'] = LogMessage.WARNING
@@ -1319,6 +1415,7 @@
         log_information['pathname'] = event.pathname
         log_information['lineno'] = event.lineno
         logger.info("Logging error 2: %s", log_information)
+
         self.orm_wrapper.create_logmessage(log_information)
 
     def close(self, errorcode):
diff --git a/yocto-poky/bitbake/lib/bb/ui/knotty.py b/yocto-poky/bitbake/lib/bb/ui/knotty.py
index 2bee242..90c3183 100644
--- a/yocto-poky/bitbake/lib/bb/ui/knotty.py
+++ b/yocto-poky/bitbake/lib/bb/ui/knotty.py
@@ -230,7 +230,7 @@
     if error:
         logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
         raise BaseException(error)
-    consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
+    consolelogfile, error = server.runCommand(["getSetVariable", "BB_CONSOLELOG"])
     if error:
         logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
         raise BaseException(error)
diff --git a/yocto-poky/bitbake/lib/bb/ui/toasterui.py b/yocto-poky/bitbake/lib/bb/ui/toasterui.py
index e0c278b..3d26150 100644
--- a/yocto-poky/bitbake/lib/bb/ui/toasterui.py
+++ b/yocto-poky/bitbake/lib/bb/ui/toasterui.py
@@ -21,6 +21,7 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 from __future__ import division
+import time
 import sys
 try:
     import bb
@@ -43,8 +44,6 @@
 logger = logging.getLogger("ToasterLogger")
 interactive = sys.stdout.isatty()
 
-
-
 def _log_settings_from_server(server):
     # Get values of variables which control our output
     includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
@@ -59,12 +58,56 @@
     if error:
         logger.error("Unable to get the value of BB_CONSOLELOG variable: %s", error)
         raise BaseException(error)
-    return includelogs, loglines, consolelogfile
+    return consolelogfile
 
+# create a log file for a single build and direct the logger at it;
+# log file name is timestamped to the millisecond (depending
+# on system clock accuracy) to ensure it doesn't overlap with
+# other log file names
+#
+# returns (log file, path to log file) for a build
+def _open_build_log(log_dir):
+    format_str = "%(levelname)s: %(message)s"
 
-def main(server, eventHandler, params ):
+    now = time.time()
+    now_ms = int((now - int(now)) * 1000)
+    time_str = time.strftime('build_%Y%m%d_%H%M%S', time.localtime(now))
+    log_file_name = time_str + ('.%d.log' % now_ms)
+    build_log_file_path = os.path.join(log_dir, log_file_name)
+
+    build_log = logging.FileHandler(build_log_file_path)
+
+    logformat = bb.msg.BBLogFormatter(format_str)
+    build_log.setFormatter(logformat)
+
+    bb.msg.addDefaultlogFilter(build_log)
+    logger.addHandler(build_log)
+
+    return (build_log, build_log_file_path)
+
+# stop logging to the build log if it exists
+def _close_build_log(build_log):
+    if build_log:
+        build_log.flush()
+        build_log.close()
+        logger.removeHandler(build_log)
+
+def main(server, eventHandler, params):
+    # set to a logging.FileHandler instance when a build starts;
+    # see _open_build_log()
+    build_log = None
+
+    # set to the log path when a build starts
+    build_log_file_path = None
+
     helper = uihelper.BBUIHelper()
 
+    # TODO don't use log output to determine when bitbake has started
+    #
+    # WARNING: this log handler cannot be removed, as localhostbecontroller
+    # relies on output in the toaster_ui.log file to determine whether
+    # the bitbake server has started, which only happens if
+    # this logger is setup here (see the TODO in the loop below)
     console = logging.StreamHandler(sys.stdout)
     format_str = "%(levelname)s: %(message)s"
     formatter = bb.msg.BBLogFormatter(format_str)
@@ -73,8 +116,6 @@
     logger.addHandler(console)
     logger.setLevel(logging.INFO)
 
-    _, _, consolelogfile = _log_settings_from_server(server)
-
     # verify and warn
     build_history_enabled = True
     inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])
@@ -87,8 +128,9 @@
         logger.error("ToasterUI can only work in observer mode")
         return 1
 
-
+    # set to 1 when toasterui needs to shut down
     main.shutdown = 0
+
     interrupted = False
     return_value = 0
     errors = 0
@@ -98,25 +140,31 @@
 
     buildinfohelper = BuildInfoHelper(server, build_history_enabled)
 
-    if buildinfohelper.brbe is not None and consolelogfile:
-        # if we are under managed mode we have no other UI and we need to write our own file
-        bb.utils.mkdirhier(os.path.dirname(consolelogfile))
-        conlogformat = bb.msg.BBLogFormatter(format_str)
-        consolelog = logging.FileHandler(consolelogfile)
-        bb.msg.addDefaultlogFilter(consolelog)
-        consolelog.setFormatter(conlogformat)
-        logger.addHandler(consolelog)
-
+    # write our own log files into bitbake's log directory;
+    # we're only interested in the path to the parent directory of
+    # this file, as we're writing our own logs into the same directory
+    consolelogfile = _log_settings_from_server(server)
+    log_dir = os.path.dirname(consolelogfile)
+    bb.utils.mkdirhier(log_dir)
 
     while True:
         try:
             event = eventHandler.waitEvent(0.25)
             if first:
                 first = False
+
+                # TODO don't use log output to determine when bitbake has started
+                #
+                # this is the line localhostbecontroller needs to
+                # see in toaster_ui.log which it uses to decide whether
+                # the bitbake server has started...
                 logger.info("ToasterUI waiting for events")
 
             if event is None:
                 if main.shutdown > 0:
+                    # if shutting down, close any open build log first
+                    _close_build_log(build_log)
+
                     break
                 continue
 
@@ -125,19 +173,32 @@
             # pylint: disable=protected-access
             # the code will look into the protected variables of the event; no easy way around this
 
+            # we treat ParseStarted as the first event of toaster-triggered
+            # builds; that way we get the Build Configuration included in the log
+            # and any errors that occur before BuildStarted is fired
+            if isinstance(event, bb.event.ParseStarted):
+                if not (build_log and build_log_file_path):
+                    build_log, build_log_file_path = _open_build_log(log_dir)
+                continue
+
             if isinstance(event, bb.event.BuildStarted):
-                buildinfohelper.store_started_build(event, consolelogfile)
+                # command-line builds don't fire a ParseStarted event,
+                # so we have to start the log file for those on BuildStarted instead
+                if not (build_log and build_log_file_path):
+                    build_log, build_log_file_path = _open_build_log(log_dir)
+
+                buildinfohelper.store_started_build(event, build_log_file_path)
 
             if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
                 buildinfohelper.update_and_store_task(event)
-                logger.warn("Logfile for task %s", event.logfile)
+                logger.info("Logfile for task %s", event.logfile)
                 continue
 
             if isinstance(event, bb.build.TaskBase):
                 logger.info(event._message)
 
             if isinstance(event, bb.event.LogExecTTY):
-                logger.warn(event.msg)
+                logger.info(event.msg)
                 continue
 
             if isinstance(event, logging.LogRecord):
@@ -145,10 +206,12 @@
                     event.levelno = formatter.ERROR
 
                 buildinfohelper.store_log_event(event)
+
                 if event.levelno >= formatter.ERROR:
                     errors = errors + 1
                 elif event.levelno == formatter.WARNING:
                     warnings = warnings + 1
+
                 # For "normal" logging conditions, don't show note logs from tasks
                 # but do show them if the user has changed the default log level to
                 # include verbose/debug messages
@@ -169,8 +232,6 @@
             # timing and error informations from the parsing phase in Toaster
             if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
                 continue
-            if isinstance(event, bb.event.ParseStarted):
-                continue
             if isinstance(event, bb.event.ParseProgress):
                 continue
             if isinstance(event, bb.event.ParseCompleted):
@@ -246,6 +307,12 @@
                     errorcode = 1
                     logger.error("Command execution failed: %s", event.error)
 
+                # turn off logging to the current build log
+                _close_build_log(build_log)
+
+                # reset ready for next BuildStarted
+                build_log = None
+
                 # update the build info helper on BuildCompleted, not on CommandXXX
                 buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
                 buildinfohelper.close(errorcode)
@@ -254,7 +321,6 @@
 
                 # we start a new build info
                 if buildinfohelper.brbe is not None:
-
                     logger.debug("ToasterUI under BuildEnvironment management - exiting after the build")
                     server.terminateServer()
                 else:
@@ -296,8 +362,9 @@
                 continue
 
             if isinstance(event, bb.cooker.CookerExit):
-                # exit when the server exits
-                break
+                # shutdown when bitbake server shuts down
+                main.shutdown = 1
+                continue
 
             # ignore
             if isinstance(event, (bb.event.BuildBase,
@@ -308,14 +375,15 @@
                                   bb.event.OperationProgress,
                                   bb.command.CommandFailed,
                                   bb.command.CommandExit,
-                                  bb.command.CommandCompleted)):
+                                  bb.command.CommandCompleted,
+                                  bb.event.ReachableStamps)):
                 continue
 
             if isinstance(event, bb.event.DepTreeGenerated):
                 buildinfohelper.store_dependency_information(event)
                 continue
 
-            logger.error("Unknown event: %s", event)
+            logger.warn("Unknown event: %s", event)
             return_value += 1
 
         except EnvironmentError as ioerror:
@@ -335,7 +403,7 @@
             if tb is not None:
                 curr = tb
                 while curr is not None:
-                    logger.warn("Error data dump %s\n%s\n" , traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals))
+                    logger.error("Error data dump %s\n%s\n" , traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals))
                     curr = curr.tb_next
 
             # save them to database, if possible; if it fails, we already logged to console.
@@ -347,9 +415,8 @@
             # make sure we return with an error
             return_value += 1
 
-    if interrupted:
-        if return_value == 0:
-            return_value += 1
+    if interrupted and return_value == 0:
+        return_value += 1
 
     logger.warn("Return value is %d", return_value)
     return return_value
diff --git a/yocto-poky/bitbake/lib/bb/utils.py b/yocto-poky/bitbake/lib/bb/utils.py
index 91faa49..31ec2b7 100644
--- a/yocto-poky/bitbake/lib/bb/utils.py
+++ b/yocto-poky/bitbake/lib/bb/utils.py
@@ -1177,7 +1177,7 @@
             if not skip:
                 if checkspc:
                     checkspc = False
-                    if newlines[-1] == '\n' and line == '\n':
+                    if newlines and newlines[-1] == '\n' and line == '\n':
                         # Squash blank line if there are two consecutive blanks after a removal
                         continue
                 newlines.append(line)
@@ -1201,7 +1201,19 @@
 
 
 def edit_bblayers_conf(bblayers_conf, add, remove):
-    """Edit bblayers.conf, adding and/or removing layers"""
+    """Edit bblayers.conf, adding and/or removing layers
+    Parameters:
+        bblayers_conf: path to bblayers.conf file to edit
+        add: layer path (or list of layer paths) to add; None or empty
+            list to add nothing
+        remove: layer path (or list of layer paths) to remove; None or
+            empty list to remove nothing
+    Returns a tuple:
+        notadded: list of layers specified to be added but weren't
+            (because they were already in the list)
+        notremoved: list of layers that were specified to be removed
+            but weren't (because they weren't in the list)
+    """
 
     import fnmatch
 
@@ -1210,6 +1222,13 @@
             pth = pth[:-1]
         return pth
 
+    approved = bb.utils.approved_variables()
+    def canonicalise_path(pth):
+        pth = remove_trailing_sep(pth)
+        if 'HOME' in approved and '~' in pth:
+            pth = os.path.expanduser(pth)
+        return pth
+
     def layerlist_param(value):
         if not value:
             return []
@@ -1218,48 +1237,80 @@
         else:
             return [remove_trailing_sep(value)]
 
-    notadded = []
-    notremoved = []
-
     addlayers = layerlist_param(add)
     removelayers = layerlist_param(remove)
 
     # Need to use a list here because we can't set non-local variables from a callback in python 2.x
     bblayercalls = []
+    removed = []
+    plusequals = False
+    orig_bblayers = []
+
+    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
+        bblayercalls.append(op)
+        if op == '=':
+            del orig_bblayers[:]
+        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
+        return (origvalue, None, 2, False)
 
     def handle_bblayers(varname, origvalue, op, newlines):
-        bblayercalls.append(varname)
         updated = False
         bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
         if removelayers:
             for removelayer in removelayers:
-                matched = False
                 for layer in bblayers:
-                    if fnmatch.fnmatch(layer, removelayer):
+                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                         updated = True
-                        matched = True
                         bblayers.remove(layer)
+                        removed.append(removelayer)
                         break
-                if not matched:
-                    notremoved.append(removelayer)
-        if addlayers:
+        if addlayers and not plusequals:
             for addlayer in addlayers:
                 if addlayer not in bblayers:
                     updated = True
                     bblayers.append(addlayer)
-                else:
-                    notadded.append(addlayer)
+            del addlayers[:]
 
         if updated:
+            if op == '+=' and not bblayers:
+                bblayers = None
             return (bblayers, None, 2, False)
         else:
             return (origvalue, None, 2, False)
 
-    edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
+    with open(bblayers_conf, 'r') as f:
+        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)
 
     if not bblayercalls:
         raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
 
+    # Try to do the "smart" thing depending on how the user has laid out
+    # their bblayers.conf file
+    if bblayercalls.count('+=') > 1:
+        plusequals = True
+
+    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
+    notadded = []
+    for layer in addlayers:
+        layer_canon = canonicalise_path(layer)
+        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
+            notadded.append(layer)
+    notadded_canon = [canonicalise_path(layer) for layer in notadded]
+    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]
+
+    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
+    if addlayers:
+        # Still need to add these
+        for addlayer in addlayers:
+            newlines.append('BBLAYERS += "%s"\n' % addlayer)
+        updated = True
+
+    if updated:
+        with open(bblayers_conf, 'w') as f:
+            f.writelines(newlines)
+
+    notremoved = list(set(removelayers) - set(removed))
+
     return (notadded, notremoved)
 
 
@@ -1310,3 +1361,27 @@
     result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
     if result != 0:
         raise PrCtlError('prctl failed with error code %s' % result)
+
+#
+# Manually call the ioprio syscall. We could depend on other libs like psutil
+# however this gets us enough of what we need to bitbake for now without the
+# dependency
+#
+_unamearch = os.uname()[4]
+IOPRIO_WHO_PROCESS = 1
+IOPRIO_CLASS_SHIFT = 13
+
+def ioprio_set(who, cls, value):
+    NR_ioprio_set = None
+    if _unamearch == "x86_64":
+      NR_ioprio_set = 251
+    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
+      NR_ioprio_set = 289
+
+    if NR_ioprio_set:
+        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
+        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
+        if rc != 0:
+            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
+    else:
+        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
diff --git a/yocto-poky/bitbake/lib/prserv/db.py b/yocto-poky/bitbake/lib/prserv/db.py
index 4379580..36c9f7b 100644
--- a/yocto-poky/bitbake/lib/prserv/db.py
+++ b/yocto-poky/bitbake/lib/prserv/db.py
@@ -248,7 +248,7 @@
         self.connection.execute("PRAGMA journal_mode = WAL;")
         self._tables={}
 
-    def __del__(self):
+    def disconnect(self):
         self.connection.close()
 
     def __getitem__(self,tblname):
diff --git a/yocto-poky/bitbake/lib/prserv/serv.py b/yocto-poky/bitbake/lib/prserv/serv.py
index 5c0ffb9..eafc3aa 100644
--- a/yocto-poky/bitbake/lib/prserv/serv.py
+++ b/yocto-poky/bitbake/lib/prserv/serv.py
@@ -3,6 +3,7 @@
 from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
 import threading
 import Queue
+import socket
 
 try:
     import sqlite3
@@ -37,7 +38,6 @@
 class PRServer(SimpleXMLRPCServer):
     def __init__(self, dbfile, logfile, interface, daemon=True):
         ''' constructor '''
-        import socket
         try:
             SimpleXMLRPCServer.__init__(self, interface,
                                         logRequests=False, allow_none=True)
@@ -148,7 +148,7 @@
         while not self.quit:
             self.handle_request()
         self.handlerthread.join()
-        self.table.sync()
+        self.db.disconnect()
         logger.info("PRServer: stopping...")
         self.server_close()
         return
@@ -289,7 +289,8 @@
         return self.host, self.port
 
 def start_daemon(dbfile, host, port, logfile):
-    pidfile = PIDPREFIX % (host, port)
+    ip = socket.gethostbyname(host)
+    pidfile = PIDPREFIX % (ip, port)
     try:
         pf = file(pidfile,'r')
         pid = int(pf.readline().strip())
@@ -302,12 +303,21 @@
                             % pidfile)
         return 1
 
-    server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (host,port))
+    server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (ip,port))
     server.start()
+
+    # Sometimes, the port (i.e. localhost:0) indicated by the user does not match with
+    # the one the server actually is listening, so at least warn the user about it
+    _,rport = server.getinfo()
+    if port != rport:
+        sys.stdout.write("Server is listening at port %s instead of %s\n"
+                         % (rport,port))
     return 0
 
 def stop_daemon(host, port):
-    pidfile = PIDPREFIX % (host, port)
+    import glob
+    ip = socket.gethostbyname(host)
+    pidfile = PIDPREFIX % (ip, port)
     try:
         pf = file(pidfile,'r')
         pid = int(pf.readline().strip())
@@ -316,11 +326,23 @@
         pid = None
 
     if not pid:
-        sys.stderr.write("pidfile %s does not exist. Daemon not running?\n"
-                        % pidfile)
+        # when server starts at port=0 (i.e. localhost:0), server actually takes another port,
+        # so at least advise the user which ports the corresponding server is listening
+        ports = []
+        portstr = ""
+        for pf in glob.glob(PIDPREFIX % (ip,'*')):
+            bn = os.path.basename(pf)
+            root, _ = os.path.splitext(bn)
+            ports.append(root.split('_')[-1])
+        if len(ports):
+            portstr = "Wrong port? Other ports listening at %s: %s" % (host, ' '.join(ports))
+
+        sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n"
+                         % (pidfile,portstr))
+        return 1
 
     try:
-        PRServerConnection(host, port).terminate()
+        PRServerConnection(ip, port).terminate()
     except:
         logger.critical("Stop PRService %s:%d failed" % (host,port))
 
diff --git a/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py b/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
index a9909b8..b5cf559 100644
--- a/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
+++ b/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
@@ -23,9 +23,11 @@
 import os
 import sys
 import re
+import shutil
 from django.db import transaction
 from django.db.models import Q
 from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake
+from orm.models import CustomImageRecipe, Layer, Layer_Version, ProjectLayer
 import subprocess
 
 from toastermain import settings
@@ -179,15 +181,9 @@
         logger.debug("localhostbecontroller: Stopped bitbake server")
 
     def getGitCloneDirectory(self, url, branch):
-        """ Utility that returns the last component of a git path as directory
-        """
-        import re
-        components = re.split(r'[:\.\/]', url)
-        base = components[-2] if components[-1] == "git" else components[-1]
-
+        """Construct unique clone directory name out of url and branch."""
         if branch != "HEAD":
-            return "_%s_%s.toaster_cloned" % (base, branch)
-
+            return "_toaster_clones/_%s_%s" % (re.sub('[:/@%]', '_', url), branch)
 
         # word of attention; this is a localhost-specific issue; only on the localhost we expect to have "HEAD" releases
         # which _ALWAYS_ means the current poky checkout
@@ -197,7 +193,7 @@
         return local_checkout_path
 
 
-    def setLayers(self, bitbakes, layers):
+    def setLayers(self, bitbakes, layers, targets):
         """ a word of attention: by convention, the first layer for any build will be poky! """
 
         assert self.be.sourcedir is not None
@@ -222,23 +218,26 @@
         logger.debug("localhostbecontroller, our git repos are %s" % pformat(gitrepos))
 
 
-        # 2. find checked-out git repos in the sourcedir directory that may help faster cloning
+        # 2. Note for future use if the current source directory is a
+        # checked-out git repos that could match a layer's vcs_url and therefore
+        # be used to speed up cloning (rather than fetching it again).
 
         cached_layers = {}
-        for ldir in os.listdir(self.be.sourcedir):
-            fldir = os.path.join(self.be.sourcedir, ldir)
-            if os.path.isdir(fldir):
+
+        try:
+            for remotes in self._shellcmd("git remote -v", self.be.sourcedir).split("\n"):
                 try:
-                    for line in self._shellcmd("git remote -v", fldir).split("\n"):
-                        try:
-                            remote = line.split("\t")[1].split(" ")[0]
-                            if remote not in cached_layers:
-                                cached_layers[remote] = fldir
-                        except IndexError:
-                            pass
-                except ShellCmdException:
-                    # ignore any errors in collecting git remotes
+                    remote = remotes.split("\t")[1].split(" ")[0]
+                    if remote not in cached_layers:
+                        cached_layers[remote] = self.be.sourcedir
+                except IndexError:
                     pass
+        except ShellCmdException:
+            # ignore any errors in collecting git remotes; this is
+            # an optional step
+            pass
+
+        logger.info("Using pre-checked out source for layer %s", cached_layers)
 
         layerlist = []
 
@@ -260,13 +259,14 @@
                     self._shellcmd("git remote remove origin", localdirname)
                     self._shellcmd("git remote add origin \"%s\"" % giturl, localdirname)
                 else:
-                    logger.debug("localhostbecontroller: cloning %s:%s in %s" % (giturl, commit, localdirname))
-                    self._shellcmd("git clone \"%s\" --single-branch --branch \"%s\" \"%s\"" % (giturl, commit, localdirname))
+                    logger.debug("localhostbecontroller: cloning %s in %s" % (giturl, localdirname))
+                    self._shellcmd('git clone "%s" "%s"' % (giturl, localdirname))
 
             # branch magic name "HEAD" will inhibit checkout
             if commit != "HEAD":
                 logger.debug("localhostbecontroller: checking out commit %s to %s " % (commit, localdirname))
-                self._shellcmd("git fetch --all && git checkout \"%s\" && git rebase \"origin/%s\"" % (commit, commit) , localdirname)
+                ref = commit if re.match('^[a-fA-F0-9]+$', commit) else 'origin/%s' % commit
+                self._shellcmd('git fetch --all && git reset --hard "%s"' % ref, localdirname)
 
             # take the localdirname as poky dir if we can find the oe-init-build-env
             if self.pokydirname is None and os.path.exists(os.path.join(localdirname, "oe-init-build-env")):
@@ -299,6 +299,51 @@
         if not os.path.exists(bblayerconf):
             raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)
 
+        # 6. create custom layer and add custom recipes to it
+        layerpath = os.path.join(self.be.sourcedir, "_meta-toaster-custom")
+        if os.path.isdir(layerpath):
+            shutil.rmtree(layerpath) # remove leftovers from previous builds
+        for target in targets:
+            try:
+                customrecipe = CustomImageRecipe.objects.get(name=target.target,
+                                                             project=bitbakes[0].req.project)
+            except CustomImageRecipe.DoesNotExist:
+                continue # not a custom recipe, skip
+
+            # create directory structure
+            for name in ("conf", "recipes"):
+                path = os.path.join(layerpath, name)
+                if not os.path.isdir(path):
+                    os.makedirs(path)
+
+            # create layer.conf
+            config = os.path.join(layerpath, "conf", "layer.conf")
+            if not os.path.isfile(config):
+                with open(config, "w") as conf:
+                    conf.write('BBPATH .= ":${LAYERDIR}"\nBBFILES += "${LAYERDIR}/recipes/*.bb"\n')
+
+            # create recipe
+            recipe = os.path.join(layerpath, "recipes", "%s.bb" % target.target)
+            with open(recipe, "w") as recipef:
+                recipef.write("require %s\n" % customrecipe.base_recipe.recipe.file_path)
+                packages = [pkg.name for pkg in customrecipe.packages.all()]
+                if packages:
+                    recipef.write('IMAGE_INSTALL = "%s"\n' % ' '.join(packages))
+
+            # create *Layer* objects needed for build machinery to work
+            layer = Layer.objects.get_or_create(name="Toaster Custom layer",
+                                                summary="Layer for custom recipes",
+                                                vcs_url="file://%s" % layerpath)[0]
+            breq = target.req
+            lver = Layer_Version.objects.get_or_create(project=breq.project, layer=layer,
+                                                       dirpath=layerpath, build=breq.build)[0]
+            ProjectLayer.objects.get_or_create(project=breq.project, layercommit=lver,
+                                               optional=False)
+            BRLayer.objects.get_or_create(req=breq, name=layer.name, dirpath=layerpath,
+                                          giturl="file://%s" % layerpath)
+        if os.path.isdir(layerpath):
+            layerlist.append(layerpath)
+
         BuildEnvironmentController._updateBBLayers(bblayerconf, layerlist)
 
         self.islayerset = True
@@ -316,7 +361,7 @@
 
     def triggerBuild(self, bitbake, layers, variables, targets):
         # set up the buid environment with the needed layers
-        self.setLayers(bitbake, layers)
+        self.setLayers(bitbake, layers, targets)
         self.writeConfFile("conf/toaster-pre.conf", variables)
         self.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")
 
diff --git a/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py b/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
index b2c573c..5e70437 100644
--- a/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
+++ b/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
@@ -4,7 +4,7 @@
 from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
 from orm.models import ToasterSetting, Build
 import os
-import sys, traceback
+import traceback
 
 def DN(path):
     if path is None:
@@ -21,7 +21,7 @@
         super(Command, self).__init__(*args, **kwargs)
         self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
 
-    def _find_first_path_for_file(self, startdirectory, filename, level = 0):
+    def _find_first_path_for_file(self, startdirectory, filename, level=0):
         if level < 0:
             return None
         dirs = []
@@ -38,7 +38,7 @@
                 return ret
         return None
 
-    def _recursive_list_directories(self, startdirectory, level = 0):
+    def _recursive_list_directories(self, startdirectory, level=0):
         if level < 0:
             return []
         dirs = []
@@ -50,49 +50,23 @@
         except OSError:
             pass
         for j in dirs:
-                dirs = dirs + self._recursive_list_directories(j, level - 1)
+            dirs = dirs + self._recursive_list_directories(j, level - 1)
         return dirs
 
 
-    def _get_suggested_sourcedir(self, be):
-        if be.betype != BuildEnvironment.TYPE_LOCAL:
-            return ""
-        return DN(DN(DN(self._find_first_path_for_file(self.guesspath, "toasterconf.json", 4))))
-
-    def _get_suggested_builddir(self, be):
-        if be.betype != BuildEnvironment.TYPE_LOCAL:
-            return ""
-        return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))
-
     def _verify_build_environment(self):
-        # refuse to start if we have no build environments
-        while BuildEnvironment.objects.count() == 0:
-            print(" !! No build environments found. Toaster needs at least one build environment in order to be able to run builds.\n" +
-                "You can manually define build environments in the database table bldcontrol_buildenvironment.\n" +
-                "Or Toaster can define a simple localhost-based build environment for you.")
-
-            i = raw_input(" --  Do you want to create a basic localhost build environment ? (Y/n) ");
-            if not len(i) or i.startswith("y") or i.startswith("Y"):
-                BuildEnvironment.objects.create(pk = 1, betype = 0)
-            else:
-                raise Exception("Toaster cannot start without build environments. Aborting.")
-
+        # provide a local build env. This will be extended later to include non local
+        if BuildEnvironment.objects.count() == 0:
+            BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL)
 
         # we make sure we have builddir and sourcedir for all defined build envionments
         for be in BuildEnvironment.objects.all():
             be.needs_import = False
             def _verify_be():
                 is_changed = False
-                print("\nVerifying the build environment. If the local build environment is not properly configured, you will be asked to configure it.")
 
                 def _update_sourcedir():
-                    suggesteddir = self._get_suggested_sourcedir(be)
-                    if len(suggesteddir) > 0:
-                        be.sourcedir = raw_input("This is the directory Toaster uses to check out the source code of the layers you will build. Toaster will create new clones of the layers, so existing content in the chosen directory will not be changed.\nToaster suggests you use \"%s\" as your layers checkout directory. If you select this directory, a layer like \"meta-intel\" will end up in \"%s/meta-intel\".\nPress Enter to select \"%s\" or type the full path to a different directory. If you provide your own directory, it must be a parent of the cloned directory for the sources you are using to run Toaster: " % (suggesteddir, suggesteddir, suggesteddir))
-                    else:
-                        be.sourcedir = raw_input("Toaster needs to know in which directory it should check out the source code of the layers you will build. The directory should be a parent of the cloned directory for the sources you are using to run Toaster. Toaster will create new clones of the layers, so existing content in the chosen directory will not be changed.\nType the full path to the directory (for example: \"%s\": " % os.environ.get('HOME', '/tmp/'))
-                    if len(be.sourcedir) == 0 and len(suggesteddir) > 0:
-                        be.sourcedir = suggesteddir
+                    be.sourcedir = os.environ.get('TOASTER_DIR')
                     return True
 
                 if len(be.sourcedir) == 0:
@@ -103,23 +77,13 @@
                     print "\n -- Validation: The layers checkout directory must be set to an absolute path."
                     is_changed = _update_sourcedir()
 
-                if not be.sourcedir in DN(__file__):
-                    print "\n -- Validation: The layers checkout directory must be a parent of the current checkout."
-                    is_changed = _update_sourcedir()
-
                 if is_changed:
                     if be.betype == BuildEnvironment.TYPE_LOCAL:
                         be.needs_import = True
                     return True
 
                 def _update_builddir():
-                    suggesteddir = self._get_suggested_builddir(be)
-                    if len(suggesteddir) > 0:
-                        be.builddir = raw_input("Toaster needs to know where your build directory is located.\nThe build directory is where all the artifacts created by your builds will be stored. Toaster suggests \"%s\".\nPress Enter to select \"%s\" or type the full path to a different directory: " % (suggesteddir, suggesteddir))
-                    else:
-                        be.builddir = raw_input("Toaster needs to know where is your build directory.\nThe build directory is where all the artifacts created by your builds will be stored. Type the full path to the directory (for example: \" %s/build\")" % os.environ.get('HOME','/tmp/'))
-                    if len(be.builddir) == 0 and len(suggesteddir) > 0:
-                        be.builddir = suggesteddir
+                    be.builddir = os.environ.get('TOASTER_DIR')+"/build"
                     return True
 
                 if len(be.builddir) == 0:
@@ -138,79 +102,51 @@
 
 
                 if be.needs_import:
-                    print "\nToaster can use a SINGLE predefined configuration file to set up default project settings and layer information sources.\n"
+                    try:
+                        config_file = os.environ.get('TOASTER_CONF')
+                        print "\nImporting file: %s" % config_file
+                        from loadconf import Command as LoadConfigCommand
 
-                    # find configuration files
-                    config_files = []
-                    for dirname in self._recursive_list_directories(be.sourcedir,2):
-                        if os.path.exists(os.path.join(dirname, ".templateconf")):
-                            import subprocess
-                            proc = subprocess.Popen('bash -c ". '+os.path.join(dirname, ".templateconf")+'; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE)
-                            conffilepath, stderroroutput = proc.communicate()
-                            proc.wait()
-                            if proc.returncode != 0:
-                                raise Exception("Failed to source TEMPLATECONF: %s" % stderroroutput)
+                        LoadConfigCommand()._import_layer_config(config_file)
+                        # we run lsupdates after config update
+                        print "\nLayer configuration imported. Updating information from the layer sources, please wait.\nYou can re-update any time later by running bitbake/lib/toaster/manage.py lsupdates"
+                        from django.core.management import call_command
+                        call_command("lsupdates")
 
-                            conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
-                            candidatefilepath = os.path.join(dirname, conffilepath)
-                            if "toaster_cloned" in candidatefilepath:
-                                continue
-                            if os.path.exists(candidatefilepath):
-                                config_files.append(candidatefilepath)
-
-                    if len(config_files) > 0:
-                        print "Toaster will list now the configuration files that it found. Select the number to use the desired configuration file."
-                        for cf in config_files:
-                            print "  [%d] - %s" % (config_files.index(cf) + 1, cf)
-                        print "\n  [0] - Exit without importing any file"
-                        try:
-                                i = raw_input("\nEnter your option: ")
-                                if len(i) and (int(i) - 1 >= 0 and int(i) - 1 < len(config_files)):
-                                    print "\nImporting file: %s" % config_files[int(i)-1]
-                                    from loadconf import Command as LoadConfigCommand
-
-                                    LoadConfigCommand()._import_layer_config(config_files[int(i)-1])
-                                    # we run lsupdates after config update
-                                    print "\nLayer configuration imported. Updating information from the layer sources, please wait.\nYou can re-update any time later by running bitbake/lib/toaster/manage.py lsupdates"
-                                    from django.core.management import call_command
-                                    call_command("lsupdates")
-
-                                    # we don't look for any other config files
-                                    return is_changed
-                        except Exception as e:
-                            print "Failure while trying to import the toaster config file: %s" % e
-                            traceback.print_exc(e)
-                    else:
-                        print "\nToaster could not find a configuration file. You need to configure Toaster manually using the web interface, or create a configuration file and use\n  bitbake/lib/toaster/managepy.py loadconf [filename]\n command to load it. You can use https://wiki.yoctoproject.org/wiki/File:Toasterconf.json.txt.patch as a starting point."
-
-
-
+                        # we don't look for any other config files
+                        return is_changed
+                    except Exception as e:
+                        print "Failure while trying to import the toaster config file %s: %s" %\
+                            (config_file, e)
+                        traceback.print_exc(e)
 
                 return is_changed
 
-            while (_verify_be()):
+            while _verify_be():
                 pass
         return 0
 
     def _verify_default_settings(self):
         # verify that default settings are there
-        if ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').count() != 1:
-            ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
-            ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')
+        if ToasterSetting.objects.filter(name='DEFAULT_RELEASE').count() != 1:
+            ToasterSetting.objects.filter(name='DEFAULT_RELEASE').delete()
+            ToasterSetting.objects.get_or_create(name='DEFAULT_RELEASE', value='')
         return 0
 
     def _verify_builds_in_progress(self):
         # we are just starting up. we must not have any builds in progress, or build environments taken
-        for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
-            BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")
+        for b in BuildRequest.objects.filter(state=BuildRequest.REQ_INPROGRESS):
+            BRError.objects.create(req=b, errtype="toaster",
+                                   errmsg=
+                                   "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")
 
-        BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS).update(state = BuildRequest.REQ_FAILED)
+        BuildRequest.objects.filter(state=BuildRequest.REQ_INPROGRESS).update(state=BuildRequest.REQ_FAILED)
 
-        BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)
+        BuildEnvironment.objects.update(lock=BuildEnvironment.LOCK_FREE)
 
         # also mark "In Progress builds as failures"
         from django.utils import timezone
-        Build.objects.filter(outcome = Build.IN_PROGRESS).update(outcome = Build.FAILED, completed_on = timezone.now())
+        Build.objects.filter(outcome=Build.IN_PROGRESS).update(outcome=Build.FAILED, completed_on=timezone.now())
 
         return 0
 
diff --git a/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py b/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
index 718e144..48dc618 100644
--- a/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
+++ b/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
@@ -5,6 +5,7 @@
 from bldcontrol.models import BuildRequest, BuildEnvironment, BRError, BRVariable
 import os
 import logging
+import time
 
 logger = logging.getLogger("ToasterScheduler")
 
@@ -118,7 +119,7 @@
             br.save()
             # transpose target information
             for brtarget in br.brtarget_set.all():
-                Target.objects.create(build = br.build, target= brtarget.target)
+                Target.objects.create(build=br.build, target=brtarget.target, task=brtarget.task)
             # transpose the launch errors in ToasterExceptions
             for brerror in br.brerror_set.all():
                 LogMessage.objects.create(build = br.build, level = LogMessage.EXCEPTION, message = brerror.errmsg)
@@ -128,6 +129,12 @@
 
 
     def handle_noargs(self, **options):
-        self.cleanup()
-        self.archive()
-        self.schedule()
+        while True:
+            try:
+                self.cleanup()
+                self.archive()
+                self.schedule()
+            except:
+                pass
+
+            time.sleep(1)
diff --git a/yocto-poky/bitbake/lib/toaster/bldcontrol/migrations/0009_auto__add_field_brlayer_layer_version.py b/yocto-poky/bitbake/lib/toaster/bldcontrol/migrations/0009_auto__add_field_brlayer_layer_version.py
new file mode 100644
index 0000000..9b50bc1
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/bldcontrol/migrations/0009_auto__add_field_brlayer_layer_version.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+    def forwards(self, orm):
+        # Adding field 'BRLayer.layer_version'
+        db.add_column(u'bldcontrol_brlayer', 'layer_version',
+                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'], null=True),
+                      keep_default=False)
+
+
+    def backwards(self, orm):
+        # Deleting field 'BRLayer.layer_version'
+        db.delete_column(u'bldcontrol_brlayer', 'layer_version_id')
+
+
+    models = {
+        u'bldcontrol.brbitbake': {
+            'Meta': {'object_name': 'BRBitbake'},
+            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
+        },
+        u'bldcontrol.brerror': {
+            'Meta': {'object_name': 'BRError'},
+            'errmsg': ('django.db.models.fields.TextField', [], {}),
+            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+            'traceback': ('django.db.models.fields.TextField', [], {})
+        },
+        u'bldcontrol.brlayer': {
+            'Meta': {'object_name': 'BRLayer'},
+            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
+        },
+        u'bldcontrol.brtarget': {
+            'Meta': {'object_name': 'BRTarget'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+        },
+        u'bldcontrol.brvariable': {
+            'Meta': {'object_name': 'BRVariable'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+        },
+        u'bldcontrol.buildenvironment': {
+            'Meta': {'object_name': 'BuildEnvironment'},
+            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
+            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
+            'betype': ('django.db.models.fields.IntegerField', [], {}),
+            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+        },
+        u'bldcontrol.buildrequest': {
+            'Meta': {'object_name': 'BuildRequest'},
+            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
+            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+        },
+        u'orm.bitbakeversion': {
+            'Meta': {'object_name': 'BitbakeVersion'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        },
+        u'orm.branch': {
+            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.build': {
+            'Meta': {'object_name': 'Build'},
+            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'started_on': ('django.db.models.fields.DateTimeField', [], {})
+        },
+        u'orm.layer': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+        },
+        u'orm.layer_version': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'local_path': ('django.db.models.fields.FilePathField', [], {'default': "'/'", 'max_length': '1024'}),
+            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
+            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.layersource': {
+            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+        },
+        u'orm.project': {
+            'Meta': {'object_name': 'Project'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']", 'null': 'True'}),
+            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']", 'null': 'True'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+        },
+        u'orm.release': {
+            'Meta': {'object_name': 'Release'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        }
+    }
+
+    complete_apps = ['bldcontrol']
\ No newline at end of file
diff --git a/yocto-poky/bitbake/lib/toaster/bldcontrol/models.py b/yocto-poky/bitbake/lib/toaster/bldcontrol/models.py
index b61de58..ab41105 100644
--- a/yocto-poky/bitbake/lib/toaster/bldcontrol/models.py
+++ b/yocto-poky/bitbake/lib/toaster/bldcontrol/models.py
@@ -1,6 +1,6 @@
 from django.db import models
 from django.core.validators import MaxValueValidator, MinValueValidator
-from orm.models import Project, ProjectLayer, ProjectVariable, ProjectTarget, Build
+from orm.models import Project, ProjectLayer, ProjectVariable, ProjectTarget, Build, Layer_Version
 
 # a BuildEnvironment is the equivalent of the "build/" directory on the localhost
 class BuildEnvironment(models.Model):
@@ -39,40 +39,6 @@
     created     = models.DateTimeField(auto_now_add = True)
     updated     = models.DateTimeField(auto_now = True)
 
-
-    def get_artifact_type(self, path):
-        if self.betype == BuildEnvironment.TYPE_LOCAL:
-            try:
-                import magic
-
-                # fair warning: this is a mess; there are multiple competeing and incompatible
-                # magic modules floating around, so we try some of the most common combinations
-
-                try:    # we try ubuntu's python-magic 5.4
-                    m = magic.open(magic.MAGIC_MIME_TYPE)
-                    m.load()
-                    return m.file(path)
-                except AttributeError:
-                    pass
-
-                try:    # we try python-magic 0.4.6
-                    m = magic.Magic(magic.MAGIC_MIME)
-                    return m.from_file(path)
-                except AttributeError:
-                    pass
-
-                try:    # we try pip filemagic 1.6
-                    m = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
-                    return m.id_filename(path)
-                except AttributeError:
-                    pass
-
-                return "binary/octet-stream"
-            except ImportError:
-                return "binary/octet-stream"
-        raise Exception("FIXME: artifact type not implemented for build environment type %s" % self.get_betype_display())
-
-
     def get_artifact(self, path):
         if self.betype == BuildEnvironment.TYPE_LOCAL:
             return open(path, "r")
@@ -137,6 +103,7 @@
     giturl      = models.CharField(max_length = 254)
     commit      = models.CharField(max_length = 254)
     dirpath     = models.CharField(max_length = 254)
+    layer_version = models.ForeignKey(Layer_Version, null=True)
 
 class BRBitbake(models.Model):
     req         = models.ForeignKey(BuildRequest, unique = True)    # only one bitbake for a request
diff --git a/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py b/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py
index 2a2078f..eac167b 100755
--- a/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py
+++ b/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py
@@ -221,6 +221,68 @@
     """
     return re.findall(r'([0-9]+)', s)
 
+# Below is decorator derived from toaster backend test code
+class NoParsingFilter(logging.Filter):
+    def filter(self, record):
+        return record.levelno == 100
+
+def LogResults(original_class):
+    orig_method = original_class.run
+
+    #rewrite the run method of unittest.TestCase to add testcase logging
+    def run(self, result, *args, **kws):
+        orig_method(self, result, *args, **kws)
+        passed = True
+        testMethod = getattr(self, self._testMethodName)
+
+        #if test case is decorated then use it's number, else use it's name
+        try:
+            test_case = testMethod.test_case
+        except AttributeError:
+            test_case = self._testMethodName
+
+        #create custom logging level for filtering.
+        custom_log_level = 100
+        logging.addLevelName(custom_log_level, 'RESULTS')
+        caller = os.path.basename(sys.argv[0])
+
+        def results(self, message, *args, **kws):
+            if self.isEnabledFor(custom_log_level):
+                self.log(custom_log_level, message, *args, **kws)
+        logging.Logger.results = results
+
+        logging.basicConfig(filename=os.path.join(os.getcwd(),'results-'+caller+'.log'),
+                            filemode='w',
+                            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+                            datefmt='%H:%M:%S',
+                            level=custom_log_level)
+        for handler in logging.root.handlers:
+            handler.addFilter(NoParsingFilter())
+#        local_log = logging.getLogger(caller)
+        local_log = logging.getLogger()
+
+        #check status of tests and record it
+        for (name, msg) in result.errors:
+            if self._testMethodName == str(name).split(' ')[0]:
+                local_log.results("Testcase "+str(test_case)+": ERROR")
+                local_log.results("Testcase "+str(test_case)+":\n"+msg+"\n\n\n")
+                passed = False
+        for (name, msg) in result.failures:
+            if self._testMethodName == str(name).split(' ')[0]:
+                local_log.results("Testcase "+str(test_case)+": FAILED")
+                local_log.results("Testcase "+str(test_case)+":\n"+msg+"\n\n\n")
+                passed = False
+        for (name, msg) in result.skipped:
+            if self._testMethodName == str(name).split(' ')[0]:
+                local_log.results("Testcase "+str(test_case)+": SKIPPED"+"\n\n\n")
+                passed = False
+        if passed:
+            local_log.results("Testcase "+str(test_case)+": PASSED"+"\n\n\n")
+
+    original_class.run = run
+    return original_class
+
+
 
 
 ###########################################
@@ -321,7 +383,10 @@
             log_path = log_dir + os.sep +  self.browser + '-' +\
                     item + '-' + add_name + '-' + str(self.screenshot_sequence) + '.png'
             if item == 'native':
-                os.system("scrot " + log_path)
+                if self.host_os == "linux":
+                    os.system("scrot " + log_path)
+                elif self.host_os=="darwin":
+                    os.system("screencapture -x " + log_path)
             elif item == 'selenium':
                 self.driver.get_screenshot_as_file(log_path)
             self.screenshot_sequence += 1
@@ -531,6 +596,7 @@
     def is_text_present (self, patterns):
         for pattern in patterns:
             if str(pattern) not in self.driver.page_source:
+                print pattern
                 return False
         return True
 
@@ -592,7 +658,7 @@
 # Note: to comply with the unittest framework, we call these test_xxx functions
 # from run_toastercases.py to avoid calling setUp() and tearDown() multiple times
 
-
+@LogResults
 class toaster_cases(toaster_cases_base):
         ##############
         #  CASE 901  #
@@ -627,12 +693,12 @@
             if is_list_inverted(column_list):
                 self.driver.find_element_by_link_text(key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key))
             else:
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list), msg=("%s column not sequenced" % key))
                 self.driver.find_element_by_link_text(key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_inverted(column_list))
+                self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key))
         self.log.info("case passed")
 
 
@@ -656,10 +722,10 @@
             # if nothing found, we still count it as "pass"
             if new_target_column_texts:
                 for text in new_target_column_texts:
-                    self.failUnless(text.find(pattern))
+                    self.assertTrue(text.find(pattern), msg=("%s item doesn't exist " % pattern))
             self.driver.find_element_by_css_selector("i.icon-remove").click()
             target_column_texts = self.get_table_column_text("class", "target")
-            self.failUnless(ori_target_column_texts == target_column_texts)
+            self.assertTrue(ori_target_column_texts == target_column_texts, msg=("builds changed after operations"))
 
 
         ##############
@@ -682,10 +748,9 @@
             try:
                 temp_element = self.find_element_by_text_in_table('otable', item)
                 # this is how we find "filter icon" in the same level as temp_element(where "a" means clickable, "i" means icon)
-                self.failUnless(temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']"))
+                self.assertTrue(temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']"))
             except Exception,e:
-                self.log.error(" %s cannot be found! %s" % (item, e))
-                self.failIf(True)
+                self.assertFalse(True, msg=(" %s cannot be found! %s" % (item, e)))
                 raise
         # step 5-6
         temp_element = self.find_element_by_link_text_in_table('otable', 'Outcome')
@@ -728,10 +793,12 @@
         # This is how we find the "default" rows-number!
         rows_displayed = int(Select(self.driver.find_element_by_css_selector("select.pagesize")).first_selected_option.text)
         print rows_displayed
-        self.failUnless(self.get_table_element(self.table_name, rows_displayed))
-        self.failIf(self.get_table_element(self.table_name, rows_displayed + 1))
+        self.assertTrue(self.get_table_element(self.table_name, rows_displayed), msg=("not enough rows displayed"))
+        self.assertFalse(self.get_table_element(self.table_name, rows_displayed + 1), \
+                         msg=("more rows displayed than expected"))
         # Search text box background text is "Search tasks"
-        self.failUnless(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search tasks']"))
+        self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search tasks']"),\
+                        msg=("background text doesn't exist"))
 
         self.driver.find_element_by_id("search").clear()
         self.driver.find_element_by_id("search").send_keys("busybox")
@@ -760,22 +827,23 @@
             column_list = self.get_table_column_text("class", table_head_dict[key])
 # after 1st click, the list should be either sequenced or inverted, but we don't have a "default order" here
 # the point is, after another click, it should be another order
-# the fist case is special:this means every item in column_list is the same, so
+# the first case is special:this means every item in column_list is the same, so
 # after one click, either sequenced or inverted will be fine
             if (is_list_inverted(column_list) and is_list_sequenced(column_list)) \
                 or (not column_list) :
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_list) or is_list_inverted(column_list))
+                self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list), \
+                                msg=("%s column not in any order" % key))
             elif is_list_inverted(column_list):
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key))
             else:
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key))
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_inverted(column_list))
+                self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key))
 # step 8-10
         # filter dict: {link text name : filter table name in xpath}
         filter_dict = {'Executed':'filter_executed', 'Outcome':'filter_outcome', 'Cache attempt':'filter_cache_attempt'}
@@ -834,9 +902,9 @@
             self.find_element_by_link_text_in_table('nav', key).click()
             head_list = self.get_table_head_text('otable')
             for item in test_dict[key]['check_head_list']:
-                self.failUnless(item in head_list)
+                self.assertTrue(item in head_list, msg=("%s not in head row" % item))
             column_list = self.get_table_column_text('class', test_dict[key]['class'])
-            self.failUnless(is_list_inverted(column_list))
+            self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key))
 
             self.driver.find_element_by_id("edit-columns-button").click()
             for item2 in test_dict[key]['check_column_list']:
@@ -862,21 +930,24 @@
         self.driver.find_element_by_partial_link_text("Generated files").click()
         head_list = self.get_table_head_text('otable')
         for item in ['File', 'Size']:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("%s not in head row" % item))
         c_list = self.get_table_column_text('class', 'path')
-        self.failUnless(is_list_sequenced(c_list))
+        self.assertTrue(is_list_sequenced(c_list), msg=("column not in order"))
 # step 7
         self.driver.find_element_by_partial_link_text("Runtime dependencies").click()
         # save sceen here to observe...
         # note that here table name is not 'otable'
         head_list = self.get_table_head_text('dependencies')
         for item in ['Package', 'Version', 'Size']:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("%s not in head row" % item))
         c_list = self.get_table_column_text_by_column_number('dependencies', 1)
-        self.failUnless(is_list_sequenced(c_list))
+        self.assertTrue(is_list_sequenced(c_list), msg=("list not in order"))
         texts = ['Size', 'License', 'Recipe', 'Recipe version', 'Layer', \
-                     'Layer branch', 'Layer commit', 'Layer directory']
-        self.failUnless(self.is_text_present(texts))
+                     'Layer branch', 'Layer commit']
+        time.sleep(1)
+#        for text in texts:
+#            self.assertTrue(self.is_text_present(text), msg=("text %s not in page" % text))
+        self.assertTrue(self.is_text_present(texts), msg=("text  not in page"))
 
 
         ##############
@@ -898,8 +969,8 @@
         # This is how we find the "default" rows-number!
         rows_displayed = int(Select(self.driver.find_element_by_css_selector("select.pagesize")).first_selected_option.text)
         print rows_displayed
-        self.failUnless(self.get_table_element(self.table_name, rows_displayed))
-        self.failIf(self.get_table_element(self.table_name, rows_displayed + 1))
+        self.assertTrue(self.get_table_element(self.table_name, rows_displayed))
+        self.assertFalse(self.get_table_element(self.table_name, rows_displayed + 1))
 
         # Check the default table is sorted by Recipe
         tasks_column_count = len(self.driver.find_elements_by_xpath("/html/body/div[2]/div/div[2]/div[2]/table/tbody/tr/td[1]"))
@@ -907,10 +978,10 @@
         default_column_list = self.get_table_column_text_by_column_number(self.table_name, 1)
         #print default_column_list
 
-        self.failUnless(is_list_sequenced(default_column_list))
+        self.assertTrue(is_list_sequenced(default_column_list))
 
         # Search text box background text is "Search recipes"
-        self.failUnless(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
+        self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
 
         self.driver.find_element_by_id("search").clear()
         self.driver.find_element_by_id("search").send_keys(test_package1)
@@ -937,7 +1008,7 @@
 
         #self.driver.find_element_by_partial_link_text("zlib").click()
         #self.driver.back()
-        #self.failUnless(is_list_inverted(inverted_column_list))
+        #self.assertTrue(is_list_inverted(inverted_column_list))
         #self.find_element_by_link_text_in_table(self.table_name, 'Recipe').click()
 
         table_head_dict = {'Recipe':'recipe__name', 'Recipe file':'recipe_file', 'Section':'recipe_section', \
@@ -950,52 +1021,52 @@
                     or (not column_list) :
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_list) or is_list_inverted(column_list))
+                self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list))
                 self.driver.find_element_by_partial_link_text("acl").click()
                 self.driver.back()
-                self.failUnless(is_list_sequenced(column_list) or is_list_inverted(column_list))
+                self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list))
                 # Search text box background text is "Search recipes"
-                self.failUnless(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
+                self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
                 self.driver.find_element_by_id("search").clear()
                 self.driver.find_element_by_id("search").send_keys(test_package2)
                 self.driver.find_element_by_id("search-button").click()
                 column_search_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_search_list) or is_list_inverted(column_search_list))
+                self.assertTrue(is_list_sequenced(column_search_list) or is_list_inverted(column_search_list))
                 self.driver.find_element_by_css_selector("i.icon-remove").click()
             elif is_list_inverted(column_list):
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list))
                 self.driver.find_element_by_partial_link_text("acl").click()
                 self.driver.back()
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list))
                 # Search text box background text is "Search recipes"
-                self.failUnless(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
+                self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
                 self.driver.find_element_by_id("search").clear()
                 self.driver.find_element_by_id("search").send_keys(test_package2)
                 self.driver.find_element_by_id("search-button").click()
                 column_search_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_sequenced(column_search_list))
+                self.assertTrue(is_list_sequenced(column_search_list))
                 self.driver.find_element_by_css_selector("i.icon-remove").click()
             else:
-                self.failUnless(is_list_sequenced(column_list))
+                self.assertTrue(is_list_sequenced(column_list))
                 self.find_element_by_link_text_in_table(self.table_name, key).click()
                 column_list = self.get_table_column_text("class", table_head_dict[key])
-                self.failUnless(is_list_inverted(column_list))
+                self.assertTrue(is_list_inverted(column_list))
                 try:
                     self.driver.find_element_by_partial_link_text("acl").click()
                 except:
                     self.driver.find_element_by_partial_link_text("zlib").click()
                 self.driver.back()
-                self.failUnless(is_list_inverted(column_list))
+                self.assertTrue(is_list_inverted(column_list))
                 # Search text box background text is "Search recipes"
-                self.failUnless(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
+                self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']"))
                 self.driver.find_element_by_id("search").clear()
                 self.driver.find_element_by_id("search").send_keys(test_package2)
                 self.driver.find_element_by_id("search-button").click()
                 column_search_list = self.get_table_column_text("class", table_head_dict[key])
                 #print column_search_list
-                self.failUnless(is_list_inverted(column_search_list))
+                self.assertTrue(is_list_inverted(column_search_list))
                 self.driver.find_element_by_css_selector("i.icon-remove").click()
 
         # Bug 5919
@@ -1011,7 +1082,7 @@
             #print tasks_column_count
             default_column_list = self.get_table_column_text_by_column_number(self.table_name, 1)
             #print default_column_list
-            self.failUnless(is_list_sequenced(default_column_list))
+            self.assertTrue(is_list_sequenced(default_column_list))
 
         self.driver.find_element_by_id("edit-columns-button").click()
         self.driver.find_element_by_id("recipe_file").click()
@@ -1061,31 +1132,31 @@
         # step 3
         head_list = self.get_table_head_text('otable')
         for item in ['Recipe', 'Recipe version', 'Recipe file', 'Section', 'License', 'Layer']:
-            self.failUnless(item in head_list)
-        self.driver.find_element_by_css_selector("button.btn.dropdown-toggle").click()
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
+        self.driver.find_element_by_id("edit-columns-button").click()
         self.driver.find_element_by_id("depends_on").click()
         self.driver.find_element_by_id("layer_version__branch").click()
         self.driver.find_element_by_id("layer_version__layer__commit").click()
         self.driver.find_element_by_id("depends_by").click()
-        self.driver.find_element_by_css_selector("button.btn.dropdown-toggle").click()
+        self.driver.find_element_by_id("edit-columns-button").click()
         # check if columns selected above is shown
-        check_list = ['Dependencies', 'Layer branch', 'Layer commit', 'Layer directory', 'Reverse dependencies']
+        check_list = ['Dependencies', 'Layer branch', 'Layer commit', 'Reverse dependencies']
         head_list = self.get_table_head_text('otable')
         time.sleep(2)
         print head_list
         for item in check_list:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
         # un-check 'em all
-        self.driver.find_element_by_css_selector("button.btn.dropdown-toggle").click()
+        self.driver.find_element_by_id("edit-columns-button").click()
         self.driver.find_element_by_id("depends_on").click()
         self.driver.find_element_by_id("layer_version__branch").click()
         self.driver.find_element_by_id("layer_version__layer__commit").click()
         self.driver.find_element_by_id("depends_by").click()
-        self.driver.find_element_by_css_selector("button.btn.dropdown-toggle").click()
+        self.driver.find_element_by_id("edit-columns-button").click()
         # don't exist any more
         head_list = self.get_table_head_text('otable')
         for item in check_list:
-            self.failIf(item in head_list)
+            self.assertFalse(item in head_list, msg=("item %s should not be in head row" % item))
 
 
         ##############
@@ -1101,7 +1172,7 @@
         # step 3
         head_list = self.get_table_head_text('otable')
         for item in ['Recipe', 'Recipe version', 'Recipe file', 'Section', 'License', 'Layer']:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
         # step 4
         self.driver.find_element_by_id("edit-columns-button").click()
         # save screen
@@ -1315,7 +1386,8 @@
         head_list = self.get_table_head_text('otable')
         print head_list
         print len(head_list)
-        self.failUnless(head_list == ['Variable', 'Value', 'Set in file', 'Description'])
+        self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \
+                        msg=("head row contents wrong"))
 # step 8
         # search other string. and click "Variable" to re-sort, check if table
         # head is still the same
@@ -1324,10 +1396,12 @@
         self.driver.find_element_by_id("search-button").click()
         self.find_element_by_link_text_in_table('otable', 'Variable').click()
         head_list = self.get_table_head_text('otable')
-        self.failUnless(head_list == ['Variable', 'Value', 'Set in file', 'Description'])
+        self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \
+                        msg=("head row contents wrong"))
         self.find_element_by_link_text_in_table('otable', 'Variable').click()
         head_list = self.get_table_head_text('otable')
-        self.failUnless(head_list == ['Variable', 'Value', 'Set in file', 'Description'])
+        self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \
+                        msg=("head row contents wrong"))
 
 
         ##############
@@ -1343,11 +1417,11 @@
         self.find_element_by_link_text_in_table('nav', 'Configuration').click()
         self.driver.find_element_by_link_text("BitBake variables").click()
         variable_list = self.get_table_column_text('class', 'variable_name')
-        self.failUnless(is_list_sequenced(variable_list))
+        self.assertTrue(is_list_sequenced(variable_list), msg=("list not in order"))
 # step 4
         self.find_element_by_link_text_in_table('otable', 'Variable').click()
         variable_list = self.get_table_column_text('class', 'variable_name')
-        self.failUnless(is_list_inverted(variable_list))
+        self.assertTrue(is_list_inverted(variable_list), msg=("list not inverted"))
         self.find_element_by_link_text_in_table('otable', 'Variable').click()
 # step 5
         # searching won't change the sequentiality
@@ -1355,7 +1429,7 @@
         self.driver.find_element_by_id("search").send_keys("lib")
         self.driver.find_element_by_id("search-button").click()
         variable_list = self.get_table_column_text('class', 'variable_name')
-        self.failUnless(is_list_sequenced(variable_list))
+        self.assertTrue(is_list_sequenced(variable_list), msg=("list not in order"))
 
 
         ##############
@@ -1369,7 +1443,7 @@
         # Step 2
         # default sequence in "Completed on" column is inverted
         c_list = self.get_table_column_text('class', 'completed_on')
-        self.failUnless(is_list_inverted(c_list))
+        self.assertTrue(is_list_inverted(c_list), msg=("list not inverted"))
         # step 3
         self.driver.find_element_by_id("edit-columns-button").click()
         self.driver.find_element_by_id("started_on").click()
@@ -1377,8 +1451,8 @@
         self.driver.find_element_by_id("time").click()
         self.driver.find_element_by_id("edit-columns-button").click()
         head_list = self.get_table_head_text('otable')
-        for item in ['Outcome', 'Target', 'Machine', 'Started on', 'Completed on', 'Failed tasks', 'Errors', 'Warnings', 'Warnings', 'Time']:
-            self.failUnless(item in head_list)
+        for item in ['Outcome', 'Recipe', 'Machine', 'Started on', 'Completed on', 'Failed tasks', 'Errors', 'Warnings', 'Warnings', 'Time']:
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
 
 
         ##############
@@ -1392,7 +1466,7 @@
         # Please refer to case 924 requirement
         # default sequence in "Completed on" column is inverted
         c_list = self.get_table_column_text('class', 'completed_on')
-        self.failUnless(is_list_inverted(c_list))
+        self.assertTrue(is_list_inverted(c_list), msg=("list not inverted"))
         # Step 4
         # click Errors , order in "Completed on" should be disturbed. Then hide
         # error column to check if order in "Completed on" can be restored
@@ -1403,7 +1477,7 @@
         # Note: without time.sleep here, there'll be unpredictable error..TBD
         time.sleep(1)
         c_list = self.get_table_column_text('class', 'completed_on')
-        self.failUnless(is_list_inverted(c_list))
+        self.assertTrue(is_list_inverted(c_list), msg=("list not inverted"))
 
 
         ##############
@@ -1419,7 +1493,7 @@
         self.find_element_by_link_text_in_table('nav', 'Packages').click()
         check_head_list = ['Package', 'Package version', 'Size', 'Recipe']
         head_list = self.get_table_head_text('otable')
-        self.failUnless(head_list == check_head_list)
+        self.assertTrue(head_list == check_head_list, msg=("head row not as expected"))
 # Step 4
         # pulldown menu
         option_ids = ['recipe__layer_version__layer__name', 'recipe__layer_version__branch', \
@@ -1448,7 +1522,7 @@
         self.find_element_by_link_text_in_table('nav', 'Packages').click()
         # column -- Package
         column_list = self.get_table_column_text_by_column_number('otable', 1)
-        self.failUnless(is_list_sequenced(column_list))
+        self.assertTrue(is_list_sequenced(column_list), msg=("list not in order"))
         self.find_element_by_link_text_in_table('otable', 'Size').click()
 
 
@@ -1470,7 +1544,7 @@
         self.driver.find_element_by_id("edit-columns-button").click()
         #get modified table header
         new_head = self.get_table_head_text('otable')
-        self.failUnless(head_list > new_head)
+        self.assertTrue(head_list > new_head)
 
         ##############
         #  CASE 943  #
@@ -1487,7 +1561,7 @@
         self.driver.find_element_by_id("search").send_keys("bash")
         self.driver.find_element_by_id("search-button").click()
         #check for the search result message "XX packages found"
-        self.failUnless(self.is_text_present("packages found"))
+        self.assertTrue(self.is_text_present("packages found"), msg=("no packages found text"))
 
 
         ##############
@@ -1508,11 +1582,12 @@
         self.driver.find_element_by_id("edit-columns-button").click()
         # otable is the recipes table here
         otable_head_text = self.get_table_head_text('otable')
-        for item in ["Layer", "Layer branch", "Layer commit", "Layer directory"]:
-            self.failIf(item not in otable_head_text)
+        for item in ["Layer", "Layer branch", "Layer commit"]:
+            self.assertFalse(item not in otable_head_text, msg=("item %s should be in head row" % item))
         # click the fist recipe, whatever it is
         self.get_table_element("otable", 1, 1).click()
-        self.failUnless(self.is_text_present(["Layer", "Layer branch", "Layer commit", "Recipe file"]))
+        self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit", "Recipe file"]), \
+                        msg=("text not in web page"))
 
         # step 2: test Packages page stuff. almost same as above
         self.driver.back()
@@ -1525,10 +1600,11 @@
         self.driver.find_element_by_id("edit-columns-button").click()
         otable_head_text = self.get_table_head_text("otable")
         for item in ["Layer", "Layer branch", "Layer commit"]:
-            self.failIf(item not in otable_head_text)
+            self.assertFalse(item not in otable_head_text, msg=("item %s should be in head row" % item))
         # click the fist recipe, whatever it is
         self.get_table_element("otable", 1, 1).click()
-        self.failUnless(self.is_text_present(["Layer", "Layer branch", "Layer commit"]))
+        self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit"]), \
+                        msg=("text not in web page"))
 
         # step 3: test Packages core-image-minimal(images) stuff. almost same as above. Note when future element-id changes...
         self.driver.back()
@@ -1540,17 +1616,18 @@
         self.driver.find_element_by_id("edit-columns-button").click()
         otable_head_text = self.get_table_head_text("otable")
         for item in ["Layer", "Layer branch", "Layer commit"]:
-            self.failIf(item not in otable_head_text)
+            self.assertFalse(item not in otable_head_text, msg=("item %s should be in head row" % item))
         # click the fist recipe, whatever it is
         self.get_table_element("otable", 1, 1).click()
-        self.failUnless(self.is_text_present(["Layer", "Layer branch", "Layer commit"]))
+        self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit"]), \
+                        msg=("text not in web page"))
 
         # step 4: check Configuration page
         self.driver.back()
         self.driver.find_element_by_link_text("Configuration").click()
         otable_head_text = self.get_table_head_text()
         for item in ["Layer", "Layer branch", "Layer commit"]:
-            self.failIf(item not in otable_head_text)
+            self.assertTrue(item not in otable_head_text, msg=("item %s should not be in head row" % item))
 
 
         ##############
@@ -1575,14 +1652,14 @@
             # Sure we can use driver.get(url) to refresh page, but since page will vary, we use click link text here
             self.driver.find_element_by_link_text(items).click()
             Select(self.driver.find_element_by_css_selector("select.pagesize")).select_by_visible_text(str(rows_displayed))
-            self.failUnless(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed) +"]"))
-            self.failIf(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed+1) +"]"))
+            self.assertTrue(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed) +"]"))
+            self.assertFalse(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed+1) +"]"))
 
             # click 1st package, then go back to check if it's still those rows shown.
             self.driver.find_element_by_xpath(xpath_table + "/tr[1]/td[1]").click()
             self.driver.find_element_by_link_text(items).click()
-            self.failUnless(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed) +"]"))
-            self.failIf(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed+1) +"]"))
+            self.assertTrue(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed) +"]"))
+            self.assertFalse(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed+1) +"]"))
 
 
         ##############
@@ -1673,7 +1750,7 @@
         check_list = ['Description', 'Set in file']
         head_list = self.get_table_head_text('otable')
         for item in check_list:
-            self.failIf(item in head_list)
+            self.assertFalse(item in head_list, msg=("item %s should not be in head row" % item))
         # check these 2 options and verify again
         self.driver.find_element_by_id('edit-columns-button').click()
         self.driver.find_element_by_xpath(xpath_option('description')).click()
@@ -1681,7 +1758,7 @@
         self.driver.find_element_by_id('edit-columns-button').click()
         head_list = self.get_table_head_text('otable')
         for item in check_list:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
 
 
         ##############
@@ -1703,7 +1780,7 @@
         self.driver.find_element_by_id("search-button").click()
         #get number of variables visible after search
         number_after_search = self.driver.find_element_by_class_name('page-header').text
-        self.failUnless(number_before_search > number_after_search)
+        self.assertTrue(number_before_search > number_after_search, msg=("items should be less after search"))
 
 
         ##############
@@ -1722,11 +1799,11 @@
             self.driver.find_element_by_partial_link_text("Directory structure")
         except Exception,e:
             self.log.error(e)
-            self.failIf(True)
+            self.assertFalse(True)
         # step 4
         head_list = self.get_table_head_text('otable')
         for item in ['Package', 'Package version', 'Size', 'Dependencies', 'Reverse dependencies', 'Recipe']:
-            self.failUnless(item in head_list)
+            self.assertTrue(item in head_list, msg=("item %s not in head row" % item))
         # step 5-6
         self.driver.find_element_by_id("edit-columns-button").click()
         selectable_class = 'checkbox'
@@ -1746,22 +1823,15 @@
             unselectable_list.append(element.text)
         # check them
         for item in selectable_check_list:
-            if item not in selectable_list:
-                self.log.error(" %s not found in dropdown menu \n" % item)
-                self.failIf(True)
+            self.assertTrue(item in selectable_list, msg=("%s not found in dropdown menu" % item))
         for item in unselectable_check_list:
-            if item not in unselectable_list:
-                self.log.error(" %s not found in dropdown menu \n" % item)
-                self.failIf(True)
+            self.assertTrue(item in unselectable_list, msg=("%s not found in dropdown menu" % item))
         self.driver.find_element_by_id("edit-columns-button").click()
         # step 7
         self.driver.find_element_by_partial_link_text("Directory structure").click()
         head_list = self.get_table_head_text('dirtable')
         for item in ['Directory / File', 'Symbolic link to', 'Source package', 'Size', 'Permissions', 'Owner', 'Group']:
-            if item not in head_list:
-                self.log.error(" %s not found in Directory structure table head \n" % item)
-                self.failIf(True)
-
+            self.assertTrue(item in head_list, msg=("%s not found in Directory structure table head" % item))
 
         ##############
         #  CASE 950  #
@@ -1791,12 +1861,11 @@
                 try:
                     self.find_element_by_link_text_in_table('nav', item)
                 except Exception:
-                    self.log.error("link  %s cannot be found in the page" % item)
-                    self.failIf(True)
+                    self.assertFalse(True, msg=("link  %s cannot be found in the page" % item))
             # step 6
             check_list_2 = ['Packages included', 'Total package size', \
                       'License manifest', 'Image files']
-            self.failUnless(self.is_text_present(check_list_2))
+            self.assertTrue(self.is_text_present(check_list_2), msg=("text not in web page"))
             self.driver.back()
         try:
             fail_icon = self.driver.find_element_by_xpath("//*[@class='icon-minus-sign error']")
@@ -1813,12 +1882,11 @@
                 try:
                     self.find_element_by_link_text_in_table('nav', item)
                 except Exception:
-                    self.log.error("link  %s cannot be found in the page" % item)
-                    self.failIf(True)
+                    self.assertFalse(True, msg=("link  %s cannot be found in the page" % item))
             # step 7 involved
             check_list_3 = ['Machine', 'Distro', 'Layers', 'Total number of tasks', 'Tasks executed', \
                       'Tasks not executed', 'Reuse', 'Recipes built', 'Packages built']
-            self.failUnless(self.is_text_present(check_list_3))
+            self.assertTrue(self.is_text_present(check_list_3), msg=("text not in web page"))
             self.driver.back()
 
 
@@ -1878,6 +1946,5 @@
                        tasks, recipes, packages need to run manually")
         self.driver.find_element_by_partial_link_text("Toaster manual").click()
         if not self.is_text_present("Toaster Manual"):
-            self.log.error("please check [Toaster manual] link on page")
-            self.failIf(True)
+            self.assertFalse(True, msg=("please check [Toaster manual] link on page"))
 
diff --git a/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg b/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg
index 6405f9a..685a9ee 100644
--- a/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg
+++ b/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg
@@ -18,4 +18,8 @@
 test_cases = [901, 902, 903]
 logging_level = 'DEBUG'
 
-
+[toaster_test_darwin]
+toaster_url = 'http://127.0.0.1:8000'
+test_browser = 'firefox'
+test_cases = [901, 902, 903, 904, 906, 910, 911, 912, 913, 914, 915, 916, 923, 924, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 955, 956]
+logging_level = 'INFO'
diff --git a/yocto-poky/bitbake/lib/toaster/orm/migrations/0027_auto__add_customimagerecipe__add_unique_customimagerecipe_name_project.py b/yocto-poky/bitbake/lib/toaster/orm/migrations/0027_auto__add_customimagerecipe__add_unique_customimagerecipe_name_project.py
new file mode 100644
index 0000000..6030605
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/orm/migrations/0027_auto__add_customimagerecipe__add_unique_customimagerecipe_name_project.py
@@ -0,0 +1,375 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+    def forwards(self, orm):
+        # Adding model 'CustomImageRecipe'
+        db.create_table(u'orm_customimagerecipe', (
+            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
+            ('base_recipe', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Recipe'])),
+            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Project'])),
+        ))
+        db.send_create_signal(u'orm', ['CustomImageRecipe'])
+
+        # Adding M2M table for field packages on 'CustomImageRecipe'
+        m2m_table_name = db.shorten_name(u'orm_customimagerecipe_packages')
+        db.create_table(m2m_table_name, (
+            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+            ('customimagerecipe', models.ForeignKey(orm[u'orm.customimagerecipe'], null=False)),
+            ('package', models.ForeignKey(orm[u'orm.package'], null=False))
+        ))
+        db.create_unique(m2m_table_name, ['customimagerecipe_id', 'package_id'])
+
+        # Adding unique constraint on 'CustomImageRecipe', fields ['name', 'project']
+        db.create_unique(u'orm_customimagerecipe', ['name', 'project_id'])
+
+
+        # Changing field 'Package.build'
+        db.alter_column(u'orm_package', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True))
+
+    def backwards(self, orm):
+        # Removing unique constraint on 'CustomImageRecipe', fields ['name', 'project']
+        db.delete_unique(u'orm_customimagerecipe', ['name', 'project_id'])
+
+        # Deleting model 'CustomImageRecipe'
+        db.delete_table(u'orm_customimagerecipe')
+
+        # Removing M2M table for field packages on 'CustomImageRecipe'
+        db.delete_table(db.shorten_name(u'orm_customimagerecipe_packages'))
+
+
+        # Changing field 'Package.build'
+        db.alter_column(u'orm_package', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.Build']))
+
+    models = {
+        u'orm.bitbakeversion': {
+            'Meta': {'object_name': 'BitbakeVersion'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        },
+        u'orm.branch': {
+            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.build': {
+            'Meta': {'object_name': 'Build'},
+            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'started_on': ('django.db.models.fields.DateTimeField', [], {})
+        },
+        u'orm.buildartifact': {
+            'Meta': {'object_name': 'BuildArtifact'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+            'file_size': ('django.db.models.fields.IntegerField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+        },
+        u'orm.customimagerecipe': {
+            'Meta': {'unique_together': "(('name', 'project'),)", 'object_name': 'CustomImageRecipe'},
+            'base_recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'packages': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['orm.Package']", 'symmetrical': 'False'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+        },
+        u'orm.helptext': {
+            'Meta': {'object_name': 'HelpText'},
+            'area': ('django.db.models.fields.IntegerField', [], {}),
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'text': ('django.db.models.fields.TextField', [], {})
+        },
+        u'orm.layer': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+        },
+        u'orm.layer_version': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'local_path': ('django.db.models.fields.FilePathField', [], {'default': "'/'", 'max_length': '1024'}),
+            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
+            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.layersource': {
+            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+        },
+        u'orm.layerversiondependency': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.logmessage': {
+            'Meta': {'object_name': 'LogMessage'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+        },
+        u'orm.machine': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.package': {
+            'Meta': {'object_name': 'Package'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+        },
+        u'orm.package_dependency': {
+            'Meta': {'object_name': 'Package_Dependency'},
+            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+        },
+        u'orm.package_file': {
+            'Meta': {'object_name': 'Package_File'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'size': ('django.db.models.fields.IntegerField', [], {})
+        },
+        u'orm.project': {
+            'Meta': {'object_name': 'Project'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']", 'null': 'True'}),
+            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']", 'null': 'True'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+        },
+        u'orm.projectlayer': {
+            'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+        },
+        u'orm.projecttarget': {
+            'Meta': {'object_name': 'ProjectTarget'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+        },
+        u'orm.projectvariable': {
+            'Meta': {'object_name': 'ProjectVariable'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+        },
+        u'orm.recipe': {
+            'Meta': {'unique_together': "(('layer_version', 'file_path', 'pathflags'),)", 'object_name': 'Recipe'},
+            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'pathflags': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+        },
+        u'orm.recipe_dependency': {
+            'Meta': {'object_name': 'Recipe_Dependency'},
+            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+        },
+        u'orm.release': {
+            'Meta': {'object_name': 'Release'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        },
+        u'orm.releasedefaultlayer': {
+            'Meta': {'object_name': 'ReleaseDefaultLayer'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+        },
+        u'orm.releaselayersourcepriority': {
+            'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
+            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+        },
+        u'orm.target': {
+            'Meta': {'object_name': 'Target'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+        },
+        u'orm.target_file': {
+            'Meta': {'object_name': 'Target_File'},
+            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+            'size': ('django.db.models.fields.IntegerField', [], {}),
+            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.target_image_file': {
+            'Meta': {'object_name': 'Target_Image_File'},
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+            'file_size': ('django.db.models.fields.IntegerField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.target_installed_package': {
+            'Meta': {'object_name': 'Target_Installed_Package'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.task': {
+            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2'}),
+            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tasks'", 'to': u"orm['orm.Recipe']"}),
+            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+        },
+        u'orm.task_dependency': {
+            'Meta': {'object_name': 'Task_Dependency'},
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+        },
+        u'orm.toastersetting': {
+            'Meta': {'object_name': 'ToasterSetting'},
+            'helptext': ('django.db.models.fields.TextField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+        },
+        u'orm.variable': {
+            'Meta': {'object_name': 'Variable'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+        },
+        u'orm.variablehistory': {
+            'Meta': {'object_name': 'VariableHistory'},
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+        }
+    }
+
+    complete_apps = ['orm']
\ No newline at end of file
diff --git a/yocto-poky/bitbake/lib/toaster/orm/migrations/0028_auto__chg_field_logmessage_message.py b/yocto-poky/bitbake/lib/toaster/orm/migrations/0028_auto__chg_field_logmessage_message.py
new file mode 100644
index 0000000..a2f8661
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/orm/migrations/0028_auto__chg_field_logmessage_message.py
@@ -0,0 +1,345 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+    def forwards(self, orm):
+
+        # Changing field 'LogMessage.message'
+        db.alter_column(u'orm_logmessage', 'message', self.gf('django.db.models.fields.TextField')(null=True))
+
+    def backwards(self, orm):
+
+        # Changing field 'LogMessage.message'
+        db.alter_column(u'orm_logmessage', 'message', self.gf('django.db.models.fields.CharField')(default='', max_length=240))
+
+    models = {
+        u'orm.bitbakeversion': {
+            'Meta': {'object_name': 'BitbakeVersion'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        },
+        u'orm.branch': {
+            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.build': {
+            'Meta': {'object_name': 'Build'},
+            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'started_on': ('django.db.models.fields.DateTimeField', [], {})
+        },
+        u'orm.buildartifact': {
+            'Meta': {'object_name': 'BuildArtifact'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+            'file_size': ('django.db.models.fields.IntegerField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+        },
+        u'orm.customimagerecipe': {
+            'Meta': {'unique_together': "(('name', 'project'),)", 'object_name': 'CustomImageRecipe'},
+            'base_recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'packages': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['orm.Package']", 'symmetrical': 'False'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+        },
+        u'orm.helptext': {
+            'Meta': {'object_name': 'HelpText'},
+            'area': ('django.db.models.fields.IntegerField', [], {}),
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'text': ('django.db.models.fields.TextField', [], {})
+        },
+        u'orm.layer': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+        },
+        u'orm.layer_version': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'local_path': ('django.db.models.fields.FilePathField', [], {'default': "'/'", 'max_length': '1024'}),
+            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
+            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.layersource': {
+            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+        },
+        u'orm.layerversiondependency': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.logmessage': {
+            'Meta': {'object_name': 'LogMessage'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+        },
+        u'orm.machine': {
+            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+        },
+        u'orm.package': {
+            'Meta': {'object_name': 'Package'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+        },
+        u'orm.package_dependency': {
+            'Meta': {'object_name': 'Package_Dependency'},
+            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+        },
+        u'orm.package_file': {
+            'Meta': {'object_name': 'Package_File'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'size': ('django.db.models.fields.IntegerField', [], {})
+        },
+        u'orm.project': {
+            'Meta': {'object_name': 'Project'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']", 'null': 'True'}),
+            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']", 'null': 'True'}),
+            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+        },
+        u'orm.projectlayer': {
+            'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+        },
+        u'orm.projecttarget': {
+            'Meta': {'object_name': 'ProjectTarget'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+        },
+        u'orm.projectvariable': {
+            'Meta': {'object_name': 'ProjectVariable'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+        },
+        u'orm.recipe': {
+            'Meta': {'unique_together': "(('layer_version', 'file_path', 'pathflags'),)", 'object_name': 'Recipe'},
+            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'pathflags': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+        },
+        u'orm.recipe_dependency': {
+            'Meta': {'object_name': 'Recipe_Dependency'},
+            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+        },
+        u'orm.release': {
+            'Meta': {'object_name': 'Release'},
+            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+        },
+        u'orm.releasedefaultlayer': {
+            'Meta': {'object_name': 'ReleaseDefaultLayer'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+        },
+        u'orm.releaselayersourcepriority': {
+            'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
+            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+        },
+        u'orm.target': {
+            'Meta': {'object_name': 'Target'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+        },
+        u'orm.target_file': {
+            'Meta': {'object_name': 'Target_File'},
+            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+            'size': ('django.db.models.fields.IntegerField', [], {}),
+            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.target_image_file': {
+            'Meta': {'object_name': 'Target_Image_File'},
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+            'file_size': ('django.db.models.fields.IntegerField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.target_installed_package': {
+            'Meta': {'object_name': 'Target_Installed_Package'},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+        },
+        u'orm.task': {
+            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2'}),
+            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tasks'", 'to': u"orm['orm.Recipe']"}),
+            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+        },
+        u'orm.task_dependency': {
+            'Meta': {'object_name': 'Task_Dependency'},
+            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+        },
+        u'orm.toastersetting': {
+            'Meta': {'object_name': 'ToasterSetting'},
+            'helptext': ('django.db.models.fields.TextField', [], {}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+        },
+        u'orm.variable': {
+            'Meta': {'object_name': 'Variable'},
+            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+        },
+        u'orm.variablehistory': {
+            'Meta': {'object_name': 'VariableHistory'},
+            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+        }
+    }
+
+    complete_apps = ['orm']
\ No newline at end of file
diff --git a/yocto-poky/bitbake/lib/toaster/orm/models.py b/yocto-poky/bitbake/lib/toaster/orm/models.py
index e4d2e87..3832905 100644
--- a/yocto-poky/bitbake/lib/toaster/orm/models.py
+++ b/yocto-poky/bitbake/lib/toaster/orm/models.py
@@ -191,10 +191,11 @@
 
     # returns a queryset of compatible layers for a project
     def compatible_layerversions(self, release = None, layer_name = None):
+        logger.warning("This function is deprecated")
         if release == None:
             release = self.release
         # layers on the same branch or layers specifically set for this project
-        queryset = Layer_Version.objects.filter((Q(up_branch__name = release.branch_name) & Q(project = None)) | Q(project = self) | Q(build__project = self))
+        queryset = Layer_Version.objects.filter(((Q(up_branch__name = release.branch_name) & Q(project = None)) | Q(project = self)) & Q(build__isnull=True))
 
         if layer_name is not None:
             # we select only a layer name
@@ -205,45 +206,55 @@
 
         return queryset
 
-    def projectlayer_equivalent_set(self):
-        return self.compatible_layerversions().filter(layer__name__in = [x.layercommit.layer.name for x in self.projectlayer_set.all()]).select_related("up_branch")
+    def get_all_compatible_layer_versions(self):
+        """ Returns Queryset of all Layer_Versions which are compatible with
+        this project"""
+        queryset = Layer_Version.objects.filter(
+            (Q(up_branch__name=self.release.branch_name) & Q(build=None))
+            | Q(project=self))
+
+        return queryset
+
+    def get_project_layer_versions(self, pk=False):
+        """ Returns the Layer_Versions currently added to this project """
+        layer_versions = self.projectlayer_set.all().values('layercommit')
+
+        if pk is False:
+            return layer_versions
+        else:
+            return layer_versions.values_list('layercommit__pk', flat=True)
+
 
     def get_available_machines(self):
         """ Returns QuerySet of all Machines which are provided by the
         Layers currently added to the Project """
-        queryset = Machine.objects.filter(layer_version__in=self.projectlayer_equivalent_set)
+        queryset = Machine.objects.filter(
+            layer_version__in=self.get_project_layer_versions())
+
         return queryset
 
     def get_all_compatible_machines(self):
         """ Returns QuerySet of all the compatible machines available to the
         project including ones from Layers not currently added """
-        compatible_layers = self.compatible_layerversions()
+        queryset = Machine.objects.filter(
+            layer_version__in=self.get_all_compatible_layer_versions())
 
-        queryset = Machine.objects.filter(layer_version__in=compatible_layers)
         return queryset
 
     def get_available_recipes(self):
-        """ Returns QuerySet of all Recipes which are provided by the Layers
-        currently added to the Project """
-        project_layers = self.projectlayer_equivalent_set()
-        queryset = Recipe.objects.filter(layer_version__in = project_layers)
-
-        # Copied from get_all_compatible_recipes
-        search_maxids = map(lambda i: i[0], list(queryset.values('name').distinct().annotate(max_id=Max('id')).values_list('max_id')))
-        queryset = queryset.filter(id__in=search_maxids).select_related('layer_version', 'layer_version__layer', 'layer_version__up_branch', 'layer_source')
-        # End copy
+        """ Returns QuerySet of all the recipes that are provided by layers
+        added to this project """
+        queryset = Recipe.objects.filter(
+            layer_version__in=self.get_project_layer_versions())
 
         return queryset
 
     def get_all_compatible_recipes(self):
         """ Returns QuerySet of all the compatible Recipes available to the
         project including ones from Layers not currently added """
-        compatible_layerversions = self.compatible_layerversions()
-        queryset = Recipe.objects.filter(layer_version__in = compatible_layerversions)
+        queryset = Recipe.objects.filter(
+            layer_version__in=self.get_all_compatible_layer_versions()).exclude(name__exact='')
 
-        search_maxids = map(lambda i: i[0], list(queryset.values('name').distinct().annotate(max_id=Max('id')).values_list('max_id')))
-
-        queryset = queryset.filter(id__in=search_maxids).select_related('layer_version', 'layer_version__layer', 'layer_version__up_branch', 'layer_source')
         return queryset
 
 
@@ -260,7 +271,7 @@
             for l in self.projectlayer_set.all().order_by("pk"):
                 commit = l.layercommit.get_vcs_reference()
                 print("ii Building layer ", l.layercommit.layer.name, " at vcs point ", commit)
-                BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath)
+                BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath, layer_version=l.layercommit)
 
             br.state = BuildRequest.REQ_QUEUED
             now = timezone.now()
@@ -270,7 +281,7 @@
                                 )
             for t in self.projecttarget_set.all():
                 BRTarget.objects.create(req = br, target = t.target, task = t.task)
-                Target.objects.create(build = br.build, target = t.target)
+                Target.objects.create(build = br.build, target = t.target, task = t.task)
 
             for v in self.projectvariable_set.all():
                 BRVariable.objects.create(req = br, name = v.name, value = v.value)
@@ -333,13 +344,14 @@
         tgts = Target.objects.filter(build_id = self.id).order_by( 'target' );
         return( tgts );
 
-    @property
-    def toaster_exceptions(self):
-        return self.logmessage_set.filter(level=LogMessage.EXCEPTION)
+    def get_outcome_text(self):
+        return Build.BUILD_OUTCOME[int(self.outcome)][1]
 
     @property
     def errors(self):
-        return (self.logmessage_set.filter(level=LogMessage.ERROR)|self.logmessage_set.filter(level=LogMessage.EXCEPTION))
+        return (self.logmessage_set.filter(level=LogMessage.ERROR) |
+                self.logmessage_set.filter(level=LogMessage.EXCEPTION) |
+                self.logmessage_set.filter(level=LogMessage.CRITICAL))
 
     @property
     def warnings(self):
@@ -350,10 +362,23 @@
         return (self.completed_on - self.started_on).total_seconds()
 
     def get_current_status(self):
+        """
+        get the status string from the build request if the build
+        has one, or the text for the build outcome if it doesn't
+        """
+
         from bldcontrol.models import BuildRequest
-        if self.outcome == Build.IN_PROGRESS and self.buildrequest.state != BuildRequest.REQ_INPROGRESS:
+
+        build_request = None
+        if hasattr(self, 'buildrequest'):
+            build_request = self.buildrequest
+
+        if (build_request
+                and build_request.state != BuildRequest.REQ_INPROGRESS
+                and self.outcome == Build.IN_PROGRESS):
             return self.buildrequest.get_state_display()
-        return self.get_outcome_display()
+        else:
+            return self.get_outcome_text()
 
     def __str__(self):
         return "%d %s %s" % (self.id, self.project, ",".join([t.target for t in self.target_set.all()]))
@@ -551,7 +576,7 @@
 
 class Package(models.Model):
     search_allowed_fields = ['name', 'version', 'revision', 'recipe__name', 'recipe__version', 'recipe__license', 'recipe__layer_version__layer__name', 'recipe__layer_version__branch', 'recipe__layer_version__commit', 'recipe__layer_version__local_path', 'installed_name']
-    build = models.ForeignKey('Build')
+    build = models.ForeignKey('Build', null=True)
     recipe = models.ForeignKey('Recipe', null=True)
     name = models.CharField(max_length=100)
     installed_name = models.CharField(max_length=100, default='')
@@ -828,6 +853,7 @@
         import urllib2, urlparse, json
         import os
         proxy_settings = os.environ.get("http_proxy", None)
+        oe_core_layer = 'openembedded-core'
 
         def _get_json_response(apiurl = self.apiurl):
             _parsedurl = urlparse.urlparse(apiurl)
@@ -872,6 +898,25 @@
         if not connection.features.autocommits_when_autocommit_is_off:
             transaction.set_autocommit(False)
         for li in layers_info:
+            # Special case for the openembedded-core layer
+            if li['name'] == oe_core_layer:
+                try:
+                    # If we have an existing openembedded-core for example
+                    # from the toasterconf.json augment the info using the
+                    # layerindex rather than duplicate it
+                    oe_core_l =  Layer.objects.get(name=oe_core_layer)
+                    # Take ownership of the layer as now coming from the
+                    # layerindex
+                    oe_core_l.layer_source = self
+                    oe_core_l.up_id = li['id']
+                    oe_core_l.summary = li['summary']
+                    oe_core_l.description = li['description']
+                    oe_core_l.save()
+                    continue
+
+                except Layer.DoesNotExist:
+                    pass
+
             l, created = Layer.objects.get_or_create(layer_source = self, name = li['name'])
             l.up_id = li['id']
             l.up_date = li['updated']
@@ -882,6 +927,7 @@
             l.summary = li['summary']
             l.description = li['description']
             l.save()
+
         if not connection.features.autocommits_when_autocommit_is_off:
             transaction.set_autocommit(True)
 
@@ -974,9 +1020,12 @@
                 ro.file_path = ri['filepath'] + "/" + ri['filename']
                 if 'inherits' in ri:
                     ro.is_image = 'image' in ri['inherits'].split()
+                else: # workaround for old style layer index
+                    ro.is_image = "-image-" in ri['pn']
                 ro.save()
             except IntegrityError as e:
                 logger.debug("Failed saving recipe, ignoring: %s (%s:%s)" % (e, ro.layer_version, ri['filepath']+"/"+ri['filename']))
+                ro.delete()
         if not connection.features.autocommits_when_autocommit_is_off:
             transaction.set_autocommit(True)
 
@@ -1132,17 +1181,36 @@
         return project.compatible_layerversions(layer_name = self.layer.name)
 
     def get_vcs_reference(self):
-        if self.commit is not None and len(self.commit) > 0:
-            return self.commit
         if self.branch is not None and len(self.branch) > 0:
             return self.branch
         if self.up_branch is not None:
             return self.up_branch.name
+        if self.commit is not None and len(self.commit) > 0:
+            return self.commit
         return ("Cannot determine the vcs_reference for layer version %s" % vars(self))
 
     def get_detailspage_url(self, project_id):
         return reverse('layerdetails', args=(project_id, self.pk))
 
+    def get_alldeps(self, project_id):
+        """Get full list of unique layer dependencies."""
+        def gen_layerdeps(lver, project):
+            for ldep in lver.dependencies.all():
+                yield ldep.depends_on
+                # get next level of deps recursively calling gen_layerdeps
+                for subdep in gen_layerdeps(ldep.depends_on, project):
+                    yield subdep
+
+        project = Project.objects.get(pk=project_id)
+        result = []
+        projectlvers = [player.layercommit for player in project.projectlayer_set.all()]
+        for dep in gen_layerdeps(self, project):
+            # filter out duplicates and layers already belonging to the project
+            if dep not in result + projectlvers:
+                result.append(dep)
+
+        return sorted(result, key=lambda x: x.layer.name)
+
     def __unicode__(self):
         return "%d %s (VCS %s, Project %s)" % (self.pk, str(self.layer), self.get_vcs_reference(), self.build.project if self.build is not None else "No project")
 
@@ -1170,6 +1238,15 @@
     class Meta:
         unique_together = (("project", "layercommit"),)
 
+class CustomImageRecipe(models.Model):
+    name = models.CharField(max_length=100)
+    base_recipe = models.ForeignKey(Recipe)
+    packages = models.ManyToManyField(Package)
+    project = models.ForeignKey(Project)
+
+    class Meta:
+        unique_together = ("name", "project")
+
 class ProjectVariable(models.Model):
     project = models.ForeignKey(Project)
     name = models.CharField(max_length=100)
@@ -1206,16 +1283,20 @@
     INFO = 0
     WARNING = 1
     ERROR = 2
+    CRITICAL = 3
 
-    LOG_LEVEL = ( (INFO, "info"),
-            (WARNING, "warn"),
-            (ERROR, "error"),
-            (EXCEPTION, "toaster exception"))
+    LOG_LEVEL = (
+        (INFO, "info"),
+        (WARNING, "warn"),
+        (ERROR, "error"),
+        (CRITICAL, "critical"),
+        (EXCEPTION, "toaster exception")
+    )
 
     build = models.ForeignKey(Build)
     task  = models.ForeignKey(Task, blank = True, null=True)
     level = models.IntegerField(choices=LOG_LEVEL, default=INFO)
-    message=models.CharField(max_length=240)
+    message = models.TextField(blank=True, null=True)
     pathname = models.FilePathField(max_length=255, blank=True)
     lineno = models.IntegerField(null=True)
 
diff --git a/yocto-poky/bitbake/lib/toaster/orm/tests.py b/yocto-poky/bitbake/lib/toaster/orm/tests.py
index 783aea8..719266e 100644
--- a/yocto-poky/bitbake/lib/toaster/orm/tests.py
+++ b/yocto-poky/bitbake/lib/toaster/orm/tests.py
@@ -23,12 +23,11 @@
 
 from django.test import TestCase, TransactionTestCase
 from orm.models import LocalLayerSource, LayerIndexLayerSource, ImportedLayerSource, LayerSource
-from orm.models import Branch
+from orm.models import Branch, LayerVersionDependency
 
-from orm.models import Project, Build, Layer, Layer_Version, Branch, ProjectLayer
+from orm.models import Project, Layer, Layer_Version, Branch, ProjectLayer
 from orm.models import Release, ReleaseLayerSourcePriority, BitbakeVersion
 
-from django.utils import timezone
 from django.db import IntegrityError
 
 import os
@@ -153,35 +152,29 @@
         equivqs = self.lver.get_equivalents_wpriority(self.project)
         self.assertEqual(list(equivqs), [lver2, self.lver])
 
-    def test_build_layerversion(self):
+    def test_compatible_layer_versions(self):
         """
-        Any layer version coming from the build should show up
-        before any layer version coming from upstream
-        """
-        build = Build.objects.create(project=self.project,
-                                     started_on=timezone.now(),
-                                     completed_on=timezone.now())
-        lvb = Layer_Version.objects.create(layer=self.layer, build=build,
-                                           commit="deadbeef")
-
-        # a build layerversion must be in the equivalence
-        # list for the original layerversion
-        equivqs = self.lver.get_equivalents_wpriority(self.project)
-        self.assertTrue(len(equivqs) == 2)
-        self.assertTrue(equivqs[0] == self.lver)
-        self.assertTrue(equivqs[1] == lvb)
-
-        # getting the build layerversion equivalent list must
-        # return the same list as the original layer
-        bequivqs = lvb.get_equivalents_wpriority(self.project)
-
-        self.assertEqual(list(equivqs), list(bequivqs))
-
-    def test_compatible_layerversions(self):
-        """
-        When we have a 2 layer versions, compatible_layerversions()
+        When we have a 2 layer versions, get_all_compatible_layerversions()
         should return a queryset with both.
         """
-        compat_lv = self.project.compatible_layerversions()
+        compat_lv = self.project.get_all_compatible_layer_versions()
         self.assertEqual(list(compat_lv), [self.lver, self.lver2])
 
+    def test_layerversion_get_alldeps(self):
+        """Test Layer_Version.get_alldeps API."""
+        lvers = {}
+        for i in range(10):
+            name = "layer%d" % i
+            lvers[name] = Layer_Version.objects.create(layer=Layer.objects.create(name=name),
+                                                       project=self.project)
+            if i:
+                LayerVersionDependency.objects.create(layer_version=lvers["layer%d" % (i - 1)],
+                                                      depends_on=lvers[name])
+                # Check dynamically added deps
+                self.assertEqual(lvers['layer0'].get_alldeps(self.project.id),
+                                 [lvers['layer%d' % n] for n in range(1, i+1)])
+
+        # Check chain of deps created in previous loop
+        for i in range(10):
+            self.assertEqual(lvers['layer%d' % i].get_alldeps(self.project.id),
+                             [lvers['layer%d' % n] for n in range(i+1, 10)])
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css b/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css
index cce3e31..bc8a97b 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css
@@ -15,6 +15,8 @@
 /* Styles for the help information */
 .get-help { color: #CCCCCC; }
 .get-help:hover, .icon-plus-sign:hover { color: #999999; cursor: pointer; }
+.get-help-green { color: #468847; }
+.get-help-green:hover { color: #347132; cursor: pointer; }
 .get-help-blue { color: #3A87AD; }
 .get-help-blue:hover { color: #005580; cursor: pointer; }
 .get-help-yellow { color: #C09853; }
@@ -161,9 +163,16 @@
 .project-name .label { font-weight: normal; margin-bottom: 5px; margin-left: -15px; padding: 5px; }
 .project-name .label > a { color: #fff; font-weight: normal; }
 
+/* styles for showing help icons next to command-line builds */
+.build-result .get-help-green, .build-result .get-help-red, .build-result .get-help-blue { margin-right: 35px; margin-top: 8px; font-size: 16px; }
+
 /* Remove bottom margin for forms inside modal dialogs */
 #dependencies-modal-form { margin-bottom: 0px; }
 
+/* Custom column widths */
+.narrow-col { width: 8%; }
+.medium-col { width: 12%; }
+
 /* Configuration styles */
 .icon-trash { color: #B94A48; font-size: 16px; padding-left: 5px; }
 .icon-trash:hover { color: #943A38; text-decoration: none; cursor: pointer; }
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/base.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/base.js
index 895e61b..ed22a4e 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/base.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/base.js
@@ -6,6 +6,7 @@
   var newBuildTargetInput;
   var newBuildTargetBuildBtn;
   var projectNameForm = $("#project-name-change-form");
+  var projectNameContainer = $("#project-name-container");
   var projectName = $("#project-name");
   var projectNameFormToggle = $("#project-change-form-toggle");
   var projectNameChangeCancel = $("#project-name-change-cancel");
@@ -23,24 +24,21 @@
   /* Project name change functionality */
   projectNameFormToggle.click(function(e){
     e.preventDefault();
-
-    $(this).add(projectName).hide();
+    projectNameContainer.hide();
     projectNameForm.fadeIn();
   });
 
   projectNameChangeCancel.click(function(e){
     e.preventDefault();
-
     projectNameForm.hide();
-    projectName.add(projectNameFormToggle).fadeIn();
+    projectNameContainer.fadeIn();
   });
 
   $("#project-name-change-btn").click(function(e){
     var newProjectName = $("#project-name-change-input").val();
 
-    libtoaster.editCurrentProject({ projectName: newProjectName },function (){
-
-      projectName.text(newProjectName);
+    libtoaster.editCurrentProject({ projectName: newProjectName }, function (){
+      projectName.html(newProjectName);
       libtoaster.ctx.projectName = newProjectName;
       projectNameChangeCancel.click();
     });
@@ -123,14 +121,14 @@
   });
 
   function _checkProjectBuildable() {
-    if (selectedProject.projectId === undefined) {
+    if (selectedProject.projectId === undefined || selectedProject.projectIsDefault) {
       return;
     }
 
     libtoaster.getProjectInfo(selectedProject.projectPageUrl,
       function (data) {
         if (data.machine === null || data.machine.name === undefined || data.layers.length === 0) {
-          /* we can't build anything with out a machine and some layers */
+          /* we can't build anything without a machine and some layers */
           $("#new-build-button #targets-form").hide();
           $("#new-build-button .alert").show();
         } else {
@@ -149,7 +147,7 @@
     /* If we don't have a current project then present the set project
      * form.
      */
-    if (selectedProject.projectId === undefined) {
+    if (selectedProject.projectId === undefined || selectedProject.projectIsDefault) {
       $('#change-project-form').show();
       $('#project .icon-pencil').hide();
     }
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js
new file mode 100644
index 0000000..4f6b304
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js
@@ -0,0 +1,50 @@
+"use strict";
+
+function customRecipePageInit(ctx) {
+
+  var urlParams = libtoaster.parseUrlParams();
+
+  (function notificationRequest(){
+    if (urlParams.hasOwnProperty('notify') && urlParams.notify === 'new'){
+      $("#image-created-notification").show();
+    }
+  })();
+
+  $("#recipeselection").on('table-done', function(e, total, tableParams){
+    /* Table is done so now setup the click handler for the package buttons */
+    $(".add-rm-package-btn").click(function(e){
+      e.preventDefault();
+      addRemovePackage($(this), tableParams);
+    });
+  });
+
+  function addRemovePackage(pkgBtn, tableParams){
+    var pkgBtnData = pkgBtn.data();
+    var method;
+    var buttonToShow;
+
+    if (pkgBtnData.directive == 'add') {
+      method = 'PUT';
+      buttonToShow = '#package-rm-btn-' + pkgBtnData.package;
+    } else if (pkgBtnData.directive == 'remove') {
+      method = 'DELETE';
+      buttonToShow = '#package-add-btn-' + pkgBtnData.package;
+    } else {
+      throw("Unknown package directive: should be add or remove");
+    }
+
+    $.ajax({
+        type: method,
+        url: pkgBtnData.packageUrl,
+        headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+        success: function(data){
+          /* Invalidate the Add | Rm package table's current cache */
+          tableParams.nocache = true;
+          $.get(ctx.tableApiUrl, tableParams);
+          /* Swap the buttons around */
+          pkgBtn.hide();
+          $(buttonToShow).show();
+        }
+    });
+  }
+}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
index 2fadbc0..c68f366 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js
@@ -195,8 +195,8 @@
     var dupLayerInfo = $("#duplicate-layer-info");
     dupLayerInfo.find(".dup-layer-name").text(layer.name);
     dupLayerInfo.find(".dup-layer-link").attr("href", layer.layerdetailurl);
-    dupLayerInfo.find("#dup-layer-vcs-url").text(layer.layer__vcs_url);
-    dupLayerInfo.find("#dup-layer-revision").text(layer.revision.commit);
+    dupLayerInfo.find("#dup-layer-vcs-url").text(layer.vcs_url);
+    dupLayerInfo.find("#dup-layer-revision").text(layer.vcs_reference);
 
     $(".fields-apart-from-layer-name").fadeOut(function(){
 
@@ -214,11 +214,10 @@
       $.getJSON(libtoaster.ctx.layersTypeAheadUrl,
         { include_added: "true" , search: name, format: "json" },
         function(layer) {
-          if (layer.rows.length > 0) {
-            for (var i in layer.rows){
-              if (layer.rows[i].name == name) {
-                console.log(layer.rows[i])
-                layerExistsError(layer.rows[i]);
+          if (layer.results.length > 0) {
+            for (var i in layer.results){
+              if (layer.results[i].name == name) {
+                layerExistsError(layer.results[i]);
               }
             }
           }
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/jquery.treetable.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/jquery.treetable.js
index 42e7427..794b902 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/jquery.treetable.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/jquery.treetable.js
@@ -421,7 +421,7 @@
         columnElType: "td", // i.e. 'td', 'th' or 'td,th'
         expandable: false,
         expanderTemplate: "<a href='#'>&nbsp;</a>",
-        indent: 19,
+        indent: 10,
         indenterTemplate: "<span class='indenter'></span>",
         initialState: "collapsed",
         nodeIdAttr: "ttId", // maps to data-tt-id
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerBtn.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerBtn.js
index a0509f9a..7318b3f 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerBtn.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerBtn.js
@@ -1,6 +1,6 @@
 "use strict";
 
-function layerBtnsInit(ctx) {
+function layerBtnsInit() {
 
   /* Remove any current bindings to avoid duplicated binds */
   $(".layerbtn").unbind('click');
@@ -68,10 +68,16 @@
       });
   });
 
-  /* Setup the initial state of the buttons */
 
-  for (var i in ctx.projectLayers){
-      $(".layer-exists-" + ctx.projectLayers[i]).show();
-      $(".layer-add-" + ctx.projectLayers[i]).hide();
-  }
+  $(".customise-btn").unbind('click');
+  $(".customise-btn").click(function(e){
+    e.preventDefault();
+    var imgCustomModal = $("#new-custom-image-modal");
+
+    if (imgCustomModal.length == 0)
+      throw("Modal new-custom-image not found");
+
+    imgCustomModal.data('recipe', $(this).data('recipe'));
+    imgCustomModal.modal('show');
+  });
 }
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerdetails.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerdetails.js
index 000e803..8c2ec4c 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerdetails.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/layerdetails.js
@@ -7,6 +7,9 @@
   var layerDepsList = $("#layer-deps-list");
   var currentLayerDepSelection;
   var addRmLayerBtn = $("#add-remove-layer-btn");
+  var targetTab = $("#targets-tab");
+  var machineTab = $("#machines-tab");
+  var detailsTab = $("#details-tab");
 
   /* setup the dependencies typeahead */
   libtoaster.makeTypeahead(layerDepInput, libtoaster.ctx.layersTypeAheadUrl, { include_added: "true" }, function(item){
@@ -15,6 +18,21 @@
     layerDepBtn.removeAttr("disabled");
   });
 
+  $(window).on('hashchange', function(e){
+    switch(window.location.hash){
+      case '#machines':
+        machineTab.tab('show');
+        break;
+      case '#recipes':
+        targetTab.tab('show');
+        break;
+      default:
+        detailsTab.tab('show');
+        break;
+    }
+  });
+
+
   $(".breadcrumb li:first a").click(function(e){
     e.preventDefault();
     /* By default this link goes to the project configuration page. However
@@ -143,7 +161,7 @@
       addRmLayerBtn.removeClass("btn-danger");
   }
 
-  $("#details-tab").on('show', function(){
+  detailsTab.on('show', function(){
     if (!ctx.layerVersion.inCurrentPrj)
       defaultAddBtnText();
 
@@ -174,7 +192,7 @@
       $("#no-recipes-yet").hide();
     }
 
-    $("#targets-tab").removeClass("muted");
+    targetTab.removeClass("muted");
     if (window.location.hash === "#recipes"){
       /* re run the machinesTabShow to update the text */
       targetsTabShow();
@@ -189,7 +207,7 @@
     else
       $("#no-machines-yet").hide();
 
-    $("#machines-tab").removeClass("muted");
+    machineTab.removeClass("muted");
     if (window.location.hash === "#machines"){
       /* re run the machinesTabShow to update the text */
       machinesTabShow();
@@ -202,7 +220,7 @@
 
   });
 
-  $("#targets-tab").on('show', targetsTabShow);
+  targetTab.on('show', targetsTabShow);
 
   function machinesTabShow(){
     if (!ctx.layerVersion.inCurrentPrj) {
@@ -219,7 +237,7 @@
     window.location.hash = "machines";
   }
 
-  $("#machines-tab").on('show', machinesTabShow);
+  machineTab.on('show', machinesTabShow);
 
   $(".pagesize").change(function(){
     var search = libtoaster.parseUrlParams();
@@ -236,7 +254,7 @@
 
     if (added){
       /* enable and switch all the button states */
-      $(".build-target-btn").removeAttr("disabled");
+      $(".build-recipe-btn").removeAttr("disabled");
       $(".select-machine-btn").removeAttr("disabled");
       addRmLayerBtn.addClass("btn-danger");
       addRmLayerBtn.data('directive', "remove");
@@ -245,7 +263,7 @@
 
     } else {
       /* disable and switch all the button states */
-      $(".build-target-btn").attr("disabled","disabled");
+      $(".build-recipe-btn").attr("disabled","disabled");
       $(".select-machine-btn").attr("disabled", "disabled");
       addRmLayerBtn.removeClass("btn-danger");
       addRmLayerBtn.data('directive', "add");
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/newcustomimage.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/newcustomimage.js
new file mode 100644
index 0000000..935b21e
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/newcustomimage.js
@@ -0,0 +1,49 @@
+"use strict";
+
+function newCustomImagePageInit(ctx){
+
+  var newCustomImgBtn = $("#create-new-custom-image-btn");
+  var imgCustomModal = $("#new-custom-image-modal");
+
+  newCustomImgBtn.click(function(e){
+    e.preventDefault();
+
+    var name = imgCustomModal.find('input').val();
+    var baseRecipeId = imgCustomModal.data('recipe');
+
+    if (name.length > 0) {
+      createCustomRecipe(name, baseRecipeId);
+      imgCustomModal.modal('hide');
+    } else {
+      console.warn("TODO No name supplied");
+    }
+  });
+
+  function createCustomRecipe(name, baseRecipeId){
+    var data = {
+      'name' : name,
+      'project' : libtoaster.ctx.projectId,
+      'base' : baseRecipeId,
+    };
+
+    $.ajax({
+        type: "POST",
+        url: ctx.xhrCustomRecipeUrl,
+        data: data,
+        headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+        success: function (ret) {
+          if (ret.error !== "ok") {
+            console.warn(ret.error);
+          } else {
+            window.location.replace(ret.url + '?notify=new');
+          }
+        },
+        error: function (ret) {
+          console.warn("Call failed");
+          console.warn(ret);
+        }
+    });
+  }
+
+
+}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/projectpage.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/projectpage.js
index d367047..e742ef2 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/projectpage.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/projectpage.js
@@ -23,7 +23,7 @@
   var cancelReleaseChange = $("#cancel-release-change");
 
   var currentLayerAddSelection;
-  var currentMachineAddSelection = {};
+  var currentMachineAddSelection = "";
 
   var urlParams = libtoaster.parseUrlParams();
 
@@ -38,7 +38,7 @@
      */
     if (urlParams.hasOwnProperty('setMachine') &&
         urlParams.setMachine !== prjInfo.machine.name){
-        currentMachineAddSelection.name = urlParams.setMachine;
+        machineChangeInput.val(urlParams.setMachine);
         machineChangeBtn.click();
     } else {
       updateMachineName(prjInfo.machine.name);
@@ -103,6 +103,12 @@
     layerAddBtn.removeAttr("disabled");
   });
 
+  layerAddInput.keyup(function() {
+    if ($(this).val().length == 0) {
+      layerAddBtn.attr("disabled", "disabled")
+    }
+  });
+
   layerAddBtn.click(function(e){
     e.preventDefault();
     var layerObj = currentLayerAddSelection;
@@ -146,10 +152,7 @@
 
       link.attr("href", layerObj.layerdetailurl);
       link.text(layerObj.name);
-      /* YOCTO #8024
-        link.tooltip({title: layerObj.giturl + " | "+ layerObj.branch.name, placement: "right"});
-        branch name not accessible sometimes it is revision instead
-      */
+      link.tooltip({title: layerObj.vcs_url + " | "+ layerObj.vcs_reference, placement: "right"});
 
       var trashItem = projectLayer.children("span");
       trashItem.click(function (e) {
@@ -251,29 +254,33 @@
   }
 
   libtoaster.makeTypeahead(machineChangeInput, libtoaster.ctx.machinesTypeAheadUrl, { }, function(item){
-    currentMachineAddSelection = item;
+    currentMachineAddSelection = item.name;
     machineChangeBtn.removeAttr("disabled");
   });
 
   machineChangeBtn.click(function(e){
     e.preventDefault();
-    if (currentMachineAddSelection.name === undefined)
+    /* Accept any value, whether it was chosen from the typeahead or typed directly */
+    if (machineChangeInput.val().length === 0)
       return;
 
-    libtoaster.editCurrentProject({ machineName : currentMachineAddSelection.name },
+    currentMachineAddSelection = machineChangeInput.val();
+
+    libtoaster.editCurrentProject(
+      { machineName : currentMachineAddSelection },
       function(){
         /* Success machine changed */
-        updateMachineName(currentMachineAddSelection.name);
+        updateMachineName(currentMachineAddSelection);
         machineChangeCancel.click();
 
         /* Show the alert message */
         var message = $('<span class="lead">You have changed the machine to: <strong><span id="notify-machine-name"></span></strong></span>');
-        message.find("#notify-machine-name").text(currentMachineAddSelection.name);
+        message.find("#notify-machine-name").text(currentMachineAddSelection);
         libtoaster.showChangeNotification(message);
     },
       function(){
         /* Failed machine changed */
-        console.log("failed to change machine");
+        console.warn("Failed to change machine");
     });
   });
 
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js
index f18034d..40b5022 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js
@@ -33,14 +33,6 @@
 
   loadData(tableParams);
 
-  window.onpopstate = function(event){
-    if (event.state){
-      tableParams = event.state.tableParams;
-      /* We skip loadData and just update the table */
-      updateTable(event.state.tableData);
-    }
-  };
-
   function loadData(tableParams){
     $.ajax({
         type: "GET",
@@ -49,10 +41,8 @@
         headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
         success: function(tableData) {
           updateTable(tableData);
-          window.history.pushState({
-              tableData: tableData,
-              tableParams: tableParams
-          }, null, libtoaster.dumpsUrlParams(tableParams));
+          window.history.replaceState(null, null,
+            libtoaster.dumpsUrlParams(tableParams));
         }
     });
   }
@@ -140,7 +130,7 @@
       tableBody.append(row);
 
       /* If we have layerbtns then initialise them */
-      layerBtnsInit(ctx);
+      layerBtnsInit();
 
       /* If we have popovers initialise them now */
       $('td > a.btn').popover({
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js
index d610113..aac0ba6 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js
@@ -13,8 +13,6 @@
     "name":"meta-example"
   };
 
-  var correctResponse = "You have added <strong>3</strong> layers to your project: <a id=\"layer-affected-name\" href=\"/toastergui/project/1/layer/22\">meta-example</a> and its dependencies <a href=\"/toastergui/project/1/layer/9\" data-original-title=\"\" title=\"\">meta-example-two</a>, <a href=\"/toastergui/project/1/layer/9\" data-original-title=\"\" title=\"\">meta-example-three</a>";
-
   var layerDepsList = [
     {
     "layerdetailurl":"/toastergui/project/1/layer/9",
@@ -68,9 +66,9 @@
 });
 
 var layer = {
-  "id": 91,
-  "name":  "meta-crystalforest",
-  "layerdetailurl": "/toastergui/project/4/layer/91"
+  "id": 1,
+  "name":  "meta-testing",
+  "layerdetailurl": "/toastergui/project/1/layer/1"
 };
 
 QUnit.test("Add layer", function(assert){
@@ -84,11 +82,19 @@
     }
   }, 200);
 
-  libtoaster.addRmLayer(layer, true, function(deps){
-    assert.equal(deps.length, 1);
-    done();
-  });
+  /* Compare the number of layers before and after the add in the project */
+  libtoaster.getProjectInfo(libtoaster.ctx.projectPageUrl, function(prjInfo){
+    var origNumLayers = prjInfo.layers.length;
 
+    libtoaster.addRmLayer(layer, true, function(deps){
+      libtoaster.getProjectInfo(libtoaster.ctx.projectPageUrl,
+        function(prjInfo){
+        assert.ok(prjInfo.layers.length > origNumLayers,
+          "Layer not added to project");
+        done();
+      });
+    });
+  });
 });
 
 QUnit.test("Rm layer", function(assert){
@@ -152,11 +158,11 @@
 });
 
 QUnit.test("Layer btns init", function(assert){
-  assert.throws(layerBtnsInit({ projectLayers : [] }));
+  assert.throws(layerBtnsInit());
 });
 
 QUnit.test("Table init", function(assert){
-  assert.throws(tableInit({ url : tableUrl }));
+  assert.throws(tableInit({ url : ctx.tableUrl }));
 });
 
 $(document).ajaxError(function(event, jqxhr, settings, errMsg){
@@ -167,9 +173,3 @@
     assert.notOk(jqxhr.responseText);
   });
 });
-
-
-
-
-
-
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/tables.py b/yocto-poky/bitbake/lib/toaster/toastergui/tables.py
index 92e3b5c..9c9cda4 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/tables.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/tables.py
@@ -21,6 +21,7 @@
 
 from toastergui.widgets import ToasterTable
 from orm.models import Recipe, ProjectLayer, Layer_Version, Machine, Project
+from orm.models import CustomImageRecipe, Package
 from django.db.models import Q, Max
 from django.conf.urls import url
 from django.core.urlresolvers import reverse
@@ -50,14 +51,13 @@
     def __init__(self, *args, **kwargs):
         super(LayersTable, self).__init__(*args, **kwargs)
         self.default_orderby = "layer__name"
+        self.title = "Compatible layers"
 
     def get_context_data(self, **kwargs):
         context = super(LayersTable, self).get_context_data(**kwargs)
 
         project = Project.objects.get(pk=kwargs['pid'])
-
         context['project'] = project
-        context['projectlayers'] = map(lambda prjlayer: prjlayer.layercommit.id, ProjectLayer.objects.filter(project=project))
 
         return context
 
@@ -91,7 +91,10 @@
 
     def setup_queryset(self, *args, **kwargs):
         prj = Project.objects.get(pk = kwargs['pid'])
-        compatible_layers = prj.compatible_layerversions()
+        compatible_layers = prj.get_all_compatible_layer_versions()
+
+        self.static_context_extra['current_layers'] = \
+                prj.get_project_layer_versions(pk=True)
 
         self.queryset = compatible_layers.order_by(self.default_orderby)
 
@@ -208,6 +211,7 @@
     def __init__(self, *args, **kwargs):
         super(MachinesTable, self).__init__(*args, **kwargs)
         self.empty_state = "No machines maybe you need to do a build?"
+        self.title = "Compatible machines"
         self.default_orderby = "name"
 
     def get_context_data(self, **kwargs):
@@ -218,7 +222,7 @@
 
     def setup_filters(self, *args, **kwargs):
         project = Project.objects.get(pk=kwargs['pid'])
-        self.project_layers = project.projectlayer_equivalent_set()
+        self.project_layers = project.get_project_layer_versions()
 
         self.add_filter(title="Filter by project machines",
                         name="in_current_project",
@@ -308,13 +312,20 @@
 
 
 class RecipesTable(ToasterTable, ProjectFiltersMixin):
-    """Table of Recipes in Toaster"""
+    """Table of All Recipes in Toaster"""
 
     def __init__(self, *args, **kwargs):
         super(RecipesTable, self).__init__(*args, **kwargs)
         self.empty_state = "Toaster has no recipe information. To generate recipe information you can configure a layer source then run a build."
         self.default_orderby = "name"
 
+    build_col = { 'title' : "Build",
+            'help_text' : "Add or delete recipes to and from your project",
+            'hideable' : False,
+            'filter_name' : "in_current_project",
+            'static_data_name' : "add-del-layers",
+            'static_data_template' : '{% include "recipe_btn.html" %}'}
+
     def get_context_data(self, **kwargs):
         project = Project.objects.get(pk=kwargs['pid'])
         context = super(RecipesTable, self).get_context_data(**kwargs)
@@ -326,9 +337,6 @@
         return context
 
     def setup_filters(self, *args, **kwargs):
-        project = Project.objects.get(pk=kwargs['pid'])
-        self.project_layers = project.projectlayer_equivalent_set()
-
         self.add_filter(title="Filter by project recipes",
                         name="in_current_project",
                         filter_actions=[
@@ -336,24 +344,23 @@
                             self.make_filter_action("not_in_project", "Recipes provided by layers not added to this project", self.filter_not_in_project)
                         ])
 
-
     def setup_queryset(self, *args, **kwargs):
         prj = Project.objects.get(pk = kwargs['pid'])
 
+        # Project layers used by the filters
+        self.project_layers = prj.get_project_layer_versions(pk=True)
+
+        # Project layers used to switch the button states
+        self.static_context_extra['current_layers'] = self.project_layers
+
         self.queryset = prj.get_all_compatible_recipes()
         self.queryset = self.queryset.order_by(self.default_orderby)
 
 
     def setup_columns(self, *args, **kwargs):
 
-        self.add_column(title="Recipe",
-                        help_text="Information about a single piece of software, including where to download the source, configuration options, how to compile the source files and how to package the compiled output",
-                        hideable=False,
-                        orderable=True,
-                        field_name="name")
-
-        self.add_column(title="Recipe Version",
-                        hidden=True,
+        self.add_column(title="Version",
+                        hidden=False,
                         field_name="version")
 
         self.add_column(title="Description",
@@ -374,6 +381,7 @@
 
         self.add_column(title="Section",
                         help_text="The section in which recipes should be categorized",
+                        hidden=True,
                         orderable=True,
                         field_name="section")
 
@@ -390,24 +398,14 @@
 
         self.add_column(title="License",
                         help_text="The list of source licenses for the recipe. Multiple license names separated by the pipe character indicates a choice between licenses. Multiple license names separated by the ampersand character indicates multiple licenses exist that cover different parts of the source",
+                        hidden=True,
                         orderable=True,
                         field_name="license")
 
         self.add_column(title="Revision",
+                        hidden=True,
                         field_name="layer_version__get_vcs_reference")
 
-        self.add_column(title="Build",
-                        help_text="Add or delete recipes to and from your project",
-                        hideable=False,
-                        filter_name="in_current_project",
-                        static_data_name="add-del-layers",
-                        static_data_template='{% include "recipe_btn.html" %}')
-
-        project = Project.objects.get(pk=kwargs['pid'])
-        self.add_column(title="Project compatible Layer ID",
-                        displayable = False,
-                        field_name = "projectcompatible_layer",
-                        computation = lambda x: (x.layer_version.get_equivalents_wpriority(project)[0]))
 
 class LayerRecipesTable(RecipesTable):
     """ Smaller version of the Recipes table for use in layer details """
@@ -422,8 +420,8 @@
 
 
     def setup_queryset(self, *args, **kwargs):
-        RecipesTable.setup_queryset(self, *args, **kwargs)
-        self.queryset = self.queryset.filter(layer_version__pk=int(kwargs['layerid']))
+        self.queryset = \
+                Recipe.objects.filter(layer_version__pk=int(kwargs['layerid']))
 
         self.static_context_extra['in_prj'] = ProjectLayer.objects.filter(Q(project=kwargs['pid']) & Q(layercommit=kwargs['layerid'])).count()
 
@@ -434,20 +432,197 @@
                         orderable=True,
                         field_name="name")
 
+        self.add_column(title="Version",
+                        field_name="version")
+
         self.add_column(title="Description",
                         field_name="get_description_or_summary")
 
-
         build_recipe_template ='<button class="btn btn-block build-recipe-btn" data-recipe-name="{{data.name}}" {%if extra.in_prj == 0 %}disabled="disabled"{%endif%}>Build recipe</button>'
 
         self.add_column(title="Build recipe",
                         static_data_name="add-del-layers",
                         static_data_template=build_recipe_template)
 
-class ProjectLayersRecipesTable(RecipesTable):
-    """ Table that lists only recipes available for layers added to the project """
+class CustomImagesTable(ToasterTable):
+    """ Table to display your custom images """
+    def __init__(self, *args, **kwargs):
+        super(CustomImagesTable, self).__init__(*args, **kwargs)
+        self.title = "Custom images"
+
+    def get_context_data(self, **kwargs):
+        context = super(CustomImagesTable, self).get_context_data(**kwargs)
+        project = Project.objects.get(pk=kwargs['pid'])
+        context['project'] = project
+        context['projectlayers'] = map(lambda prjlayer: prjlayer.layercommit.id, ProjectLayer.objects.filter(project=context['project']))
+        return context
 
     def setup_queryset(self, *args, **kwargs):
-        super(ProjectLayersRecipesTable, self).setup_queryset(*args, **kwargs)
         prj = Project.objects.get(pk = kwargs['pid'])
-        self.queryset = self.queryset.filter(layer_version__in = prj.projectlayer_equivalent_set())
+        self.queryset = CustomImageRecipe.objects.filter(project=prj)
+        self.queryset = self.queryset.order_by('name')
+
+    def setup_columns(self, *args, **kwargs):
+
+        name_link_template = '''
+        <a href="{% url 'customrecipe' extra.pid data.id %}">
+          {{data.name}}
+        </a>
+        '''
+
+        self.add_column(title="Custom image",
+                        hideable=False,
+                        static_data_name="name",
+                        static_data_template=name_link_template)
+
+        self.add_column(title="Recipe file",
+                        static_data_name='recipe_file',
+                        static_data_template='')
+
+        approx_packages_template = '<a href="#imagedetails">{{data.packages.all|length}}</a>'
+        self.add_column(title="Approx packages",
+                        static_data_name='approx_packages',
+                        static_data_template=approx_packages_template)
+
+
+        build_btn_template = '''<button data-recipe-name="{{data.name}}"
+        class="btn btn-block build-recipe-btn" style="margin-top: 5px;" >
+        Build</button>'''
+
+        self.add_column(title="Build",
+                        hideable=False,
+                        static_data_name='build_custom_img',
+                        static_data_template=build_btn_template)
+
+class ImageRecipesTable(RecipesTable):
+    """ A subset of the recipes table which displayed just image recipes """
+
+    def __init__(self, *args, **kwargs):
+        super(ImageRecipesTable, self).__init__(*args, **kwargs)
+        self.title = "Compatible image recipes"
+
+    def setup_queryset(self, *args, **kwargs):
+        super(ImageRecipesTable, self).setup_queryset(*args, **kwargs)
+
+        self.queryset = self.queryset.filter(is_image=True)
+
+
+    def setup_columns(self, *args, **kwargs):
+        self.add_column(title="Image recipe",
+                        help_text="When you build an image recipe, you get an "
+                                  "image: a root file system you can"
+                                  "deploy to a machine",
+                        hideable=False,
+                        orderable=True,
+                        field_name="name")
+
+        super(ImageRecipesTable, self).setup_columns(*args, **kwargs)
+
+        self.add_column(**RecipesTable.build_col)
+
+
+class NewCustomImagesTable(ImageRecipesTable):
+    """ Table which displays Images recipes which can be customised """
+    def __init__(self, *args, **kwargs):
+        super(NewCustomImagesTable, self).__init__(*args, **kwargs)
+        self.title = "Select the image recipe you want to customise"
+
+    def setup_queryset(self, *args, **kwargs):
+        super(ImageRecipesTable, self).setup_queryset(*args, **kwargs)
+
+        self.queryset = self.queryset.filter(is_image=True)
+
+    def setup_columns(self, *args, **kwargs):
+        self.add_column(title="Image recipe",
+                        help_text="When you build an image recipe, you get an "
+                                  "image: a root file system you can"
+                                  "deploy to a machine",
+                        hideable=False,
+                        orderable=True,
+                        field_name="recipe__name")
+
+        super(ImageRecipesTable, self).setup_columns(*args, **kwargs)
+
+        self.add_column(title="Customise",
+                        hideable=False,
+                        filter_name="in_current_project",
+                        static_data_name="customise-or-add-recipe",
+                        static_data_template='{% include "customise_btn.html" %}')
+
+
+class SoftwareRecipesTable(RecipesTable):
+    """ Displays just the software recipes """
+    def __init__(self, *args, **kwargs):
+        super(SoftwareRecipesTable, self).__init__(*args, **kwargs)
+        self.title = "Compatible software recipes"
+
+    def setup_queryset(self, *args, **kwargs):
+        super(SoftwareRecipesTable, self).setup_queryset(*args, **kwargs)
+
+        self.queryset = self.queryset.filter(is_image=False)
+
+
+    def setup_columns(self, *args, **kwargs):
+        self.add_column(title="Software recipe",
+                        help_text="Information about a single piece of "
+                        "software, including where to download the source, "
+                        "configuration options, how to compile the source "
+                        "files and how to package the compiled output",
+                        hideable=False,
+                        orderable=True,
+                        field_name="name")
+
+        super(SoftwareRecipesTable, self).setup_columns(*args, **kwargs)
+
+        self.add_column(**RecipesTable.build_col)
+
+
+class SelectPackagesTable(ToasterTable):
+    """ Table to display the packages to add and remove from an image """
+
+    def __init__(self, *args, **kwargs):
+        super(SelectPackagesTable, self).__init__(*args, **kwargs)
+        self.title = "Add | Remove packages"
+
+    def setup_queryset(self, *args, **kwargs):
+        cust_recipe = CustomImageRecipe.objects.get(pk=kwargs['recipeid'])
+        prj = Project.objects.get(pk = kwargs['pid'])
+
+        current_packages = cust_recipe.packages.all()
+
+        # Get all the packages that are in the custom image
+        # Get all the packages built by builds in the current project
+        # but not those ones that are already in the custom image
+        self.queryset = Package.objects.filter(
+                            Q(pk__in=current_packages) |
+                            (Q(build__project=prj) &
+                            ~Q(name__in=current_packages.values_list('name'))))
+
+        self.queryset = self.queryset.order_by('name')
+
+        self.static_context_extra['recipe_id'] = kwargs['recipeid']
+        self.static_context_extra['current_packages'] = \
+                cust_recipe.packages.values_list('pk', flat=True)
+
+    def setup_columns(self, *args, **kwargs):
+        self.add_column(title="Package",
+                        hideable=False,
+                        orderable=True,
+                        field_name="name")
+
+        self.add_column(title="Package Version",
+                        field_name="version")
+
+        self.add_column(title="Approx Size",
+                        orderable=True,
+                        static_data_name="size",
+                        static_data_template="{% load projecttags %} \
+                        {{data.size|filtered_filesizeformat}}")
+        self.add_column(title="summary",
+                        field_name="summary")
+
+        self.add_column(title="Add | Remove",
+                        help_text="Use the add and remove buttons to modify "
+                        "the package content of you custom image",
+                        static_data_name="add_rm_pkg_btn",
+                        static_data_template='{% include "pkg_add_rm_btn.html" %}')
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html
index 640bc47..11ac2a0 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html
@@ -1,9 +1,12 @@
 <!DOCTYPE html>
 {% load static %}
 {% load projecttags %}
+{% load project_url_tag %}
 <html lang="en">
     <head>
-        <title>{% if objectname %} {{objectname|title}} - {% endif %}Toaster</title>
+        <title>
+          {% block title %} Toaster {% endblock %}
+        </title>
 <link rel="stylesheet" href="{% static 'css/bootstrap.min.css' %}" type="text/css"/>
 <link rel="stylesheet" href="{% static 'css/bootstrap-responsive.min.css' %}" type='text/css'/>
 <link rel="stylesheet" href="{% static 'css/font-awesome.min.css' %}" type='text/css'/>
@@ -35,8 +38,9 @@
         projectsTypeAheadUrl: {% url 'xhr_projectstypeahead' as prjurl%}{{prjurl|json}},
         {% if project.id %}
         projectId : {{project.id}},
-        projectPageUrl : {% url 'project' project.id as purl%}{{purl|json}},
+        projectPageUrl : {% url 'project' project.id as purl %}{{purl|json}},
         projectName : {{project.name|json}},
+        projectIsDefault: {% if project.is_default %}true{% else %}false{% endif %},
         recipesTypeAheadUrl: {% url 'xhr_recipestypeahead' project.id as paturl%}{{paturl|json}},
         layersTypeAheadUrl: {% url 'xhr_layerstypeahead' project.id as paturl%}{{paturl|json}},
         machinesTypeAheadUrl: {% url 'xhr_machinestypeahead' project.id as paturl%}{{paturl|json}},
@@ -47,7 +51,7 @@
         projectId : undefined,
         projectPageUrl : undefined,
         projectName : undefined,
-        projectId : undefined,
+        projectIsDefault: false,
         {% endif %}
       };
     </script>
@@ -89,9 +93,9 @@
             <i class="icon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
             {% endif %}
           </span>
-          {% if request.resolver_match.url_name != 'landing' and request.resolver_match.url_name != 'newproject' %}
+          {% if BUILD_MODE and request.resolver_match.url_name != 'landing' and request.resolver_match.url_name != 'newproject' %}
           <ul class="nav">
-            <li  {% if request.resolver_match.url_name == 'all-builds' %}
+            <li {% if request.resolver_match.url_name == 'all-builds' %}
                 class="active"
                 {% endif %}>
               <a href="{% url 'all-builds' %}">
@@ -118,55 +122,65 @@
             </li>
           </ul>
           <span class="pull-right divider-vertical"></span>
-          <div class="btn-group pull-right">
-            <a class="btn" id="new-project-button" href="{% url 'newproject' %}">New project</a>
-          </div>
-          <!-- New build popover -->
-          <div class="btn-group pull-right" id="new-build-button" style="display:none">
-            <button class="btn dropdown-toggle" data-toggle="dropdown">
-              New build
-              <i class="icon-caret-down"></i>
-            </button>
-            <ul class="dropdown-menu new-build multi-select">
-              <li>
-                <h3>New build</h3>
-                <h6>Project:</h6>
-                <span id="project">
-                  {% if project.id %}
-                  <a class="lead" href="{% url 'project' project.id %}">{{project.name}}</a>
-                  {% else %}
-                  <a class="lead" href="#"></a>
-                  {% endif %}
-                  <i class="icon-pencil"></i>
-                </span>
-                <form id="change-project-form" style="display:none;">
-                  <div class="input-append">
-                    <input type="text" class="input-medium" id="project-name-input" placeholder="Type a project name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead"/>
-                    <button id="save-project-button" class="btn" type="button">Save</button>
-                    <a href="#" id="cancel-change-project" class="btn btn-link" style="display: none">Cancel</a>
-                  </div>
-                  <p><a id="view-all-projects" href="{% url 'all-projects' %}">View all projects</a></p>
-                </form>
-              </li>
-              <li>
-                <div class="alert" style="display:none;">
-                  <p>This project configuration is incomplete, so you cannot run builds.</p>
-                  <p><a href="{% if project.id %}{% url 'project' project.id %}{% endif %}">View project configuration</a></p>
-                </div>
-              </li>
-              <li id="targets-form">
-                <h6>Recipe(s):</h6>
-                <form>
-                  <input type="text" class="input-xlarge build-target-input" placeholder="Type a recipe name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" disabled/>
-                  <div class="row-fluid">
-                    <button class="btn btn-primary build-button" disabled>Build</button>
-                  </div>
-                </form>
-              </li>
-            </ul>
-          </div>
 
+          <!-- new project button; only show in build mode -->
+          {% if BUILD_MODE %}
+            <div class="btn-group pull-right">
+              <a class="btn" id="new-project-button" href="{% url 'newproject' %}">New project</a>
+            </div>
+          {% endif %}
 
+          <!--
+          New build popover; only shown if there is at least one user-created project
+          and we're in build mode
+          -->
+          {% if BUILD_MODE and non_cli_projects.count > 0 %}
+            <div class="btn-group pull-right" id="new-build-button" style="display:none">
+              <button class="btn dropdown-toggle" data-toggle="dropdown">
+                New build
+                <i class="icon-caret-down"></i>
+              </button>
+              <ul class="dropdown-menu new-build multi-select">
+                <li>
+                  <h3>New build</h3>
+                  <h6>
+                    Project:
+                    <span id="project">
+                      {% if project.id and not project.is_default %}
+                        <a class="lead" href="{% project_url project %}">{{project.name}}</a>
+                      {% else %}
+                        <a class="lead" href="#"></a>
+                      {% endif %}
+                      <i class="icon-pencil"></i>
+                    </span>
+                  </h6>
+                  <form id="change-project-form" style="display:none;">
+                    <div class="input-append">
+                      <input type="text" class="input-medium" id="project-name-input" placeholder="Type a project name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead"/>
+                      <button id="save-project-button" class="btn" type="button">Save</button>
+                      <a href="#" id="cancel-change-project" class="btn btn-link" style="display: none">Cancel</a>
+                    </div>
+                    <p><a id="view-all-projects" href="{% url 'all-projects' %}">View all projects</a></p>
+                  </form>
+                </li>
+                <li>
+                  <div class="alert" style="display:none;">
+                    <p>This project configuration is incomplete, so you cannot run builds.</p>
+                    <p><a href="{% if project.id %}{% url 'project' project.id %}{% endif %}">View project configuration</a></p>
+                  </div>
+                </li>
+                <li id="targets-form">
+                  <h6>Recipe(s):</h6>
+                  <form>
+                    <input type="text" class="input-xlarge build-target-input" placeholder="Type a recipe name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" disabled/>
+                    <div class="row-fluid">
+                      <button class="btn btn-primary build-button" disabled>Build</button>
+                    </div>
+                  </form>
+                </li>
+              </ul>
+            </div>
+          {% endif %}
     </div>
  </div>
 </div>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html
index 668e0bf..1f45be4 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html
@@ -1,6 +1,9 @@
 {% extends "base.html" %}
 {% load projecttags %}
 {% load humanize %}
+
+{% block title %} {{title}} - {{project.name}} - Toaster {% endblock %}
+
 {% block pagecontent %}
 
 {% include "projecttopbar.html" %}
@@ -23,8 +26,11 @@
     <ul class="nav nav-list well">
       <li><a class="nav-parent" href="{% url 'project' project.id %}">Configuration</a></li>
       <li class="nav-header">Compatible metadata</li>
-<!--  <li><a href="all-image-recipes.html">Image recipes</a></li> -->
-      <li><a href="{% url 'projecttargets' project.id %}">Recipes</a></li>
+      {% if CUSTOM_IMAGE %}
+      <li><a href="{% url 'projectcustomimages' project.id %}">Custom images</a></li>
+      {% endif %}
+      <li><a href="{% url 'projectimagerecipes' project.id %}">Image recipes</a></li>
+      <li><a href="{% url 'projectsoftwarerecipes' project.id %}">Software recipes</a></li>
       <li><a href="{% url 'projectmachines' project.id %}">Machines</a></li>
       <li><a href="{% url 'projectlayers' project.id %}">Layers</a></li>
       <li class="nav-header">Extra configuration</li>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/bpackage.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/bpackage.html
index d775fec..81973cb 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/bpackage.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/bpackage.html
@@ -2,6 +2,7 @@
 
 {% load projecttags %}
 
+{% block title %} Packages built - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li>Packages</li>
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/builddashboard.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/builddashboard.html
index bab8e38..323bbbb 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/builddashboard.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/builddashboard.html
@@ -2,8 +2,14 @@
 {% load humanize %}
 {% load projecttags %}
 
+{% block title %} {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block parentbreadcrumb %}
-{{build.get_sorted_target_list.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|date:"d/m/y H:i"}})
+{% if build.get_sorted_target_list.count > 0 %}
+  {{build.get_sorted_target_list.0.target}}
+  &nbsp;
+{% endif %}
+
+{%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|date:"d/m/y H:i"}})
 {% endblock %}
 
 {% block buildinfomain %}
@@ -37,19 +43,13 @@
     <span > <i class="icon-warning-sign yellow"></i><strong><a href="#warnings" class="warning show-warnings"> {{build.warnings.count}} warning{{build.warnings.count|pluralize}}</a></strong></span>
 {% endif %}
             <span class="pull-right">Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}</a>
-            <a class="btn {%if build.outcome == build.SUCCEEDED%}btn-success{%else%}btn-danger{%endif%} pull-right log" href="{% url 'build_artifact' build.id "cookerlog" build.id %}">Download build log</a>
+            {% if build.cooker_log_path %}
+                <a class="btn {%if build.outcome == build.SUCCEEDED%}btn-success{%else%}btn-danger{%endif%} pull-right log" href="{% url 'build_artifact' build.id "cookerlog" build.id %}">Download build log</a>
+            {% endif %}
             </span>
 
 {%endif%}
     </div>
-    {% if build.toaster_exceptions.count > 0 %}
-    <div class="row">
-        <small class="pull-right">
-        <i class="icon-question-sign get-help get-help-blue" title="" data-original-title="Toaster exceptions do not affect your build: only the operation of Toaster"></i>
-        <a class="show-exceptions" href="#exceptions">Toaster threw {{build.toaster_exceptions.count}} exception{{build.toaster_exceptions.count|pluralize}}</a>
-    </small>
-    </div>
-    {% endif %}
   </div>
 </div>
 
@@ -67,11 +67,10 @@
     <div class="accordion-body collapse in" id="collapse-errors">
       <div class="accordion-inner">
         <div class="span10">
-          {% for error in logmessages %}{% if error.level == 2 %}
-            <div class="alert alert-error">
+          {% for error in build.errors %}
+            <div class="alert alert-error" data-error="{{ error.id }}">
               <pre>{{error.message}}</pre>
             </div>
-          {% endif %}
           {% endfor %}
         </div>
       </div>
@@ -268,33 +267,6 @@
 </div>
 {% endif %}
 
-
-{% if build.toaster_exceptions.count > 0 %}
-<div class="accordion span10 pull-right" id="exceptions">
-  <div class="accordion-group">
-    <div class="accordion-heading">
-      <a class="accordion-toggle exception toggle-exceptions">
-        <h2 id="exception-toggle">
-          <i class="icon-warning-sign"></i>
-          {{build.toaster_exceptions.count}} Toaster exception{{build.toaster_exceptions.count|pluralize}}
-        </h2>
-      </a>
-    </div>
-    <div class="accordion-body collapse" id="collapse-exceptions">
-      <div class="accordion-inner">
-        <div class="span10">
-          {% for exception in build.toaster_exceptions %}
-            <div class="alert alert-exception">
-              <pre>{{exception.message}}</pre>
-            </div>
-          {% endfor %}
-        </div>
-      </div>
-    </div>
-  </div>
-</div>
-{% endif %}
-
 <script type="text/javascript">
     $(document).ready(function() {
         //show warnings section when requested from the previous page
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/builds.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/builds.html
index c0d0c64..a27a121 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/builds.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/builds.html
@@ -2,8 +2,10 @@
 
 {% load static %}
 {% load projecttags %}
+{% load project_url_tag %}
 {% load humanize %}
 
+{% block title %} All builds - Toaster {% endblock %}
 {% block extraheadcontent %}
 <link rel="stylesheet" href="/static/css/jquery-ui.min.css" type='text/css'>
 <link rel="stylesheet" href="/static/css/jquery-ui.structure.min.css" type='text/css'>
@@ -28,8 +30,6 @@
 
   {% include "mrb_section.html" %}
 
-
-  {% if 1 %}
   <div class="page-header top-air">
      <h1>
       {% if request.GET.filter and objects.paginator.count > 0 or request.GET.search and objects.paginator.count > 0 %}
@@ -56,17 +56,25 @@
         </form>
       </div>
     </div>
-
-
   {% else %}
   {% include "basetable_top.html" %}
         <!-- Table data rows; the order needs to match the order of "tablecols" definitions; and the <td class value needs to match the tablecols clclass value for show/hide buttons to work -->
         {% for build in objects %}
-        <tr class="data">
+        <tr class="data" data-table-build-result="{{ build.id }}">
             <td class="outcome">
-        <a href="{% url "builddashboard" build.id %}">{%if build.outcome == build.SUCCEEDED%}<i class="icon-ok-sign success"></i>{%elif build.outcome == build.FAILED%}<i class="icon-minus-sign error"></i>{%else%}{%endif%}</a> &nbsp;
-        </td>
-            <td class="target">{% for t in build.target_set.all %} <a href="{% url "builddashboard" build.id %}"> {{t.target}} </a> <br />{% endfor %}</td>
+                <a href="{% url "builddashboard" build.id %}">{%if build.outcome == build.SUCCEEDED%}<i class="icon-ok-sign success"></i>{%elif build.outcome == build.FAILED%}<i class="icon-minus-sign error"></i>{%else%}{%endif%}</a> &nbsp;
+            </td>
+            <td class="target">
+                {% for t in build.target_set.all %}
+                    <a href="{% url "builddashboard" build.id %}">
+                        {% if t.task %}
+                            {{t.target}}:{{t.task}}
+                        {% else %}
+                            {{t.target}}
+                        {% endif %}
+                    </a> <br />
+                {% endfor %}
+            </td>
             <td class="machine"><a href="{% url "builddashboard" build.id %}">{{build.machine}}</a></td>
             <td class="started_on"><a href="{% url "builddashboard" build.id %}">{{build.started_on|date:"d/m/y H:i"}}</a></td>
             <td class="completed_on"><a href="{% url "builddashboard" build.id %}">{{build.completed_on|date:"d/m/y H:i"}}</a></td>
@@ -93,8 +101,11 @@
               <a href="{%url "builddashboard" build.id%}#images">{{fstypes|get_dict_value:build.id}}</a>
               {% endif %}
             </td>
-        <td>
-                <a href="{% url 'project' build.project.id %}">{{build.project.name}}</a>
+            <td class="project-name">
+                <a href="{% project_url build.project %}">{{build.project.name}}</a>
+                {% if build.project.is_default %}
+                    <i class="icon-question-sign get-help hover-help" title="" data-original-title="This project shows information about the builds you start from the command line while Toaster is running" style="visibility: hidden;"></i>
+                {% endif %}
             </td>
         </tr>
 
@@ -103,7 +114,6 @@
 
   {% include "basetable_bottom.html" %}
   {% endif %} {# objects.paginator.count #}
-{% endif %} {# empty #}
 </div><!-- end row-fluid-->
 
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/configuration.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/configuration.html
index 3e48991..85d6a62 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/configuration.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/configuration.html
@@ -1,6 +1,7 @@
 {% extends "basebuildpage.html" %}
 {% load projecttags %}
 
+{% block title %} Configuration summary - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li>Configuration</li>
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/configvars.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/configvars.html
index 8a572ae..e40c225 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/configvars.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/configvars.html
@@ -1,6 +1,7 @@
 {% extends "basebuildpage.html" %}
 {% load projecttags %}
 
+{% block title %} BitBake variables - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li>Configuration</li>
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/customise_btn.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/customise_btn.html
new file mode 100644
index 0000000..54d05f9
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/customise_btn.html
@@ -0,0 +1,9 @@
+<button class="btn btn-block layer-exists-{{data.layer_version.id}} customise-btn" style="display:none;" data-recipe="{{data.id}}">
+  Customise
+</button>
+
+<button class="btn btn-block layer-add-{{data.layer_version.id}} layerbtn" data-layer='{ "id": {{data.layer_version.id}}, "name": "{{data.layer_version.layer.name}}", "layerdetailurl": "{% url 'layerdetails' extra.pid data.layer_version.id %}"}' data-directive="add">
+  <i class="icon-plus"></i>
+  Add layer
+</button>
+
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/customrecipe.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/customrecipe.html
new file mode 100644
index 0000000..823bbd8
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/customrecipe.html
@@ -0,0 +1,142 @@
+{% extends "base.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% load static %}
+{% block pagecontent %}
+
+{% include "projecttopbar.html" %}
+
+<script src="{% static 'js/customrecipe.js' %}"></script>
+<script>
+  $(document).ready(function (){
+    var ctx = {
+      tableApiUrl: "{% url 'recipeselectpackages' project.id recipe.pk %}?format=json"
+    };
+
+    try {
+      customRecipePageInit(ctx);
+    } catch (e) {
+      document.write("Sorry, An error has occurred loading this page");
+      console.warn(e);
+    }
+  });
+</script>
+
+<div class="row-fluid span11">
+  <div class="alert alert-success lead" id="image-created-notification" style="margin-top: 15px; display: none">
+    <button type="button" data-dismiss="alert" class="close">x</button>
+    Your custom image <strong>{{recipe.name}}</strong> has been created. You can now add or remove packages as needed.
+  </div>
+  <div class="page-header air">
+    <h1>
+      {{recipe.name}}
+      <small>({{recipe.base_recipe.name}})</small>
+    </h1>
+  </div>
+</div>
+
+<div class="row-fluid span11">
+  <div class="span8">
+    <div class="button-place btn-group" style="width: 100%">
+      <a class="btn btn-large span6" href="#" id="build-custom-image" style="width: 50%">
+        Build {{recipe.name}}
+      </a>
+      <button class="btn btn-large span6" data-toggle="modal" data-target="#download-file" id="download" style="width: 50%">
+      Download recipe file
+    </button>
+  </div>
+  <div id="no-package-results" class="air" style="display:none;">
+    <div class="alert">
+      <h3>No packages found</h3>
+      <p>You might consider <a href="all-software-recipes.html">searching the list of recipes</a> instead. If you find a recipe that matches the name of the package you want:</p>
+      <ol>
+        <li>Add the layer providing the recipe to your project</li>
+        <li>Build the recipe</li>
+        <li>Once the build completes, come back to this page and search for the package</li>
+      </ol>
+      <form class="input-append no-results">
+        <input type="text" class="input-xlarge" value="search query">
+          <a href="#" class="add-on btn">
+            <i class="icon-remove"></i>
+          </a>
+          <button class="btn">Search</button>
+          <button class="btn btn-link" id="show-all">Show all packages</button>
+        </form>
+      </div>
+    </div>
+    <div id="packages-table">
+      {% url 'recipeselectpackages' project.id recipe.id as xhr_table_url %}
+      {% with 'recipeselection' as table_name %}
+      {% with 'Add | Remove packages' as  title %}
+
+      <h2>{{title}} (<span class="table-count-{{table_name}}"></span>) </h2>
+
+      {% include "toastertable.html" %}
+      {% endwith %}
+      {% endwith %}
+    </div>
+  </div>
+    <div class="span4 well">
+      <h2 style="margin-bottom:20px;">About {{recipe.name}}</h2>
+
+      <dl>
+        <dt>
+          Approx. packages included
+          <i class="icon-question-sign get-help" title="" data-original-title="The number of packages included is based on information from previous builds and from parsing layers, so we can never be sure it is 100% accurate"></i>
+        </dt>
+        <dd class="no-packages">{{recipe.packages.count}}</dd>
+        <!-- <dt>
+          Approx. package size
+          <i class="icon-question-sign get-help" title="" data-original-title="Package size is based on information from previous builds, so we can never be sure it is 100% accurate"></i>
+        </dt>
+        <dd>244.3 MB</dd>
+        <dt>Last build</dt>
+        <dd>
+          <i class="icon-ok-sign success"></i>
+          <a href="build-dashboard.html">11/06/15 15:22</a>
+        </dd>
+        <dt>Recipe file</dt>
+        <dd>
+          <code>custom-image-name.bb</code>
+          <a href="#download-file" data-toggle="modal"><i class="icon-download-alt" title="" data-original-title="Download recipe file"></i></a>
+          </dd> -->
+        <dt>Layer</dt>
+        <!-- TODO recipe details page -->
+        <dd><a href="{% url 'layerdetails' project.id recipe.base_recipe.layer_version.pk %}">{{recipe.base_recipe.layer_version.layer.name}}</a></dd>
+        <!--<dt>
+          Summary
+        </dt>
+        <dd>
+          <span class="muted">Not set</span>
+          <i class="icon-pencil" data-original-title="" title=""></i>
+        </dd>
+        <dt>
+          Description
+        </dt>
+        <dd>
+          <span class="muted">Not set</span>
+          <i class="icon-pencil" data-original-title="" title=""></i>
+        </dd>
+        <dt>Version</dt>
+        <dd>
+          1.0
+          <i class="icon-pencil" data-original-title="" title=""></i>
+        </dd>
+        <dt>Section</dt>
+        <dd>
+          base
+          <i class="icon-pencil" data-original-title="" title=""></i>
+          <i class="icon-trash" data-original-title="" title=""></i>
+        </dd>
+        <dt>License</dt>
+        <dd>
+          MIT
+          <i class="icon-question-sign get-help" title="" data-original-title="All custom images have their license set to MIT. This is because the license applies only to the recipe (.bb) file, and not to the image itself. To see which licenses apply to the image you must check the license manifest generated with each build"></i>
+          </dd> -->
+      </dl>
+      <i class="icon-trash no-tooltip"></i>
+      <a href="#" class="error" id="delete">Delete custom image</a>
+    </div>
+</div>
+
+  {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/dirinfo.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/dirinfo.html
index a5bc481..ecb46bf 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/dirinfo.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/dirinfo.html
@@ -1,4 +1,5 @@
 {% extends "basebuildpage.html" %}
+{% block title %} Directory structure - {{ target.target }} {{ build.machine }} - {{ build.project.name }} - Toaster {% endblock %}
 {% block extraheadcontent %}
 {% load static %}
 <link rel="stylesheet" href="{% static 'css/jquery.treetable.css' %}" type="text/css">
@@ -103,12 +104,16 @@
             name += '</td>';
         }
         else {
-            name = '<td>';
             if (o.link_to == null) {
-                name += '<i class="icon-file"></i>';
+                namespan = 2;
+                if (o.package == null) {
+                  namespan = 3;
+                }
+                var colspan = 'colspan="' + namespan + '"';
+                name = '<td ' + colspan + '><i class="icon-file"></i>';
             }
             else {
-                name += '<i class="icon-hand-right"></i>';
+                name = '<td><i class="icon-hand-right"></i>';
             }
             name += '&nbsp;' + o.name;
             name += '</td>';
@@ -207,10 +212,10 @@
                     <th>Directory / File</th>
                     <th>Symbolic link to</th>
                     <th>Source package</th>
-                    <th>Size</th>
-                    <th>Permissions</th>
-                    <th>Owner</th>
-                    <th>Group</th>
+                    <th class="narrow-col">Size</th>
+                    <th class="medium-col">Permissions</th>
+                    <th class="narrow-col">Owner</th>
+                    <th class="narrow-col">Group</th>
                 </tr>
             </thead>
             <tbody>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/importlayer.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/importlayer.html
index ce3d724..033f0ae 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/importlayer.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/importlayer.html
@@ -2,6 +2,7 @@
 {% load projecttags %}
 {% load humanize %}
 {% load static %}
+{% block title %} Import layer - {{project.name}} - Toaster {% endblock %}
 {% block pagecontent %}
 
 {% include "projecttopbar.html" %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
index 5b8fd84..8d65f33 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/js-unit-tests.html
@@ -22,9 +22,11 @@
 <script src="{% static 'js/table.js' %}"></script>
 
 <script>
-  var tableUrl = '{% url 'projectlayers' project.pk %}';
+  var ctx = {
+    tableUrl : '{% url 'projectlayers' project.pk %}',
+    projectId : {{project.pk}},
+  }
 </script>
-
 <script src="{% static 'js/tests/test.js' %}"></script>
 
 <div id="qunit"></div>
@@ -34,6 +36,12 @@
 <input type="text" id="projects" placeholder="projects"></input>
 <input type="text" id="machines" placeholder="machines"></input>
 
+<!-- import layer dependency input typeahead -->
+<input type="text" id="layer-dependency" style="display:none"></input>
+<!-- project page input typeaheads -->
+<input type="text" id="layer-add-input" style="display:none"></input>
+<input type="text" id="machine-change-input" style="display:none"></input>
+<!-- import layer dependency input typeahead on layer details edit layer -->
+<input type="text" id="layer-dep-input" style="display:none"></input>
+
 {% endblock %}
-
-
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing.html
index 45e9532..cafaa1a 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing.html
@@ -4,55 +4,69 @@
 {% load projecttags %}
 {% load humanize %}
 
+{% block title %} Welcome to Toaster {% endblock %}
 {% block pagecontent %}
 
-  <div class="container-fluid">
-   <div class="row-fluid">
-    <!-- Empty - no data in database -->
-    <div class="hero-unit span12 well-transparent">
-     <div class="row-fluid">
-      <div class="span6">
-       <h1>
-        This is Toaster
-       </h1>
-       <p>A web interface to <a href="http://www.openembedded.org">OpenEmbedded</a> and <a href="http://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="http://www.yoctoproject.org">Yocto Project</a> build system.</p>
+  {% if BUILD_MODE %}
+    <!-- build mode -->
+    <div class="container-fluid">
+      <div class="row-fluid">
+        <div class="hero-unit span12 well-transparent">
+          <div class="row-fluid">
 
+            <div class="span6">
+              <h1>This is Toaster</h1>
 
-		{% if lvs_nos %}
-		    <p class="hero-actions">
-			    <a class="btn btn-primary btn-large" href="{% url 'newproject' %}">
-				    To start building, create your first Toaster project
-			    </a>
-		    </p>
-		{% else %}
-		    <div class="alert alert-info lead air">
-			    Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can:
-			    <ul>
-			        <li>
-			            <a href="http://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#layer-source">Configure a layer source</a>
-			        </li>
-			        <li>
-			            <a href="{% url 'newproject' %}">Create a project</a>, then import layers
-			        </li>
-			    </ul>
-		    </div>
-	        {% endif %}
+              <p>A web interface to <a href="http://www.openembedded.org">OpenEmbedded</a> and <a href="http://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="http://www.yoctoproject.org">Yocto Project</a> build system.</p>
 
-        <ul class="unstyled">
-            <li>
-                <a href="http://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html">Read the Toaster manual</a>
-            </li>
-            <li>
-                <a href="https://wiki.yoctoproject.org/wiki/Contribute_to_Toaster">Contribute to Toaster</a>
-            </li>
-        </ul>
+		          {% if lvs_nos %}
+		            <p class="hero-actions">
+		              <a class="btn btn-primary btn-large" href="{% url 'newproject' %}">
+			              To start building, create your first Toaster project
+		              </a>
+		            </p>
+		          {% else %}
+                <div class="alert alert-info lead air">
+                  Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can:
+                  <ul>
+                    <li>
+                      <a href="http://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#layer-source">Configure a layer source</a>
+                    </li>
+                    <li>
+			                <a href="{% url 'newproject' %}">Create a project</a>, then import layers
+                    </li>
+                  </ul>
+                </div>
+              {% endif %}
 
+              <ul class="unstyled">
+                <li>
+                  <a href="http://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html">
+                    Read the Toaster manual
+                  </a>
+                </li>
+
+                <li>
+                  <a href="https://wiki.yoctoproject.org/wiki/Contribute_to_Toaster">
+                    Contribute to Toaster
+                  </a>
+                </li>
+              </ul>
+            </div>
+
+            <div class="span6">
+              <img alt="Yocto Project" class="thumbnail" src="{% static 'img/toaster_bw.png' %}"/>
+            </div>
+
+          </div>
+        </div>
       </div>
-      <div class="span6">
-          <img alt="Yocto Project" class="thumbnail" src="{% static 'img/toaster_bw.png' %}"/>
-      </div>
-     </div>
     </div>
-   </div>
+  {% else %}
+    <!-- analysis mode -->
+    <div class="alert alert-info lead top-air">
+      Toaster has not recorded any builds yet. Run a build from the command line to see it here.
+    </div>
+  {% endif %}
 
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html
index 5bc435d..9b37f55 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/landing_not_managed.html
@@ -4,6 +4,8 @@
 {% load projecttags %}
 {% load humanize %}
 
+{% block title %} Welcome to Toaster {% endblock %}
+
 {% block pagecontent %}
 
   <div class="container-fluid">
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/layer_btn.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/layer_btn.html
index a2e9393..314eec7 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/layer_btn.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/layer_btn.html
@@ -1,8 +1,16 @@
-<button class="btn btn-danger btn-block layer-exists-{{data.pk}} layerbtn" style="display:none;" data-layer='{ "id": {{data.pk}}, "name":  "{{data.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.pk%}"}' data-directive="remove" >
+<button class="btn btn-danger btn-block layer-exists-{{data.pk}} layerbtn"  data-layer='{ "id": {{data.pk}}, "name":  "{{data.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.pk%}"}' data-directive="remove"
+    {% if data.pk not in extra.current_layers %}
+    style="display:none;"
+    {% endif %}
+  >
   <i class="icon-trash"></i>
   Delete layer
 </button>
-<button class="btn btn-block layer-add-{{data.pk}} layerbtn" data-layer='{ "id": {{data.pk}}, "name":  "{{data.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.pk%}"}' data-directive="add">
+<button class="btn btn-block layer-add-{{data.pk}} layerbtn" data-layer='{ "id": {{data.pk}}, "name":  "{{data.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.pk%}"}' data-directive="add"
+    {% if data.pk in extra.current_layers %}
+    style="display:none;"
+    {% endif %}
+  >
   <i class="icon-plus"></i>
   Add layer
 </button>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
index 7dd3db2..7fe365d 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
@@ -3,6 +3,7 @@
 {% load humanize %}
 {% load static %}
 
+{% block title %} {{layerversion.layer.name}} - {{project.name}} - Toaster {% endblock %}
 {% block pagecontent %}
 
 <div class="section">
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/mrb_section.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/mrb_section.html
index 396fb8e..bd8f991 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/mrb_section.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/mrb_section.html
@@ -1,43 +1,68 @@
 {% load static %}
 {% load projecttags %}
+{% load project_url_tag %}
 {% load humanize %}
 
+{%if mru and mru.count > 0%}
 
-{%if mru.count > 0%}
+  {%if mrb_type == 'project' %}
+      <h2>
+      Latest project builds
 
-  <div class="page-header">
+      {% if project.is_default %}
+          <i class="icon-question-sign get-help heading-help" title="" data-original-title="Builds in this project cannot be started from Toaster: they are started from the command line"></i>
+      {% endif %}
+      </h2>
+  {% else %}
+    <div class="page-header">
       <h1>
-          Latest builds
-       </h1>
-  </div>
+      Latest builds
+      </h1>
+    </div>
+  {% endif %}
   <div id="latest-builds">
   {% for build in mru %}
-    <div class="alert {%if build.outcome == build.SUCCEEDED%}alert-success{%elif build.outcome == build.FAILED%}alert-error{%else%}alert-info{%endif%} project-name ">
-       <span class="label {%if build.outcome == build.SUCCEEDED%}label-success{%elif build.outcome == build.FAILED%}label-important{%else%}label-info{%endif%}">
-         <a href={% url 'project' build.project.pk %}>
-           {{build.project.name}}
-         </a>
-       </span>
-
+  <div data-latest-build-result="{{ build.id }}" class="alert build-result {%if build.outcome == build.SUCCEEDED%}alert-success{%elif build.outcome == build.FAILED%}alert-error{%else%}alert-info{%endif%}{% if mrb_type != 'project' %} project-name{% endif %}">
+      {% if mrb_type != 'project' %}
+        <span class="label {%if build.outcome == build.SUCCEEDED%}label-success{%elif build.outcome == build.FAILED%}label-important{%else%}label-info{%endif%}">
+            <a href={% project_url build.project %}>
+                {{build.project.name}}
+            </a>
+        </span>
+      {% endif %}
         <div class="row-fluid">
-            <div class="span3 lead">
+          <div class="span3 lead">
     {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
-                <a href="{%url 'builddashboard' build.pk%}" class="{%if build.outcome == build.SUCCEEDED %}success{%else%}error{%endif%}">
+              <a href="{%url 'builddashboard' build.pk%}" class="{%if build.outcome == build.SUCCEEDED %}success{%else%}error{%endif%}">
     {% endif %}
             {% if build.target_set.all.count > 0 %}
                 <span data-toggle="tooltip"
-                  {%if build.target_set.all.count > 1%}
-                    title="Targets: {%for target in build.target_set.all%}{{target.target}} {%endfor%}"
-                  {%endif%}
+                  {% if build.target_set.all.count > 1 %}
+                    title="Targets:
+                    {% for target in build.target_set.all %}
+                        {% if target.task %}
+                            {{target.target}}:{{target.task}}
+                        {% else %}
+                            {{target.target}}
+                        {% endif %}
+                    {% endfor %}"
+                  {% endif %}
                 >
-
-                  {{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%}
+                {% if build.target_set.all.0.task %}
+                    {{build.target_set.all.0.target}}:{{build.target_set.all.0.task}}
+                {% else %}
+                    {{build.target_set.all.0.target}}
+                {% endif %}
+                {% if build.target_set.all.count > 1 %}
+                    (+ {{build.target_set.all.count|add:"-1"}})
+                {% endif %}
                 </span>
              {% endif %}
     {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
                 </a>
     {% endif %}
             </div>
+    {% if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
             <div class="span2 lead">
                 {% if build.completed_on|format_build_date  %}
                     {{ build.completed_on|date:'d/m/y H:i' }}
@@ -45,6 +70,7 @@
                     {{ build.completed_on|date:'H:i' }}
                 {% endif %}
             </div>
+    {% endif %}
     {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
             <div class="span2 lead">
       {% if  build.errors.count %}
@@ -58,28 +84,41 @@
             </div>
             <div class="lead ">
               <span class="lead">
-                Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}</a>
+                  Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent_seconds|sectohms }}</a>
               </span>
-              <button class="btn
+              {% if build.project.is_default %}
+                  <i class="pull-right icon-question-sign get-help
                   {% if build.outcome == build.SUCCEEDED %}
-                      btn-success
+                      get-help-green
                   {% elif build.outcome == build.FAILED %}
-                      btn-danger
+                      get-help-red
                   {% else %}
-                      btn-info
-                  {%endif%}
-                  pull-right"
-                  onclick='scheduleBuild({% url 'projectbuilds' build.project.id as bpi %}{{bpi|json}},
-                    {{build.project.name|json}},
-                    {% url 'project' build.project.id as bpurl %}{{bpurl|json}},
-                    {{build.target_set.all|get_tasks|json}})'>
+                      get-help-blue
+                  {% endif %}
+                  " title="Builds in this project cannot be started from Toaster: they are started from the command line">
+                  </i>
+              {% else %}
+                  <button class="btn
+                      {% if build.outcome == build.SUCCEEDED %}
+                          btn-success
+                      {% elif build.outcome == build.FAILED %}
+                          btn-danger
+                      {% else %}
+                          btn-info
+                      {%endif%}
+                      pull-right"
+                      onclick='scheduleBuild({% url 'projectbuilds' build.project.id as bpi %}{{bpi|json}},
+                        {{build.project.name|json}},
+                        {% url 'project' build.project.id as purl %}{{purl|json}},
+                        {{build.target_set.all|get_tasks|json}})'>
 
-                    Run again
-              </button>
+                        Run again
+                  </button>
+              {% endif %}
             </div>
     {%endif%}
     {%if build.outcome == build.IN_PROGRESS %}
-            <div class="span4">
+            <div class="span4 offset1">
                 <div class="progress" style="margin-top:5px;" data-toggle="tooltip" title="{{build.completeper}}% of tasks complete">
                     <div style="width: {{build.completeper}}%;" class="bar"></div>
                 </div>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/newcustomimage.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/newcustomimage.html
new file mode 100644
index 0000000..4487b3e
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/newcustomimage.html
@@ -0,0 +1,54 @@
+{% extends "base.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% load static %}
+{% block pagecontent %}
+
+<script src="{% static 'js/newcustomimage.js' %}"></script>
+<script>
+  $(document).ready(function (){
+    var ctx = {
+      xhrCustomRecipeUrl : "{% url 'xhr_customrecipe' %}",
+    };
+
+    try {
+      newCustomImagePageInit(ctx);
+    } catch (e) {
+      document.write("Sorry, An error has occurred loading this page");
+      console.warn(e);
+    }
+  });
+</script>
+
+</script>
+<div class="modal hide fade in" id="new-custom-image-modal" aria-hidden="false">
+  <div class="modal-header">
+    <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+    <h3>Name your custom image</h3>
+  </div>
+  <div class="modal-body">
+    <div class="row-fluid">
+      <span class="help-block span8">Image names must be unique. They should not contain spaces or capital letters, and the only allowed special character is dash (-).<p></p>
+      </span></div>
+    <div class="control-group controls">
+      <input type="text" class="huge span5" placeholder="Type the name, something like 'core-image-myimage'">
+        <span class="help-block" style="display:none">Image names cannot contain spaces or capital letters. The only allowed special character is dash (-)</span>
+        <span class="help-block" style="display: none">An image with this name already exists. Image names must be unique: try a different one.</span>
+      </div>
+    </div>
+    <div class="modal-footer">
+      <a href="#" id="create-new-custom-image-btn" class="btn btn-primary btn-large" data-original-title="" title="">Create custom image</a>
+    </div>
+</div>
+
+{% include "projecttopbar.html" %}
+
+
+{% url table_name project.id as xhr_table_url %}
+{% include "toastertable.html" %}
+
+
+
+{% endblock %}
+
+
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/newproject.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/newproject.html
index 997390b..e83b2be 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/newproject.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/newproject.html
@@ -1,6 +1,9 @@
 {% extends "base.html" %}
 {% load projecttags %}
 {% load humanize %}
+
+{% block title %} Create a new project - Toaster {% endblock %}
+
 {% block pagecontent %}
 <div class="row-fluid">
     <div class="page-header">
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
index a24bc8e..9fa28a8 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
@@ -1,6 +1,13 @@
 {% extends "basebuilddetailpage.html" %}
 {% load projecttags %}
 
+{% block title %}
+  {% if target %}
+    {{package.fullpackagespec}} - {{ target.target }} {{ build.machine }} - {{ build.project.name }} - Toaster
+  {% else %}
+    {{package.fullpackagespec}} - {{ build.target_set.all|dictsort:"target"|join:", " }} {{ build.machine }} - {{ build.project.name }} - Toaster
+  {% endif %}
+{% endblock %}
 {% block extraheadcontent %}
     <!-- functions to format package 'installed_package' alias -->
     <script>
@@ -38,9 +45,9 @@
 {% block pagedetailinfomain %}
     <div class="row span11">
         <div class="page-header">
-            {% block title %}
+            {% block mainheading %}
             <h1>{{package.fullpackagespec}}</h1>
-            {% endblock title %}
+            {% endblock %}
         </div> <!-- page-header -->
     </div> <!-- row span11 page-header -->
 
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
index 642ca69..8a0508e 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_dependencies.html
@@ -1,13 +1,13 @@
 {% extends "package_detail_base.html" %}
 {% load projecttags %}
 
-{% block title %}
+{% block mainheading %}
     <h1>
         {{package.fullpackagespec}}
         <script> fmtAliasHelp("{{package.name}}", "{{package.alias}}", false) </script>
         <small>({{target.target}})</small>
     </h1>
-{% endblock title %}
+{% endblock %}
 
 {% block tabcontent %}
     {% with packageFileCount=package.buildfilelist_package.count %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_detail.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_detail.html
index d2aa26e..568e2f2 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_detail.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_detail.html
@@ -1,7 +1,7 @@
 {% extends "package_detail_base.html" %}
 {% load projecttags %}
 
-{% block title %}
+{% block mainheading %}
         <h1>
             {{package.fullpackagespec}}
             <script>
@@ -9,7 +9,7 @@
             </script>
             <small>({{target.target}})</small>
         </h1>
-{% endblock title %}
+{% endblock %}
 
 {% block tabcontent %}
 {% with packageFileCount=package.buildfilelist_package.count %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
index 5cc8b47..fb310c7 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
@@ -1,13 +1,13 @@
 {% extends "package_detail_base.html" %}
 {% load projecttags %}
 
-{% block title %}
+{% block mainheading %}
         <h1>
             {{package.fullpackagespec}}
             <script> fmtAliasHelp("{{package.name}}", "{{package.alias}}", false) </script>
             <small>({{target.target}})</small>
         </h1>
-{% endblock title %}
+{% endblock %}
 
 {% block tabcontent %}
     {% with packageFileCount=package.buildfilelist_package.count %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/pkg_add_rm_btn.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/pkg_add_rm_btn.html
new file mode 100644
index 0000000..b766aea
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/pkg_add_rm_btn.html
@@ -0,0 +1,16 @@
+<button class="btn btn-block btn-danger add-rm-package-btn" id="package-rm-btn-{{data.pk}}" data-directive="remove" data-package="{{data.pk}}" data-package-url="{% url 'xhr_customrecipe_packages' extra.recipe_id data.pk %}" style="
+  {% if data.pk not in extra.current_packages %}
+    display:none
+  {% endif %}
+  ">
+  <i class="icon-trash no-tooltip"></i>
+  Remove package
+</button>
+<button class="btn btn-block add-rm-package-btn" data-directive="add" id="package-add-btn-{{data.pk}}" data-package="{{data.pk}}" data-package-url="{% url 'xhr_customrecipe_packages' extra.recipe_id data.pk %}" style="
+  {% if data.pk in extra.current_packages %}
+    display:none
+  {% endif %}
+    ">
+<i class="icon-plus"></i>
+ Add package
+</button>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/project.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/project.html
index e8354fd..4e83981 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/project.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/project.html
@@ -4,6 +4,7 @@
 {% load humanize %}
 {% load static %}
 
+{% block title %} Configuration - {{project.name}} - Toaster {% endblock %}
 {% block projectinfomain %}
 
 <script src="{% static 'js/layerDepsModal.js' %}"></script>
@@ -67,7 +68,7 @@
 
       <div class="alert alert-info" style="display:none" id="no-most-built">
         <span class="lead">You haven't built any recipes yet</span>
-        <p style="margin-top: 10px;"><a href="{% url 'projecttargets' project.id %}">Choose a recipe to build</a></p>
+        <p style="margin-top: 10px;"><a href="{% url 'projectsoftwarerecipes' project.id %}">Choose a recipe to build</a></p>
       </div>
 
       <ul class="unstyled configuration-list" id="freq-build-list">
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectbuilds.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectbuilds.html
index 27cfcd7..bb38284 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectbuilds.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectbuilds.html
@@ -2,7 +2,7 @@
 {% load projecttags %}
 {% load humanize %}
 
-
+{% block title %} Builds - {{project.name}} - Toaster {% endblock %}
 {% block extraheadcontent %}
 <link rel="stylesheet" href="/static/css/jquery-ui.min.css" type='text/css'>
 <link rel="stylesheet" href="/static/css/jquery-ui.structure.min.css" type='text/css'>
@@ -21,13 +21,17 @@
     });
 </script>
 
+ {% with mrb_type='project' %}
+     {% include "mrb_section.html" %}
+ {% endwith %}
+
       <h2>
         {% if request.GET.filter and objects.paginator.count > 0 or request.GET.search and objects.paginator.count > 0 %}
-            {{objects.paginator.count}} build{{objects.paginator.count|pluralize}} found
+            {{objects.paginator.count}} project build{{objects.paginator.count|pluralize}} found
         {%elif request.GET.filter and objects.paginator.count == 0 or request.GET.search and objects.paginator.count == 0 %}
-            No builds found
+            No project builds found
         {%else%}
-            Project builds
+            All project builds
         {%endif%}
         <i class="icon-question-sign get-help heading-help" title="This page lists all the builds for the current project"></i>
       </h2>
@@ -66,7 +70,17 @@
                     {% endif %}
             </td>
 
-            <td class="target">{% for t in build.target_set.all %} <a href="{% url "builddashboard" build.id %}"> {{t.target}} </a> <br />{% endfor %}</td>
+            <td class="target">
+                {% for t in build.target_set.all %}
+                    <a href="{% url "builddashboard" build.id %}">
+                        {% if t.task %}
+                            {{t.target}}:{{t.task}}
+                        {% else %}
+                            {{t.target}}
+                        {% endif %}
+                    </a> <br />
+                {% endfor %}
+            </td>
             <td class="machine"><a href="{% url "builddashboard" build.id %}">{{build.machine}}</a></td>
             <td class="started_on"><a href="{% url "builddashboard" build.id %}">{{build.started_on|date:"d/m/y H:i"}}</a></td>
             <td class="completed_on"><a href="{% url "builddashboard" build.id %}">{{build.completed_on|date:"d/m/y H:i"}}</a></td>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectconf.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectconf.html
index 4c5a188..30fd03e 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectconf.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projectconf.html
@@ -2,7 +2,7 @@
 {% load projecttags %}
 {% load humanize %}
 
-
+{% block title %} BitBake variables - {{project.name}} - Toaster {% endblock %}
 {% block projectinfomain %}
 
 <h2>Bitbake variables</h2>
@@ -43,6 +43,7 @@
                     <input id="filter-image_fstypes" type="text" placeholder="Search image types" class="span4">
                     <div id="all-image_fstypes" class="scrolling">
                     </div>
+                    <span class="help-block" id="fstypes-error-message">You must select at least one image type</span>
                     <button id="apply-change-image_fstypes" type="button" class="btn">Save</button>
                     <button id="cancel-change-image_fstypes" type="button" class="btn btn-link">Cancel</button>
                 </form>
@@ -312,9 +313,11 @@
             });
             if ( 0 == any_checked ) {
                 $("#apply-change-image_fstypes").attr("disabled","disabled");
+                $('#fstypes-error-message').show();
             }
             else {
                 $("#apply-change-image_fstypes").removeAttr("disabled");
+                $('#fstypes-error-message').hide();
             }
         }
 
@@ -546,10 +549,14 @@
                 // Add the un-checked boxes second
                 for (var i = 0, length = fstypes_list.length; i < length; i++) {
                     if (0  > fstypes.indexOf(" "+fstypes_list[i].value+" ")) {
-                            html += '<label class="checkbox"><input type="checkbox" class="fs-checkbox-fstypes" value="'+fstypes_list[i].value+'">'+fstypes_list[i].value+'</label>\n';
+                        html += '<label class="checkbox"><input type="checkbox" class="fs-checkbox-fstypes" value="'+fstypes_list[i].value+'">'+fstypes_list[i].value+'</label>\n';
                     }
                 }
+                // Add the 'no search matches' line last
+                html += '<label id="no-match-fstypes">No image types found</label>\n';
+                // Display the list
                 document.getElementById("all-image_fstypes").innerHTML = html;
+                $('#no-match-fstypes').hide();
 
                 // Watch elements to disable Save when none are checked
                 $(".fs-checkbox-fstypes").each(function(){
@@ -558,8 +565,9 @@
                     });
                 });
 
-                // clear the previous filter values
+                // clear the previous filter values and warning messages
                 $("input#filter-image_fstypes").val("");
+                $('#fstypes-error-message').hide();
             });
 
             $('#cancel-change-image_fstypes').click(function(){
@@ -569,17 +577,24 @@
             });
 
             $('#filter-image_fstypes').on('input', function(){
-                   var valThis = $(this).val().toLowerCase();
+                var valThis = $(this).val().toLowerCase();
+                var matchCount=0;
                 $('#all-image_fstypes label').each(function(){
                     var text = $(this).text().toLowerCase();
                     var match = text.indexOf(valThis);
                     if (match >= 0) {
                         $(this).show();
+                        matchCount += 1;
                     }
                     else {
                         $(this).hide();
                     }
                 });
+                if (matchCount === 0) {
+                   $('#no-match-fstypes').show();
+                } else {
+                   $('#no-match-fstypes').hide();
+                }
             });
 
             $('#apply-change-image_fstypes').click(function(){
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projects.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projects.html
index c2d77b5..678a796 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projects.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projects.html
@@ -2,8 +2,11 @@
 
 {% load static %}
 {% load projecttags %}
+{% load project_url_tag %}
 {% load humanize %}
 
+{% block title %} All projects - Toaster {% endblock %}
+
 {% block pagecontent %}
 
 
@@ -36,17 +39,29 @@
   {% else %} {# We have builds to display #}
   {% include "basetable_top.html" %}
   {% for o in objects %}
-    <tr class="data">
-      <td><a href="{% url 'project' o.id %}">{{o.name}}</a></td>
-      <td class="updated"><a href="{% url 'project' o.id %}">{{o.updated|date:"d/m/y H:i"}}</a></td>
-      <td>
+    <tr class="data" data-project="{{ o.id }}">
+      <td data-project-field="name">
+          <a href="{% project_url o %}">{{o.name}}</a>
+      </td>
+      <td class="updated"><a href="{% project_url o %}">{{o.updated|date:"d/m/y H:i"}}</a></td>
+      <td data-project-field="release">
         {% if o.release %}
             <a href="{% url 'project' o.id %}#project-details">{{o.release.name}}</a>
+        {% elif o.is_default %}
+            <span class="muted">Not applicable</span>
+            <i class="icon-question-sign get-help hover-help" title="" data-original-title="This project does not have a release set. It simply collects information about the builds you start from the command line while Toaster is running" style="visibility: hidden;"></i>
         {% else %}
             No release available
         {% endif %}
       </td>
-      <td><a href="{% url 'project' o.id %}#machine-distro">{{o.get_current_machine_name}}</a></td>
+      <td data-project-field="machine">
+        {% if o.is_default %}
+            <span class="muted">Not applicable</span>
+            <i class="icon-question-sign get-help hover-help" title="" data-original-title="This project does not have a machine set. It simply collects information about the builds you start from the command line while Toaster is running" style="visibility: hidden;"></i>
+        {% else %}
+            <a href="{% url 'project' o.id %}#machine-distro">{{o.get_current_machine_name}}</a>
+        {% endif %}
+      </td>
       {% if o.get_number_of_builds == 0 %}
       <td class="muted">{{o.get_number_of_builds}}</td>
       <td class="loutcome"></td>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projecttopbar.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projecttopbar.html
index ca2741d..ee86b54 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/projecttopbar.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/projecttopbar.html
@@ -1,12 +1,18 @@
 <div class="alert alert-success lead" id="project-created-notification" style="margin-top:15px; display:none">
   <button type="button" class="close" data-dismiss="alert">×</button>
-  Your project <strong>{{project.name}}</strong> has been created. You can now <a href="{% url 'projectmachines' project.id %}">select your target machine</a> and <a href="{% url 'projecttargets' project.id %}">choose image recipes</a> to build.
+  Your project <strong>{{project.name}}</strong> has been created. You can now <a href="{% url 'projectmachines' project.id %}">select your target machine</a> and <a href="{% url 'projectsoftwarerecipes' project.id %}">choose image recipes</a> to build.
 </div>
 
 <!-- project name -->
 <div class="page-header">
-  <h1><span id="project-name">{{project.name}}</span>
+  <h1 id="project-name-container">
+    <span id="project-name">{{project.name}}</span>
+
     <i class="icon-pencil" data-original-title="" id="project-change-form-toggle" title=""></i>
+
+    {% if project.is_default %}
+        <i class="icon-question-sign get-help heading-help" title="" data-original-title="This project shows information about the builds you start from the command line while Toaster is running"></i>
+    {% endif %}
   </h1>
   <form id="project-name-change-form" style="margin-bottom: 0px; display: none;">
     <div class="input-append">
@@ -17,31 +23,40 @@
   </form>
 </div>
 
-<div id="project-topbar">
-  <ul class="nav nav-pills">
-    <li>
-      <a href="{% url 'projectbuilds' project.id %}">
-        Builds (<span class="total-builds">0</span>)
-      </a>
-    </li>
-    <li id="topbar-configuration-tab">
-      <a href="{% url 'project' project.id %}">
-        Configuration
-      </a>
-    </li>
-    <li>
-      <a href="{% url 'importlayer' project.id %}">
-        Import layer
-      </a>
-    </li>
-    <li class="pull-right">
-      <form class="form-inline" style="margin-bottom:0px;">
-        <i class="icon-question-sign get-help heading-help" data-placement="left" title="" data-original-title="Type the name of one or more recipes you want to build, separated by a space. You can also specify a task by appending a semicolon and a task name to the recipe name, like so: <code>busybox:clean</code>"></i>
-        <div class="input-append">
-          <input id="build-input" type="text" class="input-xlarge input-lg build-target-input" placeholder="Type the recipe you want to build" autocomplete="off" disabled>
-          <button id="build-button" class="btn btn-primary btn-large build-button" data-project-id="{{project.id}}" disabled>Build</button>
-        </div>
-      </form>
-    </li>
-  </ul>
-</div>
+{% if not project.is_default %}
+  <div id="project-topbar">
+    <ul class="nav nav-pills">
+      <li>
+        <a href="{% url 'projectbuilds' project.id %}">
+          Builds (<span class="total-builds">0</span>)
+        </a>
+      </li>
+      <li id="topbar-configuration-tab">
+        <a href="{% url 'project' project.id %}">
+          Configuration
+        </a>
+      </li>
+      <li>
+        <a href="{% url 'importlayer' project.id %}">
+          Import layer
+        </a>
+      </li>
+      {% if CUSTOM_IMAGE %}
+      <li>
+        <a href="{% url 'newcustomimage' project.id %}">
+          New custom image
+        </a>
+      </li>
+      {% endif %}
+      <li class="pull-right">
+        <form class="form-inline" style="margin-bottom:0px;">
+          <i class="icon-question-sign get-help heading-help" data-placement="left" title="" data-original-title="Type the name of one or more recipes you want to build, separated by a space. You can also specify a task by appending a semicolon and a task name to the recipe name, like so: <code>busybox:clean</code>"></i>
+          <div class="input-append">
+            <input id="build-input" type="text" class="input-xlarge input-lg build-target-input" placeholder="Type the recipe you want to build" autocomplete="off" disabled>
+            <button id="build-button" class="btn btn-primary btn-large build-button" data-project-id="{{project.id}}" disabled>Build</button>
+          </div>
+        </form>
+      </li>
+    </ul>
+  </div>
+{% endif %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe.html
index b5e4192..c6ae2f3 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe.html
@@ -2,6 +2,7 @@
 
 {% load projecttags %}
 
+{% block title %} {{object.name}}_{{object.version}} - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li><a href="{% url 'recipes' build.pk %}">Recipes</a></li>
 <li>{{object.name}}_{{object.version}} </li>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html
index 77c1b23..baab06e 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html
@@ -1,7 +1,15 @@
-<button data-recipe-name="{{data.name}}" class="btn btn-block layer-exists-{{data.layer_version.pk}} build-recipe-btn" style="display:none; margin-top: 5px;" >
+<button data-recipe-name="{{data.name}}" class="btn btn-block layer-exists-{{data.layer_version.pk}} build-recipe-btn" style="margin-top: 5px;
+  {% if data.layer_version.pk not in extra.current_layers %}
+    display:none;
+  {% endif %}"
+ >
   Build recipe
 </button>
-<button class="btn btn-block layerbtn layer-add-{{data.layer_version.pk}}" data-layer='{ "id": {{data.layer_version.pk}}, "name":  "{{data.layer_version.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.layer_version.pk%}"}' data-directive="add">
+<button class="btn btn-block layerbtn layer-add-{{data.layer_version.pk}}" data-layer='{ "id": {{data.layer_version.pk}}, "name":  "{{data.layer_version.layer.name}}", "layerdetailurl": "{%url 'layerdetails' extra.pid data.layer_version.pk%}"}' data-directive="add"
+    {% if data.layer_version.pk in extra.current_layers %}
+     style="display:none;"
+    {% endif %}
+>
   <i class="icon-plus"></i>
   Add layer
   <i title="" class="icon-question-sign get-help" data-original-title="To build this target, you must first add the {{data.layer_version.layer.name}} layer to your project"></i>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipes.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipes.html
index 5cdac43..d144893 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipes.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipes.html
@@ -2,6 +2,7 @@
 
 {% load projecttags %}
 
+{% block title %} Recipes - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li>Recipes</li>
 {% endblock %}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/target.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/target.html
index 65e6c4a..4c33eaa 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/target.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/target.html
@@ -1,4 +1,5 @@
 {% extends "basebuildpage.html" %}
+{% block title %} Packages included - {{ target.target }} {{ target.build.machine }} - {{ target.build.project.name }} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li>{{target.target}}</li>
 {% endblock localbreadcrumb%}
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/task.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/task.html
index 635098a..ef628d9 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/task.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/task.html
@@ -3,6 +3,7 @@
 {% load projecttags %}
 {% load humanize %}
 
+{% block title %} {{task.recipe.name}}_{{task.recipe.version}} {{task.task_name}} - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster {% endblock %}
 {% block localbreadcrumb %}
 <li><a href="{% url 'tasks' build.pk %}">Tasks</a></li>
 <li>{{task.recipe.name}}_{{task.recipe.version}} {{task.task_name}}</li>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/tasks.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/tasks.html
index b18b5c7..353410f 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/tasks.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/tasks.html
@@ -1,33 +1,34 @@
 {% extends "basebuildpage.html" %}
 {% load projecttags %}
 
+{% block title %} {{mainheading}} - {{build.target_set.all|dictsort:"target"|join:", "}} {{build.machine}} - {{build.project.name}} - Toaster{% endblock %}
 {% block localbreadcrumb %}
-<li>{{title}}</li>
+<li>{{mainheading}}</li>
 {% endblock %}
 
 {% block nav-tasks %}
-  {% if 'Tasks' == title %}
+  {% if 'Tasks' == mainheading %}
     <li class="active"><a href="{% url 'tasks' build.pk %}">Tasks</a></li>
   {% else %}
     <li><a href="{% url 'tasks' build.pk %}">Tasks</a></li>
   {% endif %}
 {% endblock %}
 {% block nav-buildtime %}
-  {% if 'Time' == title %}
+  {% if 'Time' == mainheading %}
     <li class="active"><a href="{% url 'buildtime' build.pk %}">Time</a></li>
   {% else %}
     <li><a href="{% url 'buildtime' build.pk %}">Time</a></li>
   {% endif %}
 {% endblock %}
 {% block nav-cpuusage %}
-  {% if 'CPU usage' == title %}
+  {% if 'CPU usage' == mainheading %}
     <li class="active"><a href="{% url 'cpuusage' build.pk %}">CPU usage</a></li>
   {% else %}
     <li><a href="{% url 'cpuusage' build.pk %}">CPU usage</a></li>
   {% endif %}
 {% endblock %}
 {% block nav-diskio %}
-  {% if 'Disk I/O' == title %}
+  {% if 'Disk I/O' == mainheading %}
     <li class="active"><a href="{% url 'diskio' build.pk %}">Disk I/O</a></li>
   {% else %}
     <li><a href="{% url 'diskio' build.pk %}">Disk I/O</a></li>
@@ -39,7 +40,7 @@
 {% if not request.GET.filter and not request.GET.search and not objects.paginator.count %}
   <!-- Empty - no data in database -->
   <div class="page-header">
-  <h1>{{title}}</h1>
+  <h1>{{mainheading}}</h1>
   </div>
   <div class="alert alert-info lead">
   No data was recorded for this build.
@@ -54,7 +55,7 @@
   {%elif request.GET.filter and objects.paginator.count == 0 or request.GET.search and objects.paginator.count == 0 %}
       No tasks found
   {%else%}
-      {{title}}
+      {{mainheading}}
   {%endif%}
   </h1>
   </div>
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/toastertable.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/toastertable.html
index 9ef4c6f..98a715f 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/toastertable.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/toastertable.html
@@ -12,7 +12,6 @@
       tableName : "{{table_name}}",
       url : "{{ xhr_table_url }}?format=json",
       title : "{{title}}",
-      projectLayers : {{projectlayers|json}},
     };
 
     try {
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templates/unavailable_artifact.html b/yocto-poky/bitbake/lib/toaster/toastergui/templates/unavailable_artifact.html
index b9f8fee..0301a6c 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/templates/unavailable_artifact.html
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templates/unavailable_artifact.html
@@ -3,6 +3,8 @@
 {% load humanize %}
 {% load static %}
 
+{% block title %} Build artifact no longer exists - Toaster {% endblock %}
+
 {% block pagecontent %}
 
 <div class="row-fluid air">
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/templatetags/project_url_tag.py b/yocto-poky/bitbake/lib/toaster/toastergui/templatetags/project_url_tag.py
new file mode 100644
index 0000000..04770ac
--- /dev/null
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/templatetags/project_url_tag.py
@@ -0,0 +1,34 @@
+from django import template
+from django.core.urlresolvers import reverse
+
+register = template.Library()
+
+def project_url(parser, token):
+    """
+    Create a URL for a project's main page;
+    for non-default projects, this is the configuration page;
+    for the default project, this is the project builds page
+    """
+    try:
+        tag_name, project = token.split_contents()
+    except ValueError:
+        raise template.TemplateSyntaxError(
+            "%s tag requires exactly one argument" % token.contents.split()[0]
+        )
+    return ProjectUrlNode(project)
+
+class ProjectUrlNode(template.Node):
+    def __init__(self, project):
+        self.project = template.Variable(project)
+
+    def render(self, context):
+        try:
+            project = self.project.resolve(context)
+            if project.is_default:
+                return reverse('projectbuilds', args=(project.id,))
+            else:
+                return reverse('project', args=(project.id,))
+        except template.VariableDoesNotExist:
+            return ''
+
+register.tag('project_url', project_url)
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/tests.py b/yocto-poky/bitbake/lib/toaster/toastergui/tests.py
index 4d1549b..9e6c46a 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/tests.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/tests.py
@@ -22,15 +22,29 @@
 """Test cases for Toaster GUI and ReST."""
 
 from django.test import TestCase
+from django.test.client import RequestFactory
 from django.core.urlresolvers import reverse
 from django.utils import timezone
-from orm.models import Project, Release, BitbakeVersion, ProjectTarget
+
+from orm.models import Project, Release, BitbakeVersion, Package, LogMessage
 from orm.models import ReleaseLayerSourcePriority, LayerSource, Layer, Build
-from orm.models import Layer_Version, Recipe, Machine, ProjectLayer
+from orm.models import Layer_Version, Recipe, Machine, ProjectLayer, Target
+from orm.models import CustomImageRecipe, ProjectVariable
+from orm.models import Branch
+
+import toastermain
+
+from toastergui.tables import SoftwareRecipesTable
 import json
 from bs4 import BeautifulSoup
+import re
 
 PROJECT_NAME = "test project"
+CLI_BUILDS_PROJECT_NAME = 'Command line builds'
+
+# by default, tests are run in build mode; to run in analysis mode,
+# set this to False in individual test cases
+toastermain.settings.BUILD_MODE = True
 
 class ViewTests(TestCase):
     """Tests to verify view APIs."""
@@ -39,27 +53,58 @@
         bbv = BitbakeVersion.objects.create(name="test bbv", giturl="/tmp/",
                                             branch="master", dirpath="")
         release = Release.objects.create(name="test release",
+                                         branch_name="master",
                                          bitbake_version=bbv)
         self.project = Project.objects.create_project(name=PROJECT_NAME,
                                                       release=release)
+        now = timezone.now()
+
+        build = Build.objects.create(project=self.project,
+                                     started_on=now,
+                                     completed_on=now)
+
         layersrc = LayerSource.objects.create(sourcetype=LayerSource.TYPE_IMPORTED)
         self.priority = ReleaseLayerSourcePriority.objects.create(release=release,
                                                                   layer_source=layersrc)
         layer = Layer.objects.create(name="base-layer", layer_source=layersrc,
                                      vcs_url="/tmp/")
 
-        lver = Layer_Version.objects.create(layer=layer, project=self.project,
-                                            layer_source=layersrc, commit="master")
+        branch = Branch.objects.create(name="master", layer_source=layersrc)
 
-        Recipe.objects.create(layer_source=layersrc, name="base-recipe",
-                              version="1.2", summary="one recipe",
-                              description="recipe", layer_version=lver)
+        lver = Layer_Version.objects.create(layer=layer, project=self.project,
+                                            layer_source=layersrc, commit="master",
+                                            up_branch=branch)
+
+        self.recipe1 = Recipe.objects.create(layer_source=layersrc,
+                                             name="base-recipe",
+                                             version="1.2",
+                                             summary="one recipe",
+                                             description="recipe",
+                                             layer_version=lver)
 
         Machine.objects.create(layer_version=lver, name="wisk",
                                description="wisking machine")
 
         ProjectLayer.objects.create(project=self.project, layercommit=lver)
 
+
+        self.customr = CustomImageRecipe.objects.create(\
+                           name="custom recipe", project=self.project,
+                           base_recipe=self.recipe1)
+
+        self.package = Package.objects.create(name='pkg1', recipe=self.recipe1,
+                                              build=build)
+
+
+        # recipe with project for testing AvailableRecipe table
+        self.recipe2 = Recipe.objects.create(layer_source=layersrc,
+                                             name="fancy-recipe",
+                                             version="1.4",
+                                             summary="a fancy recipe",
+                                             description="fancy recipe",
+                                             layer_version=lver,
+                                             file_path='/home/foo')
+
         self.assertTrue(lver in self.project.compatible_layerversions())
 
     def test_get_base_call_returns_html(self):
@@ -181,6 +226,140 @@
         data = json.loads(response.content)
         self.assertNotEqual(data["error"], "ok")
 
+    def test_custom_ok(self):
+        """Test successful return from ReST API xhr_customrecipe"""
+        url = reverse('xhr_customrecipe')
+        params = {'name': 'custom', 'project': self.project.id,
+                  'base': self.recipe1.id}
+        response = self.client.post(url, params)
+        self.assertEqual(response.status_code, 200)
+        data = json.loads(response.content)
+        self.assertEqual(data['error'], 'ok')
+        self.assertTrue('url' in data)
+        # get recipe from the database
+        recipe = CustomImageRecipe.objects.get(project=self.project,
+                                               name=params['name'])
+        args = (self.project.id, recipe.id,)
+        self.assertEqual(reverse('customrecipe', args=args), data['url'])
+
+    def test_custom_incomplete_params(self):
+        """Test not passing all required parameters to xhr_customrecipe"""
+        url = reverse('xhr_customrecipe')
+        for params in [{}, {'name': 'custom'},
+                       {'name': 'custom', 'project': self.project.id}]:
+            response = self.client.post(url, params)
+            self.assertEqual(response.status_code, 200)
+            data = json.loads(response.content)
+            self.assertNotEqual(data["error"], "ok")
+
+    def test_xhr_custom_wrong_project(self):
+        """Test passing wrong project id to xhr_customrecipe"""
+        url = reverse('xhr_customrecipe')
+        params = {'name': 'custom', 'project': 0, "base": self.recipe1.id}
+        response = self.client.post(url, params)
+        self.assertEqual(response.status_code, 200)
+        data = json.loads(response.content)
+        self.assertNotEqual(data["error"], "ok")
+
+    def test_xhr_custom_wrong_base(self):
+        """Test passing wrong base recipe id to xhr_customrecipe"""
+        url = reverse('xhr_customrecipe')
+        params = {'name': 'custom', 'project': self.project.id, "base": 0}
+        response = self.client.post(url, params)
+        self.assertEqual(response.status_code, 200)
+        data = json.loads(response.content)
+        self.assertNotEqual(data["error"], "ok")
+
+    def test_xhr_custom_details(self):
+        """Test getting custom recipe details"""
+        name = "custom recipe"
+        url = reverse('xhr_customrecipe_id', args=(self.customr.id,))
+        response = self.client.get(url)
+        self.assertEqual(response.status_code, 200)
+        expected = {"error": "ok",
+                    "info": {'id': self.customr.id,
+                             'name': name,
+                             'base_recipe_id': self.recipe1.id,
+                             'project_id': self.project.id,
+                            }
+                   }
+        self.assertEqual(json.loads(response.content), expected)
+
+    def test_xhr_custom_del(self):
+        """Test deleting custom recipe"""
+        name = "to be deleted"
+        recipe = CustomImageRecipe.objects.create(\
+                     name=name, project=self.project,
+                     base_recipe=self.recipe1)
+        url = reverse('xhr_customrecipe_id', args=(recipe.id,))
+        response = self.client.delete(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(json.loads(response.content), {"error": "ok"})
+        # try to delete not-existent recipe
+        url = reverse('xhr_customrecipe_id', args=(recipe.id,))
+        response = self.client.delete(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertNotEqual(json.loads(response.content)["error"], "ok")
+
+    def test_xhr_custom_packages(self):
+        """Test adding and deleting package to a custom recipe"""
+        url = reverse('xhr_customrecipe_packages',
+                      args=(self.customr.id, self.package.id))
+        # add self.package1 to recipe
+        response = self.client.put(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(json.loads(response.content), {"error": "ok"})
+        self.assertEqual(self.customr.packages.all()[0].id, self.package.id)
+        # delete it
+        response = self.client.delete(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(json.loads(response.content), {"error": "ok"})
+        self.assertFalse(self.customr.packages.all())
+        # delete it again to test error condition
+        response = self.client.delete(url)
+        self.assertEqual(response.status_code, 200)
+        self.assertNotEqual(json.loads(response.content)["error"], "ok")
+
+    def test_xhr_custom_packages_err(self):
+        """Test error conditions of xhr_customrecipe_packages"""
+        # test calls with wrong recipe id and wrong package id
+        for args in [(0, self.package.id), (self.customr.id, 0)]:
+            url = reverse('xhr_customrecipe_packages', args=args)
+            # test put and delete methods
+            for method in (self.client.put, self.client.delete):
+                response = method(url)
+                self.assertEqual(response.status_code, 200)
+                self.assertNotEqual(json.loads(response.content),
+                                    {"error": "ok"})
+
+    def test_software_recipes_table(self):
+        """Test structure returned for Software RecipesTable"""
+        table = SoftwareRecipesTable()
+        request = RequestFactory().get('/foo/', {'format': 'json'})
+        response = table.get(request, pid=self.project.id)
+        data = json.loads(response.content)
+
+        rows = data['rows']
+        row1 = next(x for x in rows if x['name'] == self.recipe1.name)
+        row2 = next(x for x in rows if x['name'] == self.recipe2.name)
+
+        self.assertEqual(response.status_code, 200, 'should be 200 OK status')
+        self.assertEqual(len(rows), 2, 'should be 2 recipes')
+
+        # check other columns have been populated correctly
+        self.assertEqual(row1['name'], self.recipe1.name)
+        self.assertEqual(row1['version'], self.recipe1.version)
+        self.assertEqual(row1['get_description_or_summary'],
+                         self.recipe1.description)
+        self.assertEqual(row1['layer_version__layer__name'],
+                         self.recipe1.layer_version.layer.name)
+        self.assertEqual(row2['name'], self.recipe2.name)
+        self.assertEqual(row2['version'], self.recipe2.version)
+        self.assertEqual(row2['get_description_or_summary'],
+                         self.recipe2.description)
+        self.assertEqual(row2['layer_version__layer__name'],
+                         self.recipe2.layer_version.layer.name)
+
 class LandingPageTests(TestCase):
     """ Tests for redirects on the landing page """
     # disable bogus pylint message error:
@@ -255,18 +434,48 @@
         self.assertTrue('/builds' in response.url,
                         'should redirect to builds')
 
-class ProjectsPageTests(TestCase):
-    """ Tests for projects page """
+class AllProjectsPageTests(TestCase):
+    """ Tests for projects page /projects/ """
 
-    PROJECT_NAME = 'cli builds'
+    MACHINE_NAME = 'delorean'
 
     def setUp(self):
         """ Add default project manually """
-        project = Project.objects.create_project(self.PROJECT_NAME, None)
+        project = Project.objects.create_project(CLI_BUILDS_PROJECT_NAME, None)
         self.default_project = project
         self.default_project.is_default = True
         self.default_project.save()
 
+        # this project is only set for some of the tests
+        self.project = None
+
+        self.release = None
+
+    def _add_build_to_default_project(self):
+        """ Add a build to the default project (not used in all tests) """
+        now = timezone.now()
+        build = Build.objects.create(project=self.default_project,
+                                     started_on=now,
+                                     completed_on=now)
+        build.save()
+
+    def _add_non_default_project(self):
+        """ Add another project """
+        bbv = BitbakeVersion.objects.create(name="test bbv", giturl="/tmp/",
+                                            branch="master", dirpath="")
+        self.release = Release.objects.create(name="test release",
+                                              branch_name="master",
+                                              bitbake_version=bbv)
+        self.project = Project.objects.create_project(PROJECT_NAME, self.release)
+        self.project.is_default = False
+        self.project.save()
+
+        # fake the MACHINE variable
+        project_var = ProjectVariable.objects.create(project=self.project,
+                                                     name='MACHINE',
+                                                     value=self.MACHINE_NAME)
+        project_var.save()
+
     def test_default_project_hidden(self):
         """ The default project should be hidden if it has no builds """
         params = {"count": 10, "orderby": "updated:-", "page": 1}
@@ -274,26 +483,116 @@
 
         self.assertTrue(not('tr class="data"' in response.content),
                         'should be no project rows in the page')
-        self.assertTrue(not(self.PROJECT_NAME in response.content),
+        self.assertTrue(not(CLI_BUILDS_PROJECT_NAME in response.content),
                         'default project "cli builds" should not be in page')
 
     def test_default_project_has_build(self):
         """ The default project should be shown if it has builds """
-        now = timezone.now()
-        build = Build.objects.create(project=self.default_project,
-                                     started_on=now,
-                                     completed_on=now)
-        build.save()
+        self._add_build_to_default_project()
 
         params = {"count": 10, "orderby": "updated:-", "page": 1}
         response = self.client.get(reverse('all-projects'), params)
 
         self.assertTrue('tr class="data"' in response.content,
                         'should be a project row in the page')
-        self.assertTrue(self.PROJECT_NAME in response.content,
+        self.assertTrue(CLI_BUILDS_PROJECT_NAME in response.content,
                         'default project "cli builds" should be in page')
 
-class ProjectBuildsDisplayTest(TestCase):
+    def test_default_project_release(self):
+        """
+        The release for the default project should display as
+        'Not applicable'
+        """
+        # need a build, otherwise project doesn't display at all
+        self._add_build_to_default_project()
+
+        # another project to test, which should show release
+        self._add_non_default_project()
+
+        response = self.client.get(reverse('all-projects'), follow=True)
+        soup = BeautifulSoup(response.content)
+
+        # check the release cell for the default project
+        attrs = {'data-project': str(self.default_project.id)}
+        rows = soup.find_all('tr', attrs=attrs)
+        self.assertEqual(len(rows), 1, 'should be one row for default project')
+        cells = rows[0].find_all('td', attrs={'data-project-field': 'release'})
+        self.assertEqual(len(cells), 1, 'should be one release cell')
+        text = cells[0].select('span.muted')[0].text
+        self.assertEqual(text, 'Not applicable',
+                         'release should be not applicable for default project')
+
+        # check the link in the release cell for the other project
+        attrs = {'data-project': str(self.project.id)}
+        rows = soup.find_all('tr', attrs=attrs)
+        cells = rows[0].find_all('td', attrs={'data-project-field': 'release'})
+        text = cells[0].select('a')[0].text
+        self.assertEqual(text, self.release.name,
+                         'release name should be shown for non-default project')
+
+    def test_default_project_machine(self):
+        """
+        The machine for the default project should display as
+        'Not applicable'
+        """
+        # need a build, otherwise project doesn't display at all
+        self._add_build_to_default_project()
+
+        # another project to test, which should show machine
+        self._add_non_default_project()
+
+        response = self.client.get(reverse('all-projects'), follow=True)
+        soup = BeautifulSoup(response.content)
+
+        # check the machine cell for the default project
+        attrs = {'data-project': str(self.default_project.id)}
+        rows = soup.find_all('tr', attrs=attrs)
+        self.assertEqual(len(rows), 1, 'should be one row for default project')
+        cells = rows[0].find_all('td', attrs={'data-project-field': 'machine'})
+        self.assertEqual(len(cells), 1, 'should be one machine cell')
+        text = cells[0].select('span.muted')[0].text
+        self.assertEqual(text, 'Not applicable',
+                         'machine should be not applicable for default project')
+
+        # check the link in the machine cell for the other project
+        attrs = {'data-project': str(self.project.id)}
+        rows = soup.find_all('tr', attrs=attrs)
+        cells = rows[0].find_all('td', attrs={'data-project-field': 'machine'})
+        text = cells[0].select('a')[0].text
+        self.assertEqual(text, self.MACHINE_NAME,
+                         'machine name should be shown for non-default project')
+
+    def test_project_page_links(self):
+        """
+        Test that links for the default project point to the builds
+        page /projects/X/builds for that project, and that links for
+        other projects point to their configuration pages /projects/X/
+        """
+
+        # need a build, otherwise project doesn't display at all
+        self._add_build_to_default_project()
+
+        # another project to test, which should show machine
+        self._add_non_default_project()
+
+        response = self.client.get(reverse('all-projects'), follow=True)
+        soup = BeautifulSoup(response.content)
+
+        # link for default project
+        row = soup.find('tr', attrs={'data-project': self.default_project.id})
+        cell = row.find('td', attrs={'data-project-field': 'name'})
+        expected_url = reverse('projectbuilds', args=(self.default_project.id,))
+        self.assertEqual(cell.find('a')['href'], expected_url,
+                         'link on default project name should point to builds')
+
+        # link for other project
+        row = soup.find('tr', attrs={'data-project': self.project.id})
+        cell = row.find('td', attrs={'data-project-field': 'name'})
+        expected_url = reverse('project', args=(self.project.id,))
+        self.assertEqual(cell.find('a')['href'], expected_url,
+                         'link on project name should point to configuration')
+
+class ProjectBuildsPageTests(TestCase):
     """ Test data at /project/X/builds is displayed correctly """
 
     def setUp(self):
@@ -303,8 +602,18 @@
                                          bitbake_version=bbv)
         self.project1 = Project.objects.create_project(name=PROJECT_NAME,
                                                        release=release)
+        self.project1.save()
+
         self.project2 = Project.objects.create_project(name=PROJECT_NAME,
                                                        release=release)
+        self.project2.save()
+
+        self.default_project = Project.objects.create_project(
+            name=CLI_BUILDS_PROJECT_NAME,
+            release=release
+        )
+        self.default_project.is_default = True
+        self.default_project.save()
 
         # parameters for builds to associate with the projects
         now = timezone.now()
@@ -338,6 +647,7 @@
         }
 
     def _get_rows_for_project(self, project_id):
+        """ Helper to retrieve HTML rows for a project """
         url = reverse("projectbuilds", args=(project_id,))
         response = self.client.get(url, follow=True)
         soup = BeautifulSoup(response.content)
@@ -345,35 +655,273 @@
 
     def test_show_builds_for_project(self):
         """ Builds for a project should be displayed """
-        build1a = Build.objects.create(**self.project1_build_success)
-        build1b = Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
         build_rows = self._get_rows_for_project(self.project1.id)
         self.assertEqual(len(build_rows), 2)
 
-    def test_show_builds_for_project_only(self):
+    def test_show_builds_project_only(self):
         """ Builds for other projects should be excluded """
-        build1a = Build.objects.create(**self.project1_build_success)
-        build1b = Build.objects.create(**self.project1_build_success)
-        build1c = Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
 
         # shouldn't see these two
-        build2a = Build.objects.create(**self.project2_build_success)
-        build2b = Build.objects.create(**self.project2_build_in_progress)
+        Build.objects.create(**self.project2_build_success)
+        Build.objects.create(**self.project2_build_in_progress)
 
         build_rows = self._get_rows_for_project(self.project1.id)
         self.assertEqual(len(build_rows), 3)
 
-    def test_show_builds_exclude_in_progress(self):
+    def test_builds_exclude_in_progress(self):
         """ "in progress" builds should not be shown """
-        build1a = Build.objects.create(**self.project1_build_success)
-        build1b = Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
+        Build.objects.create(**self.project1_build_success)
 
         # shouldn't see this one
-        build1c = Build.objects.create(**self.project1_build_in_progress)
+        Build.objects.create(**self.project1_build_in_progress)
 
         # shouldn't see these two either, as they belong to a different project
-        build2a = Build.objects.create(**self.project2_build_success)
-        build2b = Build.objects.create(**self.project2_build_in_progress)
+        Build.objects.create(**self.project2_build_success)
+        Build.objects.create(**self.project2_build_in_progress)
 
         build_rows = self._get_rows_for_project(self.project1.id)
-        self.assertEqual(len(build_rows), 2)
\ No newline at end of file
+        self.assertEqual(len(build_rows), 2)
+
+    def test_tasks_in_projectbuilds(self):
+        """ Task should be shown as suffix on build name """
+        build = Build.objects.create(**self.project1_build_success)
+        Target.objects.create(build=build, target='bash', task='clean')
+        url = reverse("projectbuilds", args=(self.project1.id,))
+        response = self.client.get(url, follow=True)
+        result = re.findall('^ +bash:clean$', response.content, re.MULTILINE)
+        self.assertEqual(len(result), 2)
+
+    def test_cli_builds_hides_tabs(self):
+        """
+        Display for command line builds should hide tabs;
+        note that the latest builds section is already tested in
+        AllBuildsPageTests, as the template is the same
+        """
+        url = reverse("projectbuilds", args=(self.default_project.id,))
+        response = self.client.get(url, follow=True)
+        soup = BeautifulSoup(response.content)
+        tabs = soup.select('#project-topbar')
+        self.assertEqual(len(tabs), 0,
+                         'should be no top bar shown for command line builds')
+
+    def test_non_cli_builds_has_tabs(self):
+        """
+        Non-command-line builds projects should show the tabs
+        """
+        url = reverse("projectbuilds", args=(self.project1.id,))
+        response = self.client.get(url, follow=True)
+        soup = BeautifulSoup(response.content)
+        tabs = soup.select('#project-topbar')
+        self.assertEqual(len(tabs), 1,
+                         'should be a top bar shown for non-command-line builds')
+
+class AllBuildsPageTests(TestCase):
+    """ Tests for all builds page /builds/ """
+
+    def setUp(self):
+        bbv = BitbakeVersion.objects.create(name="bbv1", giturl="/tmp/",
+                                            branch="master", dirpath="")
+        release = Release.objects.create(name="release1",
+                                         bitbake_version=bbv)
+        self.project1 = Project.objects.create_project(name=PROJECT_NAME,
+                                                       release=release)
+        self.default_project = Project.objects.create_project(
+            name=CLI_BUILDS_PROJECT_NAME,
+            release=release
+        )
+        self.default_project.is_default = True
+        self.default_project.save()
+
+        # parameters for builds to associate with the projects
+        now = timezone.now()
+
+        self.project1_build_success = {
+            "project": self.project1,
+            "started_on": now,
+            "completed_on": now,
+            "outcome": Build.SUCCEEDED
+        }
+
+        self.default_project_build_success = {
+            "project": self.default_project,
+            "started_on": now,
+            "completed_on": now,
+            "outcome": Build.SUCCEEDED
+        }
+
+    def test_show_tasks_in_allbuilds(self):
+        """ Task should be shown as suffix on build name """
+        build = Build.objects.create(**self.project1_build_success)
+        Target.objects.create(build=build, target='bash', task='clean')
+        url = reverse('all-builds')
+        response = self.client.get(url, follow=True)
+        result = re.findall('bash:clean', response.content, re.MULTILINE)
+        self.assertEqual(len(result), 3)
+
+    def test_no_run_again_for_cli_build(self):
+        """ "Run again" button should not be shown for command-line builds """
+        build = Build.objects.create(**self.default_project_build_success)
+        url = reverse('all-builds')
+        response = self.client.get(url, follow=True)
+        soup = BeautifulSoup(response.content)
+
+        attrs = {'data-latest-build-result': build.id}
+        result = soup.find('div', attrs=attrs)
+
+        # shouldn't see a run again button for command-line builds
+        run_again_button = result.select('button')
+        self.assertEqual(len(run_again_button), 0)
+
+        # should see a help icon for command-line builds
+        help_icon = result.select('i.get-help-green')
+        self.assertEqual(len(help_icon), 1)
+
+    def test_tooltips_on_project_name(self):
+        """
+        A tooltip should be present next to the command line
+        builds project name in the all builds page, but not for
+        other projects
+        """
+        build1 = Build.objects.create(**self.project1_build_success)
+        default_build = Build.objects.create(**self.default_project_build_success)
+
+        url = reverse('all-builds')
+        response = self.client.get(url, follow=True)
+        soup = BeautifulSoup(response.content)
+
+        # no help icon on non-default project name
+        result = soup.find('tr', attrs={'data-table-build-result': build1.id})
+        name = result.select('td.project-name')[0]
+        icons = name.select('i.get-help')
+        self.assertEqual(len(icons), 0,
+                         'should not be a help icon for non-cli builds name')
+
+        # help icon on default project name
+        result = soup.find('tr', attrs={'data-table-build-result': default_build.id})
+        name = result.select('td.project-name')[0]
+        icons = name.select('i.get-help')
+        self.assertEqual(len(icons), 1,
+                         'should be a help icon for cli builds name')
+
+class ProjectPageTests(TestCase):
+    """ Test project data at /project/X/ is displayed correctly """
+    CLI_BUILDS_PROJECT_NAME = 'Command line builds'
+
+    def test_command_line_builds_in_progress(self):
+        """
+        In progress builds should not cause an error to be thrown
+        when navigating to "command line builds" project page;
+        see https://bugzilla.yoctoproject.org/show_bug.cgi?id=8277
+        """
+
+        # add the "command line builds" default project; this mirrors what
+        # we do in migration 0026_set_default_project.py
+        default_project = Project.objects.create_project(self.CLI_BUILDS_PROJECT_NAME, None)
+        default_project.is_default = True
+        default_project.save()
+
+        # add an "in progress" build for the default project
+        now = timezone.now()
+        build = Build.objects.create(project=default_project,
+                                     started_on=now,
+                                     completed_on=now,
+                                     outcome=Build.IN_PROGRESS)
+
+        # navigate to the project page for the default project
+        url = reverse("project", args=(default_project.id,))
+        response = self.client.get(url, follow=True)
+
+        self.assertEqual(response.status_code, 200)
+
+class BuildDashboardTests(TestCase):
+    """ Tests for the build dashboard /build/X """
+
+    def setUp(self):
+        bbv = BitbakeVersion.objects.create(name="bbv1", giturl="/tmp/",
+                                            branch="master", dirpath="")
+        release = Release.objects.create(name="release1",
+                                         bitbake_version=bbv)
+        project = Project.objects.create_project(name=PROJECT_NAME,
+                                                 release=release)
+
+        now = timezone.now()
+
+        self.build1 = Build.objects.create(project=project,
+                                           started_on=now,
+                                           completed_on=now)
+
+        # exception
+        msg1 = 'an exception was thrown'
+        self.exception_message = LogMessage.objects.create(
+            build=self.build1,
+            level=LogMessage.EXCEPTION,
+            message=msg1
+        )
+
+        # critical
+        msg2 = 'a critical error occurred'
+        self.critical_message = LogMessage.objects.create(
+            build=self.build1,
+            level=LogMessage.CRITICAL,
+            message=msg2
+        )
+
+    def _get_build_dashboard_errors(self):
+        """
+        Get a list of HTML fragments representing the errors on the
+        build dashboard
+        """
+        url = reverse('builddashboard', args=(self.build1.id,))
+        response = self.client.get(url)
+        soup = BeautifulSoup(response.content)
+        return soup.select('#errors div.alert-error')
+
+    def _check_for_log_message(self, log_message):
+        """
+        Check whether the LogMessage instance <log_message> is
+        represented as an HTML error in the build dashboard page
+        """
+        errors = self._get_build_dashboard_errors()
+        self.assertEqual(len(errors), 2)
+
+        expected_text = log_message.message
+        expected_id = str(log_message.id)
+
+        found = False
+        for error in errors:
+            error_text = error.find('pre').text
+            text_matches = (error_text == expected_text)
+
+            error_id = error['data-error']
+            id_matches = (error_id == expected_id)
+
+            if text_matches and id_matches:
+                found = True
+                break
+
+        template_vars = (expected_text, error_text,
+                         expected_id, error_id)
+        assertion_error_msg = 'exception not found as error: ' \
+            'expected text "%s" and got "%s"; ' \
+            'expected ID %s and got %s' % template_vars
+        self.assertTrue(found, assertion_error_msg)
+
+    def test_exceptions_show_as_errors(self):
+        """
+        LogMessages with level EXCEPTION should display in the errors
+        section of the page
+        """
+        self._check_for_log_message(self.exception_message)
+
+    def test_criticals_show_as_errors(self):
+        """
+        LogMessages with level CRITICAL should display in the errors
+        section of the page
+        """
+        self._check_for_log_message(self.critical_message)
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/typeaheads.py b/yocto-poky/bitbake/lib/toaster/toastergui/typeaheads.py
index d5bec58..dd4b7f5 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/typeaheads.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/typeaheads.py
@@ -27,7 +27,7 @@
       super(LayersTypeAhead, self).__init__()
 
     def apply_search(self, search_term, prj, request):
-        layers = prj.compatible_layerversions()
+        layers = prj.get_all_compatible_layer_versions()
         layers = layers.order_by('layer__name')
 
         # Unlike the other typeaheads we also don't want to show suggestions
@@ -35,7 +35,8 @@
         # layerdeps to a new layer.
         if ("include_added" in request.GET and
                 request.GET['include_added'] != "true"):
-            layers = layers.exclude(pk__in=prj.projectlayer_equivalent_set)
+            layers = layers.exclude(
+                pk__in=prj.get_project_layer_versions(pk=True))
 
         primary_results = layers.filter(layer__name__istartswith=search_term)
         secondary_results = layers.filter(layer__name__icontains=search_term).exclude(pk__in=primary_results)
@@ -120,12 +121,12 @@
         return results
 
 class ProjectsTypeAhead(ToasterTypeAhead):
-    """ Typeahead for all the projects """
+    """ Typeahead for all the projects, except for command line builds """
     def __init__(self):
         super(ProjectsTypeAhead, self).__init__()
 
     def apply_search(self, search_term, prj, request):
-        projects = Project.objects.all().order_by("name")
+        projects = Project.objects.exclude(is_default=True).order_by("name")
 
         primary_results = projects.filter(name__istartswith=search_term)
         secondary_results = projects.filter(name__icontains=search_term).exclude(pk__in=primary_results)
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/urls.py b/yocto-poky/bitbake/lib/toaster/toastergui/urls.py
index 46e5761..a1adbb7 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/urls.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/urls.py
@@ -87,28 +87,29 @@
         # the table pages that have been converted to ToasterTable widget
         url(r'^project/(?P<pid>\d+)/machines/$',
             tables.MachinesTable.as_view(template_name="generic-toastertable-page.html"),
-            { 'table_name': tables.MachinesTable.__name__.lower(),
-              'title' : 'Compatible machines' },
             name="projectmachines"),
 
-        url(r'^project/(?P<pid>\d+)/recipes/$',
-            tables.RecipesTable.as_view(template_name="generic-toastertable-page.html"),
-            { 'table_name': tables.RecipesTable.__name__.lower(),
-              'title' : 'Compatible recipes' },
-            name="projecttargets"),
+        url(r'^project/(?P<pid>\d+)/softwarerecipes/$',
+            tables.SoftwareRecipesTable.as_view(template_name="generic-toastertable-page.html"),
+            name="projectsoftwarerecipes"),
 
-        url(r'^project/(?P<pid>\d+)/availablerecipes/$',
-            tables.ProjectLayersRecipesTable.as_view(template_name="generic-toastertable-page.html"),
-            { 'table_name': tables.ProjectLayersRecipesTable.__name__.lower(),
-              'title' : 'Recipes available for layers in the current project' },
-            name="projectavailabletargets"),
+        url(r'^project/(?P<pid>\d+)/images/$',
+            tables.ImageRecipesTable.as_view(template_name="generic-toastertable-page.html"), name="projectimagerecipes"),
+
+        url(r'^project/(?P<pid>\d+)/customimages/$',
+            tables.CustomImagesTable.as_view(template_name="generic-toastertable-page.html"), name="projectcustomimages"),
+
+        url(r'^project/(?P<pid>\d+)/newcustomimage/$',
+            tables.NewCustomImagesTable.as_view(template_name="newcustomimage.html"),
+            name="newcustomimage"),
+
 
         url(r'^project/(?P<pid>\d+)/layers/$',
             tables.LayersTable.as_view(template_name="generic-toastertable-page.html"),
-            { 'table_name': tables.LayersTable.__name__.lower(),
-              'title' : 'Compatible layers' },
             name="projectlayers"),
 
+
+
         url(r'^project/(?P<pid>\d+)/layer/(?P<layerid>\d+)$',
             'layerdetails', name='layerdetails'),
 
@@ -125,6 +126,16 @@
             name=tables.LayerMachinesTable.__name__.lower()),
 
 
+        url(r'^project/(?P<pid>\d+)/customrecipe/(?P<recipeid>\d+)/selectpackages/$',
+            tables.SelectPackagesTable.as_view(template_name="generic-toastertable-page.html"), name="recipeselectpackages"),
+
+
+        url(r'^project/(?P<pid>\d+)/customrecipe/(?P<recipe_id>\d+)$',
+            'customrecipe',
+            name="customrecipe"),
+
+
+
         # typeahead api end points
         url(r'^xhr_typeahead/(?P<pid>\d+)/layers$',
             typeaheads.LayersTypeAhead.as_view(), name='xhr_layerstypeahead'),
@@ -148,6 +159,14 @@
         # JS Unit tests
         url(r'^js-unit-tests/$', 'jsunittests', name='js-unit-tests'),
 
-        # default redirection
+        # image customisation functionality
+        url(r'^xhr_customrecipe/(?P<recipe_id>\d+)/packages/(?P<package_id>\d+|)$',
+            'xhr_customrecipe_packages', name='xhr_customrecipe_packages'),
+        url(r'^xhr_customrecipe/(?P<recipe_id>\d+)$', 'xhr_customrecipe_id',
+            name='xhr_customrecipe_id'),
+        url(r'^xhr_customrecipe/', 'xhr_customrecipe',
+            name='xhr_customrecipe'),
+
+          # default redirection
         url(r'^$', RedirectView.as_view( url= 'landing')),
 )
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/views.py b/yocto-poky/bitbake/lib/toaster/toastergui/views.py
index 8689a12..0e255f1 100755
--- a/yocto-poky/bitbake/lib/toaster/toastergui/views.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/views.py
@@ -26,12 +26,12 @@
 import operator,re
 
 from django.db.models import F, Q, Sum, Count, Max
-from django.db import IntegrityError
+from django.db import IntegrityError, Error
 from django.shortcuts import render, redirect
 from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe, LogMessage, Variable
 from orm.models import Task_Dependency, Recipe_Dependency, Package, Package_File, Package_Dependency
 from orm.models import Target_Installed_Package, Target_File, Target_Image_File, BuildArtifact
-from orm.models import BitbakeVersion
+from orm.models import BitbakeVersion, CustomImageRecipe
 from bldcontrol import bbcontroller
 from django.views.decorators.cache import cache_control
 from django.core.urlresolvers import reverse, resolve
@@ -45,32 +45,50 @@
 from toastergui.templatetags.projecttags import json as jsonfilter
 import json
 from os.path import dirname
+from functools import wraps
 import itertools
+import mimetypes
 
-import magic
 import logging
 
 logger = logging.getLogger("toaster")
 
 class MimeTypeFinder(object):
-    _magic = magic.Magic(flags = magic.MAGIC_MIME_TYPE)
+    # setting this to False enables additional non-standard mimetypes
+    # to be included in the guess
+    _strict = False
 
-    # returns the mimetype for a file path
+    # returns the mimetype for a file path as a string,
+    # or 'application/octet-stream' if the type couldn't be guessed
     @classmethod
     def get_mimetype(self, path):
-        return self._magic.id_filename(path)
+        guess = mimetypes.guess_type(path, self._strict)
+        guessed_type = guess[0]
+        if guessed_type == None:
+            guessed_type = 'application/octet-stream'
+        return guessed_type
 
 # all new sessions should come through the landing page;
 # determine in which mode we are running in, and redirect appropriately
 def landing(request):
+    # in build mode, we redirect to the command-line builds page
+    # if there are any builds for the default (cli builds) project
+    default_project = Project.objects.get_default_project()
+    default_project_builds = Build.objects.filter(project = default_project)
+
+    if (not toastermain.settings.BUILD_MODE) and default_project_builds.count() > 0:
+        args = (default_project.id,)
+        return redirect(reverse('projectbuilds', args = args), permanent = False)
+
     # we only redirect to projects page if there is a user-generated project
+    num_builds = Build.objects.all().count()
     user_projects = Project.objects.filter(is_default = False)
     has_user_project = user_projects.count() > 0
 
-    if Build.objects.count() == 0 and has_user_project:
+    if num_builds == 0 and has_user_project:
         return redirect(reverse('all-projects'), permanent = False)
 
-    if Build.objects.all().count() > 0:
+    if num_builds > 0:
         return redirect(reverse('all-builds'), permanent = False)
 
     context = {'lvs_nos' : Layer_Version.objects.all().count()}
@@ -84,9 +102,12 @@
     if prj is not None:
         queryset = queryset.filter(project = prj)
 
+    if not toastermain.settings.BUILD_MODE:
+        queryset = queryset.exclude(project__is_default=False)
+
     return list(itertools.chain(
-        queryset.filter(outcome=Build.IN_PROGRESS).order_by("-pk"),
-        queryset.filter(outcome__lt=Build.IN_PROGRESS).order_by("-pk")[:3] ))
+        queryset.filter(outcome=Build.IN_PROGRESS).order_by("-started_on"),
+        queryset.filter(outcome__lt=Build.IN_PROGRESS).order_by("-started_on")[:3] ))
 
 
 # a JSON-able dict of recent builds; for use in the Project page, xhr_ updates,  and other places, as needed
@@ -1215,7 +1236,7 @@
     context = { 'objectname': variant,
                 'object_search_display': object_search_display,
                 'filter_search_display': filter_search_display,
-                'title': title_variant,
+                'mainheading': title_variant,
                 'build': build,
                 'objects': task_objects,
                 'default_orderby' : orderby,
@@ -1862,11 +1883,21 @@
     return redirect(builds)
     # the context processor that supplies data used across all the pages
 
-
+# a context processor which runs on every request; this provides the
+# projects and non_cli_projects (i.e. projects created by the user)
+# variables referred to in templates, which used to determine the
+# visibility of UI elements like the "New build" button
 def managedcontextprocessor(request):
+    projects = Project.objects.all()
     ret = {
-        "projects": Project.objects.all(),
+        "projects": projects,
+        "non_cli_projects": projects.exclude(is_default=True),
         "DEBUG" : toastermain.settings.DEBUG,
+
+        # True if Toaster is in build mode, False otherwise
+        "BUILD_MODE": toastermain.settings.BUILD_MODE,
+
+        "CUSTOM_IMAGE" : toastermain.settings.CUSTOM_IMAGE,
         "TOASTER_BRANCH": toastermain.settings.TOASTER_BRANCH,
         "TOASTER_REVISION" : toastermain.settings.TOASTER_REVISION,
     }
@@ -1908,6 +1939,11 @@
 
         queryset = Build.objects.all()
 
+        # if in analysis mode, exclude builds for all projects except
+        # command line builds
+        if not toastermain.settings.BUILD_MODE:
+            queryset = queryset.exclude(project__is_default=False)
+
         redirect_page = resolve(request.path_info).url_name
 
         context, pagesize, orderby = _build_list_helper(request,
@@ -1982,7 +2018,7 @@
         build_info = _build_page_range(Paginator(queryset, pagesize), request.GET.get('page', 1))
 
         # build view-specific information; this is rendered specifically in the builds page, at the top of the page (i.e. Recent builds)
-        build_mru = Build.objects.order_by("-started_on")[:3]
+        build_mru = _get_latest_builds()[:3]
 
         # calculate the exact begining of local today and yesterday, append context
         context_date,today_begin,yesterday_begin = _add_daterange_context(queryset_all, request, {'started_on','completed_on'})
@@ -2101,35 +2137,38 @@
                     },
                     {'name': 'Errors', 'clclass': 'errors_no',
                      'qhelp': "How many errors were encountered during the build (if any)",
-                     'orderfield': _get_toggle_order(request, "errors_no", True),
-                     'ordericon':_get_toggle_order_icon(request, "errors_no"),
-                     'orderkey' : 'errors_no',
-                     'filter' : {'class' : 'errors_no',
-                                 'label': 'Show:',
-                                 'options' : [
-                                             ('Builds with errors', 'errors_no__gte:1', queryset_with_search.filter(errors_no__gte=1).count()),
-                                             ('Builds without errors', 'errors_no:0', queryset_with_search.filter(errors_no=0).count()),
-                                             ]
-                                }
+                     # Comment out sorting and filter until YOCTO #8131 is fixed
+                     #'orderfield': _get_toggle_order(request, "errors_no", True),
+                     #'ordericon':_get_toggle_order_icon(request, "errors_no"),
+                     #'orderkey' : 'errors_no',
+                     #'filter' : {'class' : 'errors_no',
+                     #            'label': 'Show:',
+                     #            'options' : [
+                     #                        ('Builds with errors', 'errors_no__gte:1', queryset_with_search.filter(errors_no__gte=1).count()),
+                     #                        ('Builds without errors', 'errors_no:0', queryset_with_search.filter(errors_no=0).count()),
+                     #                        ]
+                     #           }
                     },
                     {'name': 'Warnings', 'clclass': 'warnings_no',
                      'qhelp': "How many warnings were encountered during the build (if any)",
-                     'orderfield': _get_toggle_order(request, "warnings_no", True),
-                     'ordericon':_get_toggle_order_icon(request, "warnings_no"),
-                     'orderkey' : 'warnings_no',
-                     'filter' : {'class' : 'warnings_no',
-                                 'label': 'Show:',
-                                 'options' : [
-                                             ('Builds with warnings','warnings_no__gte:1', queryset_with_search.filter(warnings_no__gte=1).count()),
-                                             ('Builds without warnings','warnings_no:0', queryset_with_search.filter(warnings_no=0).count()),
-                                             ]
-                                }
+                     # Comment out sorting and filter until YOCTO #8131 is fixed
+                     #'orderfield': _get_toggle_order(request, "warnings_no", True),
+                     #'ordericon':_get_toggle_order_icon(request, "warnings_no"),
+                     #'orderkey' : 'warnings_no',
+                     #'filter' : {'class' : 'warnings_no',
+                     #            'label': 'Show:',
+                     #            'options' : [
+                     #                        ('Builds with warnings','warnings_no__gte:1', queryset_with_search.filter(warnings_no__gte=1).count()),
+                     #                        ('Builds without warnings','warnings_no:0', queryset_with_search.filter(warnings_no=0).count()),
+                     #                        ]
+                     #           }
                     },
                     {'name': 'Time', 'clclass': 'time', 'hidden' : 1,
                      'qhelp': "How long it took the build to finish",
-                     'orderfield': _get_toggle_order(request, "timespent", True),
-                     'ordericon':_get_toggle_order_icon(request, "timespent"),
-                     'orderkey' : 'timespent',
+                     # Comment out sorting until YOCTO #8131 is fixed
+                     #'orderfield': _get_toggle_order(request, "timespent", True),
+                     #'ordericon':_get_toggle_order_icon(request, "timespent"),
+                     #'orderkey' : 'timespent',
                     },
                     {'name': 'Image files', 'clclass': 'output',
                      'qhelp': "The root file system types produced by the build. You can find them in your <code>/build/tmp/deploy/images/</code> directory",
@@ -2313,21 +2352,33 @@
 
         return context
 
+    def xhr_response(fun):
+        """
+        Decorator for REST methods.
+        calls jsonfilter on the returned dictionary and returns result
+        as HttpResponse object of content_type application/json
+        """
+        @wraps(fun)
+        def wrapper(*args, **kwds):
+            return HttpResponse(jsonfilter(fun(*args, **kwds)),
+                                content_type="application/json")
+        return wrapper
+
     def jsunittests(request):
-      """ Provides a page for the js unit tests """
-      bbv = BitbakeVersion.objects.filter(branch="master").first()
-      release = Release.objects.filter(bitbake_version=bbv).first()
+        """ Provides a page for the js unit tests """
+        bbv = BitbakeVersion.objects.filter(branch="master").first()
+        release = Release.objects.filter(bitbake_version=bbv).first()
 
-      name = "_js_unit_test_prj_"
+        name = "_js_unit_test_prj_"
 
-      # If there is an existing project by this name delete it. We don't want
-      # Lots of duplicates cluttering up the projects.
-      Project.objects.filter(name=name).delete()
+        # If there is an existing project by this name delete it. We don't want
+        # Lots of duplicates cluttering up the projects.
+        Project.objects.filter(name=name).delete()
 
-      new_project = Project.objects.create_project(name=name, release=release)
+        new_project = Project.objects.create_project(name=name, release=release)
 
-      context = { 'project' : new_project }
-      return render(request, "js-unit-tests.html", context)
+        context = { 'project' : new_project }
+        return render(request, "js-unit-tests.html", context)
 
     from django.views.decorators.csrf import csrf_exempt
     @csrf_exempt
@@ -2582,7 +2633,155 @@
 
         return HttpResponse(jsonfilter({"error": "ok",}), content_type = "application/json")
 
+    @xhr_response
+    def xhr_customrecipe(request):
+        """
+        Custom image recipe REST API
 
+        Entry point: /xhr_customrecipe/
+        Method: POST
+
+        Args:
+            name: name of custom recipe to create
+            project: target project id of orm.models.Project
+            base: base recipe id of orm.models.Recipe
+
+        Returns:
+            {"error": "ok",
+             "url": <url of the created recipe>}
+            or
+            {"error": <error message>}
+        """
+        # check if request has all required parameters
+        for param in ('name', 'project', 'base'):
+            if param not in request.POST:
+                return {"error": "Missing parameter '%s'" % param}
+
+        # get project and baserecipe objects
+        params = {}
+        for name, model in [("project", Project),
+                            ("base", Recipe)]:
+            value = request.POST[name]
+            try:
+                params[name] = model.objects.get(id=value)
+            except model.DoesNotExist:
+                return {"error": "Invalid %s id %s" % (name, value)}
+
+        # create custom recipe
+        try:
+            recipe = CustomImageRecipe.objects.create(
+                         name=request.POST["name"],
+                         base_recipe=params["base"],
+                         project=params["project"])
+        except Error as err:
+            return {"error": "Can't create custom recipe: %s" % err}
+
+        # Find the package list from the last build of this recipe/target
+        build = Build.objects.filter(target__target=params['base'].name,
+                    project=params['project']).last()
+
+        if build:
+            # Copy in every package
+            # We don't want these packages to be linked to anything because
+            # that underlying data may change e.g. delete a build
+            for package in build.package_set.all():
+                # Create the duplicate
+                package.pk = None
+                package.save()
+                # Disassociate the package from the build
+                package.build = None
+                package.save()
+                recipe.packages.add(package)
+        else:
+            logger.warn("No packages found for this base recipe")
+
+        return {"error": "ok",
+                "url": reverse('customrecipe', args=(params['project'].pk,
+                                                     recipe.id))}
+
+    @xhr_response
+    def xhr_customrecipe_id(request, recipe_id):
+        """
+        Set of ReST API processors working with recipe id.
+
+        Entry point: /xhr_customrecipe/<recipe_id>
+
+        Methods:
+            GET - Get details of custom image recipe
+            DELETE - Delete custom image recipe
+
+        Returns:
+            GET:
+            {"error": "ok",
+             "info": dictionary of field name -> value pairs
+                     of the CustomImageRecipe model}
+            DELETE:
+            {"error": "ok"}
+            or
+            {"error": <error message>}
+        """
+        objects = CustomImageRecipe.objects.filter(id=recipe_id)
+        if not objects:
+            return {"error": "Custom recipe with id=%s "
+                             "not found" % recipe_id}
+        if request.method == 'GET':
+            values = CustomImageRecipe.objects.filter(id=recipe_id).values()
+            if values:
+                return {"error": "ok", "info": values[0]}
+            else:
+                return {"error": "Custom recipe with id=%s "
+                                 "not found" % recipe_id}
+            return {"error": "ok", "info": objects.values()[0]}
+        elif request.method == 'DELETE':
+            objects.delete()
+            return {"error": "ok"}
+        else:
+            return {"error": "Method %s is not supported" % request.method}
+
+    @xhr_response
+    def xhr_customrecipe_packages(request, recipe_id, package_id):
+        """
+        ReST API to add/remove packages to/from custom recipe.
+
+        Entry point: /xhr_customrecipe/<recipe_id>/packages/
+
+        Methods:
+            PUT - Add package to the recipe
+            DELETE - Delete package from the recipe
+
+        Returns:
+            {"error": "ok"}
+            or
+            {"error": <error message>}
+        """
+        try:
+            recipe = CustomImageRecipe.objects.get(id=recipe_id)
+        except CustomImageRecipe.DoesNotExist:
+            return {"error": "Custom recipe with id=%s "
+                             "not found" % recipe_id}
+
+        if request.method == 'GET' and not package_id:
+            return {"error": "ok",
+                    "packages": list(recipe.packages.values_list('id'))}
+
+        try:
+            package = Package.objects.get(id=package_id)
+        except Package.DoesNotExist:
+            return {"error": "Package with id=%s "
+                             "not found" % package_id}
+
+        if request.method == 'PUT':
+            recipe.packages.add(package)
+            return {"error": "ok"}
+        elif request.method == 'DELETE':
+            if package in recipe.packages.all():
+                recipe.packages.remove(package)
+                return {"error": "ok"}
+            else:
+                return {"error": "Package '%s' is not in the recipe '%s'" % \
+                                 (package.name, recipe.name)}
+        else:
+            return {"error": "Method %s is not supported" % request.method}
 
     def importlayer(request, pid):
         template = "importlayer.html"
@@ -2596,12 +2795,16 @@
         project = Project.objects.get(pk=pid)
         layer_version = Layer_Version.objects.get(pk=layerid)
 
-        context = { 'project' : project,
-                   'layerversion' : layer_version,
-                   'layerdeps' : { "list": [
-                     [{"id": y.id, "name": y.layer.name} for y in x.depends_on.get_equivalents_wpriority(project)][0] for x in layer_version.dependencies.all()]},
-                   'projectlayers': map(lambda prjlayer: prjlayer.layercommit.id, ProjectLayer.objects.filter(project=project))
-                  }
+        context = {'project' : project,
+            'layerversion' : layer_version,
+            'layerdeps' : {"list": [{"id": dep.id,
+                "name": dep.layer.name,
+                "layerdetailurl": reverse('layerdetails', args=(pid, dep.pk)),
+                "vcs_url": dep.layer.vcs_url,
+                "vcs_reference": dep.get_vcs_reference()} \
+                for dep in layer_version.get_alldeps(project.id)]},
+            'projectlayers': map(lambda prjlayer: prjlayer.layercommit.id, ProjectLayer.objects.filter(project=project))
+        }
 
         return context
 
@@ -2628,6 +2831,15 @@
 
         return(vars_managed,sorted(vars_fstypes),vars_blacklist)
 
+    def customrecipe(request, pid, recipe_id):
+        project = Project.objects.get(pk=pid)
+        context = {'project' : project,
+                   'projectlayers': [],
+                   'recipe' : CustomImageRecipe.objects.get(pk=recipe_id)
+                  }
+
+        return render(request, "customrecipe.html", context)
+
     @_template_renderer("projectconf.html")
     def projectconf(request, pid):
 
@@ -2733,6 +2945,9 @@
         context['project'] = prj
         _set_parameters_values(pagesize, orderby, request)
 
+        # add the most recent builds for this project
+        context['mru'] = _get_latest_builds(prj)
+
         return context
 
 
@@ -2797,7 +3012,7 @@
             if file_name is None:
                 raise Exception("Could not handle artifact %s id %s" % (artifact_type, artifact_id))
             else:
-                content_type = b.buildrequest.environment.get_artifact_type(file_name)
+                content_type = MimeTypeFinder.get_mimetype(file_name)
                 fsock = b.buildrequest.environment.get_artifact(file_name)
                 file_name = os.path.basename(file_name) # we assume that the build environment system has the same path conventions as host
 
@@ -2833,6 +3048,10 @@
         queryset_all = queryset_all.filter(Q(is_default=False) |
                                            q_default_with_builds)
 
+        # if in BUILD_MODE, exclude everything but the command line builds project
+        if not toastermain.settings.BUILD_MODE:
+            queryset_all = queryset_all.exclude(is_default=False)
+
         # boilerplate code that takes a request for an object type and returns a queryset
         # for that object type. copypasta for all needed table searches
         (filter_string, search_term, ordering_string) = _search_tuple(request, Project)
diff --git a/yocto-poky/bitbake/lib/toaster/toastergui/widgets.py b/yocto-poky/bitbake/lib/toaster/toastergui/widgets.py
index eb2914d..6bb3889 100644
--- a/yocto-poky/bitbake/lib/toaster/toastergui/widgets.py
+++ b/yocto-poky/bitbake/lib/toaster/toastergui/widgets.py
@@ -20,6 +20,7 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
 from django.views.generic import View, TemplateView
+from django.views.decorators.cache import cache_control
 from django.shortcuts import HttpResponse
 from django.http import HttpResponseBadRequest
 from django.core import serializers
@@ -38,6 +39,9 @@
 import operator
 import re
 
+import logging
+logger = logging.getLogger("toaster")
+
 from toastergui.views import objtojson
 
 class ToasterTable(TemplateView):
@@ -45,7 +49,7 @@
         super(ToasterTable, self).__init__()
         if 'template_name' in kwargs:
             self.template_name = kwargs['template_name']
-        self.title = None
+        self.title = "Table"
         self.queryset = None
         self.columns = []
         self.filters = {}
@@ -61,6 +65,18 @@
                         orderable=True,
                         field_name="id")
 
+        # prevent HTTP caching of table data
+    @cache_control(must_revalidate=True, max_age=0, no_store=True, no_cache=True)
+    def dispatch(self, *args, **kwargs):
+        return super(ToasterTable, self).dispatch(*args, **kwargs)
+
+    def get_context_data(self, **kwargs):
+        context = super(ToasterTable, self).get_context_data(**kwargs)
+        context['title'] = self.title
+        context['table_name'] =  type(self).__name__.lower()
+
+        return context
+
 
     def get(self, request, *args, **kwargs):
         if request.GET.get('format', None) == 'json':
@@ -219,7 +235,8 @@
         """Creates a query based on the model's search_allowed_fields"""
 
         if not hasattr(self.queryset.model, 'search_allowed_fields'):
-            raise Exception("Err Search fields aren't defined in the model")
+            raise Exception("Search fields aren't defined in the model %s"
+                           % self.queryset.model)
 
         search_queries = []
         for st in search_term.split(" "):
@@ -242,11 +259,14 @@
         search = request.GET.get("search", None)
         filters = request.GET.get("filter", None)
         orderby = request.GET.get("orderby", None)
+        nocache = request.GET.get("nocache", None)
 
         # Make a unique cache name
         cache_name = self.__class__.__name__
 
         for key, val in request.GET.iteritems():
+            if key == 'nocache':
+                continue
             cache_name = cache_name + str(key) + str(val)
 
         for key, val in kwargs.iteritems():
@@ -254,9 +274,14 @@
 
         # No special chars allowed in the cache name apart from dash
         cache_name = re.sub(r'[^A-Za-z0-9-]', "", cache_name)
+
+        if nocache:
+            cache.delete(cache_name)
+
         data = cache.get(cache_name)
 
         if data:
+            logger.debug("Got cache data for table '%s'" % self.title)
             return data
 
         self.setup_columns(**kwargs)
@@ -330,33 +355,6 @@
         return data
 
 
-class ToasterTemplateView(TemplateView):
-    # renders a instance in a template, or returns the context as json
-    # the class-equivalent of the _template_renderer decorator for views
-
-    def __init__(self, *args, **kwargs):
-        super(ToasterTemplateView, self).__init__(*args, **kwargs)
-        self.context_entries = []
-
-    def get(self, *args, **kwargs):
-        if self.request.GET.get('format', None) == 'json':
-            from django.core.urlresolvers import reverse
-            from django.shortcuts import HttpResponse
-            from views import objtojson
-            from toastergui.templatetags.projecttags import json as jsonfilter
-
-            context = self.get_context_data(**kwargs)
-
-            for x in context.keys():
-                if x not in self.context_entries:
-                    del context[x]
-
-            context["error"] = "ok"
-
-            return HttpResponse(jsonfilter(context,  default=objtojson ),
-                            content_type = "application/json; charset=utf-8")
-
-        return super(ToasterTemplateView, self).get(*args, **kwargs)
 
 class ToasterTypeAhead(View):
     """ A typeahead mechanism to support the front end typeahead widgets """
diff --git a/yocto-poky/bitbake/lib/toaster/toastermain/settings.py b/yocto-poky/bitbake/lib/toaster/toastermain/settings.py
index b149a5e..b28ddb2 100644
--- a/yocto-poky/bitbake/lib/toaster/toastermain/settings.py
+++ b/yocto-poky/bitbake/lib/toaster/toastermain/settings.py
@@ -23,6 +23,11 @@
 
 import os, re
 
+# Temporary toggle for Image customisation
+CUSTOM_IMAGE = False
+if os.environ.get("CUSTOM_IMAGE", None) is not None:
+    CUSTOM_IMAGE = True
+
 DEBUG = True
 TEMPLATE_DEBUG = DEBUG
 
@@ -87,11 +92,9 @@
     else:
         raise Exception("FIXME: Please implement missing database url schema for url: %s" % dburl)
 
-
+BUILD_MODE = False
 if 'TOASTER_MANAGED' in os.environ and os.environ['TOASTER_MANAGED'] == "1":
-    MANAGED = True
-else:
-    MANAGED = False
+    BUILD_MODE = True
 
 # Allows current database settings to be exported as a DATABASE_URL environment variable value
 
diff --git a/yocto-poky/bitbake/lib/toaster/toastermain/urls.py b/yocto-poky/bitbake/lib/toaster/toastermain/urls.py
index 521588a..6c4a953 100644
--- a/yocto-poky/bitbake/lib/toaster/toastermain/urls.py
+++ b/yocto-poky/bitbake/lib/toaster/toastermain/urls.py
@@ -60,7 +60,7 @@
     #logger.info("Enabled django_toolbar extension")
 
 
-if toastermain.settings.MANAGED:
+if toastermain.settings.BUILD_MODE:
     urlpatterns = [
         # Uncomment the next line to enable the admin:
         url(r'^admin/', include(admin.site.urls)),
diff --git a/yocto-poky/bitbake/toaster-requirements.txt b/yocto-poky/bitbake/toaster-requirements.txt
index c4a2221..1d7d21b 100644
--- a/yocto-poky/bitbake/toaster-requirements.txt
+++ b/yocto-poky/bitbake/toaster-requirements.txt
@@ -2,5 +2,4 @@
 South==0.8.4
 argparse==1.2.1
 wsgiref==0.1.2
-filemagic==1.6
 beautifulsoup4>=4.4.0
diff --git a/yocto-poky/documentation/Makefile b/yocto-poky/documentation/Makefile
index 9197a40..99adea2 100644
--- a/yocto-poky/documentation/Makefile
+++ b/yocto-poky/documentation/Makefile
@@ -333,7 +333,7 @@
 TARFILES = toaster-manual.html toaster-manual-style.css \
 	   figures/toaster-title.png figures/simple-configuration.png \
 	   figures/hosted-service.png
-MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
+MANUALS = $(DOC)/$(DOC).html
 FIGURES = figures
 STYLESHEET = $(DOC)/*.css
 endif
diff --git a/yocto-poky/documentation/adt-manual/adt-manual.xml b/yocto-poky/documentation/adt-manual/adt-manual.xml
index 6ce62c9..67b330a 100644
--- a/yocto-poky/documentation/adt-manual/adt-manual.xml
+++ b/yocto-poky/documentation/adt-manual/adt-manual.xml
@@ -87,9 +87,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
        </revhistory>
 
diff --git a/yocto-poky/documentation/adt-manual/adt-package.xml b/yocto-poky/documentation/adt-manual/adt-package.xml
index f3ffa06..68eee9b 100644
--- a/yocto-poky/documentation/adt-manual/adt-package.xml
+++ b/yocto-poky/documentation/adt-manual/adt-package.xml
@@ -27,7 +27,7 @@
                 information about OPKG.</para></listitem>
             <listitem><para><emphasis>RPM:</emphasis> A more widely known PMS intended for GNU/Linux
                 distributions.
-                This PMS works with files packaged in an <filename>.rms</filename> format.
+                This PMS works with files packaged in an <filename>.rpm</filename> format.
                 The build system currently installs through this PMS by default.
                 See <ulink url='http://en.wikipedia.org/wiki/RPM_Package_Manager'></ulink>
                 for more information about RPM.</para></listitem>
diff --git a/yocto-poky/documentation/adt-manual/adt-prepare.xml b/yocto-poky/documentation/adt-manual/adt-prepare.xml
index 01f569f..65df1d0 100644
--- a/yocto-poky/documentation/adt-manual/adt-prepare.xml
+++ b/yocto-poky/documentation/adt-manual/adt-prepare.xml
@@ -364,6 +364,10 @@
                     Comments within the <filename>local.conf</filename> file
                     list the values you can use for the
                     <filename>MACHINE</filename> variable.
+                    If you do not change the <filename>MACHINE</filename>
+                    variable, the OpenEmbedded build system uses
+                    <filename>qemux86</filename> as the default target
+                    machine when building the cross-toolchain.
                     <note>
                         You can populate the Build Directory with the
                         cross-toolchains for more than a single architecture.
@@ -371,6 +375,17 @@
                         variable in the <filename>local.conf</filename> file and
                         re-run the <filename>bitbake</filename> command.
                     </note></para></listitem>
+                <listitem><para><emphasis>Make Sure Your Layers are Enabled:</emphasis>
+                    Examine the <filename>conf/bblayers.conf</filename> file
+                    and make sure that you have enabled all the compatible
+                    layers for your target machine.
+                    The OpenEmbedded build system needs to be aware of each
+                    layer you want included when building images and
+                    cross-toolchains.
+                    For information on how to enable a layer, see the
+                    "<ulink url='&YOCTO_DOCS_DEV_URL;#enabling-your-layer'>Enabling Your Layer</ulink>"
+                    section in the Yocto Project Development Manual.
+                    </para></listitem>
                 <listitem><para><emphasis>Generate the Cross-Toolchain:</emphasis>
                     Run <filename>bitbake meta-ide-support</filename> to
                     complete the cross-toolchain generation.
diff --git a/yocto-poky/documentation/bsp-guide/bsp-guide.xml b/yocto-poky/documentation/bsp-guide/bsp-guide.xml
index 5477ca8..d9bcc3f 100644
--- a/yocto-poky/documentation/bsp-guide/bsp-guide.xml
+++ b/yocto-poky/documentation/bsp-guide/bsp-guide.xml
@@ -99,9 +99,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
         </revhistory>
 
@@ -113,7 +113,7 @@
     <legalnotice>
       <para>
         Permission is granted to copy, distribute and/or modify this document under
-        the terms of the <ulink type="http" url="http://creativecommons.org/licenses/by-nc-sa/2.0/uk/">Creative Commons Attribution-Non-Commercial-Share Alike 2.0 UK: England &amp; Wales</ulink> as published by Creative Commons.
+        the terms of the <ulink type="http" url="http://creativecommons.org/licenses/by-nc-sa/2.0/uk/">Creative Commons Attribution-Share Alike 2.0 UK: England &amp; Wales</ulink> as published by Creative Commons.
       </para>
       <note>
           For the latest version of this manual associated with this
diff --git a/yocto-poky/documentation/dev-manual/dev-manual-common-tasks.xml b/yocto-poky/documentation/dev-manual/dev-manual-common-tasks.xml
index e927a89..f0836e8 100644
--- a/yocto-poky/documentation/dev-manual/dev-manual-common-tasks.xml
+++ b/yocto-poky/documentation/dev-manual/dev-manual-common-tasks.xml
@@ -2249,6 +2249,19 @@
                 Typically, setting these options is accomplished by running a
                 configure script with some options, or by modifying a build
                 configuration file.
+                <note>
+                    As of Yocto Project Release 7.1, some of the core recipes
+                    that package binary configuration scripts now disable the
+                    scripts due to the scripts previously requiring error-prone
+                    path substitution.
+                    The OpenEmbedded build system uses
+                    <filename>pkg-config</filename> now, which is much more
+                    robust.
+                    You can find a list of the <filename>*-config</filename>
+                    scripts that are disabled in the
+                    "<ulink url='&YOCTO_DOCS_REF_URL;#migration-1.7-binary-configuration-scripts-disabled'>Binary Configuration Scripts Disabled</ulink>"
+                    section in the Yocto Project Reference Manual.
+                </note>
             </para>
 
             <para>
@@ -2364,7 +2377,16 @@
             <para>
                 However, if the compile step fails, you need to diagnose the
                 failure.
-                Here are some common issues that cause failures:
+                Here are some common issues that cause failures.
+                <note>
+                    For cases where improper paths are detected for
+                    configuration files or for when libraries/headers cannot
+                    be found, be sure you are using the more robust
+                    <filename>pkg-config</filename>.
+                    See the note in section
+                    "<link linkend='new-recipe-configuring-the-recipe'>Configuring the Recipe</link>"
+                    for additional information.
+                </note>
                 <itemizedlist>
                     <listitem><para><emphasis>Parallel build failures:</emphasis>
                         These failures manifest themselves as intermittent
@@ -2708,23 +2730,20 @@
                         is configured, it might be important to mark the
                         packages produced as being specific to a particular
                         machine, or to mark them as not being specific to
-                        a particular machine or architecture at all.
-                        By default, packages produced for the target are
-                        marked as being specific to the architecture of the
-                        target machine because that is usually the desired
-                        result.
-                        However, if the recipe configures the software to be
-                        built specific to the target machine (e.g. the
+                        a particular machine or architecture at all.</para>
+                        <para>By default, packages apply to any machine with the
+                        same architecture as the target machine.
+                        When a recipe produces packages that are
+                        machine-specific (e.g. the
                         <ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'><filename>MACHINE</filename></ulink>
                         value is passed into the configure script or a patch
-                        is applied only for a particular machine), then you
-                        should mark the packages produced as being
-                        machine-specific by adding the following to the
+                        is applied only for a particular machine), you should
+                        mark them as such by adding the following to the
                         recipe:
                         <literallayout class='monospaced'>
      PACKAGE_ARCH = "${MACHINE_ARCH}"
-                        </literallayout>
-                        On the other hand, if the recipe produces packages
+                        </literallayout></para>
+                        <para>On the other hand, if the recipe produces packages
                         that do not contain anything specific to the target
                         machine or architecture at all (e.g. recipes
                         that simply package script files or configuration
@@ -3554,7 +3573,7 @@
      require conf/multilib.conf
      MULTILIBS = "multilib:lib32"
      DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
-     IMAGE_INSTALL = "lib32-connman"
+     IMAGE_INSTALL_append = " lib32-glib-2.0"
                     </literallayout>
                     This example enables an
                     additional library named <filename>lib32</filename> alongside the
@@ -3565,7 +3584,7 @@
                 </para>
 
                 <para>
-                    The example then includes <filename>lib32-connman</filename>
+                    The example then includes <filename>lib32-glib-2.0</filename>
                     in all the images, which illustrates one method of including a
                     multiple library dependency.
                     You can use a normal image build to include this dependency,
@@ -3575,7 +3594,7 @@
                     </literallayout>
                     You can also build Multilib packages specifically with a command like this:
                     <literallayout class='monospaced'>
-     $ bitbake lib32-connman
+     $ bitbake lib32-glib-2.0
                     </literallayout>
                 </para>
             </section>
@@ -4307,18 +4326,18 @@
 	            A source plugin is created as a subclass of
 	            <filename>SourcePlugin</filename>.
                 The plugin file containing it is added to
-	            <filename>scripts/lib/mic/plugins/source/</filename> to
+	            <filename>scripts/lib/wic/plugins/source/</filename> to
 	            make the plugin implementation available to the
 	            <filename>wic</filename> implementation.
                 For more information, see
-	            <filename>scripts/lib/mic/pluginbase.py</filename>.
+	            <filename>scripts/lib/wic/pluginbase.py</filename>.
             </para>
 
             <para>
 	            Source plugins can also be implemented and added by
 	            external layers.
                 As such, any plugins found in a
-	            <filename>scripts/lib/mic/plugins/source/</filename>
+	            <filename>scripts/lib/wic/plugins/source/</filename>
 	            directory in an external layer are also made
 	            available.
             </para>
@@ -4539,9 +4558,17 @@
                             option or the equivalent rootfs derived from the
 			                <filename>-e</filename> command-line
 			                option.
-                            Exactly what those contents and
-			                filesystem type end up being are dependent
-			                on the given plugin implementation.
+                            Exactly what those contents and filesystem type end
+                            up being are dependent on the given plugin
+                            implementation.
+                            </para>
+                            <para>If you do not use the
+                            <filename>--source</filename> option, the
+                            <filename>wic</filename> command creates an empty
+                            partition.
+                            Consequently, you must use the
+                            <filename>--size</filename> option to specify the
+                            size of the empty partition.
                             </para></listitem>
                         <listitem><para><emphasis><filename>--ondisk</filename> or <filename>--ondrive</filename>:</emphasis>
                             Forces the partition to be created on a particular
@@ -4585,6 +4612,49 @@
                             This option is a <filename>wic</filename>-specific
                             option that says to start a partition on an
                             x KBytes boundary.</para></listitem>
+                        <listitem><para><emphasis><filename>--no-table</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option.
+                            Using the option reserves space for the partition
+                            and causes it to become populated.
+                            However, the partition is not added to the
+                            partition table.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>--extra-space</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option that adds extra space after the space
+                            filled by the content of the partition.
+                            The final size can go beyond the size specified
+                            by the <filename>--size</filename> option.
+                            The default value is 10 Mbytes.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>--overhead-factor</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option that multiplies the size of the partition by
+                            the option's value.
+                            You must supply a value greater than or equal to
+                            "1".
+                            The default value is "1.3".
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>--part-type</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option that specifies the partition type globally
+                            unique identifier (GUID) for GPT partitions.
+                            You can find the list of partition type GUIDs
+                            at
+                            <ulink url='http://en.wikipedia.org/wiki/GUID_Partition_Table#Partition_type_GUIDs'></ulink>.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>--use-uuid</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option that causes <filename>wic</filename> to
+                            generate a random GUID for the partition.
+                            The generated identifier is used in the bootloader
+                            configuration to specify the root partition.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>--uuid</filename>:</emphasis>
+                            This option is a <filename>wic</filename>-specific
+                            option that specifies the partition UUID.
+                            </para></listitem>
                     </itemizedlist>
                 </para>
             </section>
@@ -8166,6 +8236,29 @@
                             must accept incoming connections from 192.168.7.0/24,
                             which is the default IP range used for tap devices
                             by <filename>runqemu</filename>.</para></listitem>
+                        <listitem><para><emphasis>Be sure your host has the
+                            correct packages installed:</emphasis>
+                            Depending on your host's distribution, you need
+                            to have the following packages installed:
+                            <itemizedlist>
+                                <listitem><para>Ubuntu and Debian:
+                                    <filename>sysstat</filename> and
+                                    <filename>iproute2</filename>
+                                    </para></listitem>
+                                <listitem><para>OpenSUSE:
+                                    <filename>sysstat</filename> and
+                                    <filename>iproute2</filename>
+                                    </para></listitem>
+                                <listitem><para>Fedora:
+                                    <filename>sysstat</filename> and
+                                    <filename>iproute</filename>
+                                    </para></listitem>
+                                <listitem><para>CentOS:
+                                    <filename>sysstat</filename> and
+                                    <filename>iproute</filename>
+                                    </para></listitem>
+                            </itemizedlist>
+                        </para></listitem>
                     </itemizedlist>
                 </para>
 
@@ -8563,7 +8656,7 @@
                         </literallayout></para></listitem>
                     <listitem><para><emphasis>Manually running tests:</emphasis>
                         To manually run the tests, first globally inherit the
-                        <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-testimage'><filename>testimage</filename></ulink>
+                        <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-testimage*'><filename>testimage</filename></ulink>
                         class by editing your <filename>local.conf</filename>
                         file:
                         <literallayout class='monospaced'>
diff --git a/yocto-poky/documentation/dev-manual/dev-manual-model.xml b/yocto-poky/documentation/dev-manual/dev-manual-model.xml
index 6e0ded2..6e42c7b 100644
--- a/yocto-poky/documentation/dev-manual/dev-manual-model.xml
+++ b/yocto-poky/documentation/dev-manual/dev-manual-model.xml
@@ -939,14 +939,14 @@
                                 For example, if you are using Luna, do the
                                 following:
                                 <literallayout class='monospaced'>
-     $ git checkout luna/yocto-1.8
+     $ git checkout luna/yocto-&DISTRO;
                                 </literallayout>
                                 This puts you in a detached HEAD state, which
                                 is fine since you are only going to be building
                                 and not developing.
                                 <note>
                                     If you are building kepler, checkout the
-                                    <filename>kepler/yocto-1.8</filename>
+                                    <filename>kepler/yocto-&DISTRO;</filename>
                                     branch.
                                 </note>
                                 </para></listitem>
@@ -975,13 +975,13 @@
                                 Be sure to provide the tag name, documentation
                                 branch, and a release name.
                                 Here is an example that uses the
-                                <filename>luna/yocto-1.8</filename> tag, the
+                                <filename>luna/yocto-&DISTRO;</filename> tag, the
                                 <filename>master</filename> documentation
                                 branch, and
                                 <filename>&DISTRO_NAME;</filename> for the
                                 release name:
                                 <literallayout class='monospaced'>
-     $ ECLIPSE_HOME=/home/scottrif/eclipse-poky/scripts/eclipse ./build.sh luna/yocto-1.8 master &DISTRO_NAME; 2>&amp;1 | tee -a build.log
+     $ ECLIPSE_HOME=/home/scottrif/eclipse-poky/scripts/eclipse ./build.sh luna/yocto-&DISTRO; master &DISTRO_NAME; 2>&amp;1 | tee -a build.log
                                 </literallayout>
                                 After running the script, the file
                                 <filename>org.yocto.sdk-</filename><replaceable>release</replaceable><filename>-</filename><replaceable>date</replaceable><filename>-archive.zip</filename>
@@ -1340,6 +1340,40 @@
                 "Project" menu.
                 The console should update and you can note the cross-compiler
                 you are using.
+                <note>
+                    When building "Yocto Project ADT Autotools" projects, the Eclipse
+                    IDE might display error messages for Functions/Symbols/Types
+                    that cannot be "resolved", even when the related include file
+                    is listed at the project navigator and when the project is
+                    able to build.
+                    For these cases only, it is recommended to add a new linked
+                    folder to the appropriate sysroot.
+                    Use these steps to add the linked folder:
+                    <orderedlist>
+                        <listitem><para>
+                            Select the project.
+                            </para></listitem>
+                        <listitem><para>
+                            Select "Folder" from the
+                            <filename>File > New</filename> menu.
+                            </para></listitem>
+                        <listitem><para>
+                            In the "New Folder" Dialog, select "Link to alternate
+                            location (linked folder)".
+                            </para></listitem>
+                        <listitem><para>
+                            Click "Browse" to navigate to the include folder inside
+                            the same sysroot location selected in the Yocto Project
+                            configuration preferences.
+                            </para></listitem>
+                        <listitem><para>
+                            Click "OK".
+                            </para></listitem>
+                        <listitem><para>
+                            Click "Finish" to save the linked folder.
+                            </para></listitem>
+                    </orderedlist>
+                </note>
             </para>
         </section>
 
@@ -2183,7 +2217,7 @@
                 </literallayout>
                 <note>
                     For complete syntax, use the
-                    <filename>devtool update-recipe --help</filename> command.
+                    <filename>devtool build --help</filename> command.
                 </note>
                 Building your software using <filename>devtool build</filename>
                 is identical to using BitBake to build the software.
diff --git a/yocto-poky/documentation/dev-manual/dev-manual-qemu.xml b/yocto-poky/documentation/dev-manual/dev-manual-qemu.xml
index ccc915f..903028f 100644
--- a/yocto-poky/documentation/dev-manual/dev-manual-qemu.xml
+++ b/yocto-poky/documentation/dev-manual/dev-manual-qemu.xml
@@ -197,14 +197,14 @@
                     but also is not as easy to use or comprehensive
                     as the default.
                     </para></listitem>
-                <listitem><para><filename>kvm</filename>:
+                <listitem><para id='kvm-cond'><filename>kvm</filename>:
                     Enables KVM when running "qemux86" or "qemux86-64"
                     QEMU architectures.
                     For KVM to work, all the following conditions must be met:
                     <itemizedlist>
                         <listitem><para>
                             Your <replaceable>MACHINE</replaceable> must be either
-                            "qemux86" or "qemux86-64".
+                            "qemux86" or "qemux86-64".
                             </para></listitem>
                         <listitem><para>
                             Your build host has to have the KVM modules
@@ -212,13 +212,25 @@
                             <filename>/dev/kvm</filename>.
                             </para></listitem>
                         <listitem><para>
-                            Your build host has to have virtio net device, which
-                            are <filename>/dev/vhost-net</filename>.
-                            </para></listitem>
-                        <listitem><para>
                             The  build host <filename>/dev/kvm</filename>
                             directory has to be both writable and readable.
                             </para></listitem>
+                    </itemizedlist>
+                    </para></listitem>
+                <listitem><para><filename>kvm-vhost</filename>:
+                    Enables KVM with VHOST support when running "qemux86" or "qemux86-64"
+                    QEMU architectures.
+                    For KVM with VHOST to work, the following conditions must
+                    be met:
+                    <itemizedlist>
+                        <listitem><para>
+                            <link linkend='kvm-cond'>kvm</link> option
+                            conditions must be met.
+                            </para></listitem>
+                        <listitem><para>
+                            Your build host has to have virtio net device, which
+                            are <filename>/dev/vhost-net</filename>.
+                            </para></listitem>
                         <listitem><para>
                             The build host <filename>/dev/vhost-net</filename>
                             directory has to be either readable or writable
diff --git a/yocto-poky/documentation/dev-manual/dev-manual.xml b/yocto-poky/documentation/dev-manual/dev-manual.xml
index 608d3a9..3ddd01f 100644
--- a/yocto-poky/documentation/dev-manual/dev-manual.xml
+++ b/yocto-poky/documentation/dev-manual/dev-manual.xml
@@ -26,7 +26,7 @@
                 <affiliation>
                     <orgname>Intel Corporation</orgname>
                 </affiliation>
-                <email>scott.m.rifenbark@intel.com</email>
+                <email>srifenbark@gmail.com</email>
             </author>
         </authorgroup>
 
@@ -77,9 +77,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
         </revhistory>
 
diff --git a/yocto-poky/documentation/kernel-dev/kernel-dev-common.xml b/yocto-poky/documentation/kernel-dev/kernel-dev-common.xml
index 27c82ce..ab7f80f 100644
--- a/yocto-poky/documentation/kernel-dev/kernel-dev-common.xml
+++ b/yocto-poky/documentation/kernel-dev/kernel-dev-common.xml
@@ -389,6 +389,10 @@
                 You can use the entire <filename>.config</filename> file as the
                 <filename>defconfig</filename> file as described in the
                 "<link linkend='changing-the-configuration'>Changing the Configuration</link>" section.
+                For more information on the <filename>.config</filename> file,
+                see the
+                "<ulink url='&YOCTO_DOCS_DEV_URL;#using-menuconfig'>Using <filename>menuconfig</filename></ulink>"
+                section in the Yocto Project Development Manual.
             </para>
 
             <para>
diff --git a/yocto-poky/documentation/kernel-dev/kernel-dev.xml b/yocto-poky/documentation/kernel-dev/kernel-dev.xml
index e3df2cc..38850fa 100644
--- a/yocto-poky/documentation/kernel-dev/kernel-dev.xml
+++ b/yocto-poky/documentation/kernel-dev/kernel-dev.xml
@@ -62,9 +62,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
         </revhistory>
 
diff --git a/yocto-poky/documentation/mega-manual/figures/add-variable.png b/yocto-poky/documentation/mega-manual/figures/add-variable.png
new file mode 100644
index 0000000..6bdcca7
--- /dev/null
+++ b/yocto-poky/documentation/mega-manual/figures/add-variable.png
Binary files differ
diff --git a/yocto-poky/documentation/mega-manual/figures/bash-oecore.png b/yocto-poky/documentation/mega-manual/figures/bash-oecore.png
new file mode 100644
index 0000000..801a5d9
--- /dev/null
+++ b/yocto-poky/documentation/mega-manual/figures/bash-oecore.png
Binary files differ
diff --git a/yocto-poky/documentation/mega-manual/figures/set-variable.png b/yocto-poky/documentation/mega-manual/figures/set-variable.png
new file mode 100644
index 0000000..d36b527
--- /dev/null
+++ b/yocto-poky/documentation/mega-manual/figures/set-variable.png
Binary files differ
diff --git a/yocto-poky/documentation/mega-manual/figures/variable-added.png b/yocto-poky/documentation/mega-manual/figures/variable-added.png
new file mode 100644
index 0000000..518f25f
--- /dev/null
+++ b/yocto-poky/documentation/mega-manual/figures/variable-added.png
Binary files differ
diff --git a/yocto-poky/documentation/mega-manual/mega-manual.xml b/yocto-poky/documentation/mega-manual/mega-manual.xml
index a75ebf1..5c1faec 100644
--- a/yocto-poky/documentation/mega-manual/mega-manual.xml
+++ b/yocto-poky/documentation/mega-manual/mega-manual.xml
@@ -35,7 +35,7 @@
                 <affiliation>
                     <orgname>Intel Corporation</orgname>
                 </affiliation>
-                <email>scott.m.rifenbark@intel.com</email>
+                <email>srifenbark@gmail.com</email>
             </author>
         </authorgroup>
 
@@ -46,9 +46,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
        </revhistory>
 
diff --git a/yocto-poky/documentation/poky.ent b/yocto-poky/documentation/poky.ent
index 07c4f6d..33d52c0 100644
--- a/yocto-poky/documentation/poky.ent
+++ b/yocto-poky/documentation/poky.ent
@@ -1,7 +1,7 @@
-<!ENTITY DISTRO "1.9">
-<!ENTITY DISTRO_COMPRESSED "19">
-<!ENTITY DISTRO_NAME "tbd">
-<!ENTITY YOCTO_DOC_VERSION "1.9">
+<!ENTITY DISTRO "2.0">
+<!ENTITY DISTRO_COMPRESSED "20">
+<!ENTITY DISTRO_NAME "jethro">
+<!ENTITY YOCTO_DOC_VERSION "2.0">
 <!ENTITY POKYVERSION "14.0.0">
 <!ENTITY POKYVERSION_COMPRESSED "1400">
 <!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
@@ -67,4 +67,5 @@
 <!ENTITY OPENSUSE_HOST_PACKAGES_ESSENTIAL "python gcc gcc-c++ git chrpath make wget python-xml \
      diffstat makeinfo python-curses patch socat">
 <!ENTITY CENTOS_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
-     diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath socat">
+     diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath socat \
+     perl-Data-Dumper perl-Text-ParseWords perl-Thread-Queue">
diff --git a/yocto-poky/documentation/profile-manual/profile-manual.xml b/yocto-poky/documentation/profile-manual/profile-manual.xml
index 38620df..7f9b2c4 100644
--- a/yocto-poky/documentation/profile-manual/profile-manual.xml
+++ b/yocto-poky/documentation/profile-manual/profile-manual.xml
@@ -62,9 +62,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
         </revhistory>
 
diff --git a/yocto-poky/documentation/ref-manual/closer-look.xml b/yocto-poky/documentation/ref-manual/closer-look.xml
index 27f674a..45dcd9b 100644
--- a/yocto-poky/documentation/ref-manual/closer-look.xml
+++ b/yocto-poky/documentation/ref-manual/closer-look.xml
@@ -1059,7 +1059,7 @@
                 the root filesystem image that lists out, line-by-line, the
                 installed packages.
                 This manifest file is useful for the
-                <link linkend='ref-classes-testimage'><filename>testimage</filename></link>
+                <link linkend='ref-classes-testimage*'><filename>testimage</filename></link>
                 class, for example, to determine whether or not to run
                 specific tests.
                 See the
diff --git a/yocto-poky/documentation/ref-manual/introduction.xml b/yocto-poky/documentation/ref-manual/introduction.xml
index 5250e26..0b16544 100644
--- a/yocto-poky/documentation/ref-manual/introduction.xml
+++ b/yocto-poky/documentation/ref-manual/introduction.xml
@@ -154,11 +154,14 @@
                 <listitem><para>Ubuntu 13.10</para></listitem> -->
                 <listitem><para>Ubuntu 14.04 (LTS)</para></listitem>
                 <listitem><para>Ubuntu 14.10</para></listitem>
+                <listitem><para>Ubuntu 15.04</para></listitem>
+                <listitem><para>Ubuntu 15.10</para></listitem>
 <!--                <listitem><para>Fedora 16 (Verne)</para></listitem>
                 <listitem><para>Fedora 17 (Spherical)</para></listitem>
                 <listitem><para>Fedora release 19 (Schrödinger's Cat)</para></listitem>
                 <listitem><para>Fedora release 20 (Heisenbug)</para></listitem> -->
                 <listitem><para>Fedora release 21</para></listitem>
+                <listitem><para>Fedora release 22</para></listitem>
 <!--                <listitem><para>CentOS release 5.6 (Final)</para></listitem>
                 <listitem><para>CentOS release 5.7 (Final)</para></listitem>
                 <listitem><para>CentOS release 5.8 (Final)</para></listitem>
@@ -250,8 +253,15 @@
                         Packages needed if you are going to be using the
                         <ulink url='&YOCTO_DOCS_ADT_URL;#using-the-adt-installer'>Application Development Toolkit (ADT) Installer</ulink>:
                         <literallayout class='monospaced'>
-     $ sudo apt-get install autoconf automake libtool libglib2.0-dev
+     $ sudo apt-get install autoconf automake libtool libglib2.0-dev libarchive-dev
                         </literallayout></para></listitem>
+                    <listitem><para><emphasis>OpenEmbedded Self-Test (<filename>oe-selftest</filename>):</emphasis>
+                        Packages needed if you are going to run
+                        <filename>oe-selftest</filename>:
+                        <literallayout class='monospaced'>
+     $ sudo apt-get install python-git
+                        </literallayout>
+                        </para></listitem>
                 </itemizedlist>
             </para>
         </section>
@@ -267,28 +277,35 @@
                         Packages needed to build an image for a headless
                         system:
                         <literallayout class='monospaced'>
-     $ sudo yum install &FEDORA_HOST_PACKAGES_ESSENTIAL;
+     $ sudo dnf install &FEDORA_HOST_PACKAGES_ESSENTIAL;
                         </literallayout></para></listitem>
                     <listitem><para><emphasis>Graphical and Eclipse Plug-In Extras:</emphasis>
                         Packages recommended if the host system has graphics
                         support or if you are going to use the Eclipse
                         IDE:
                         <literallayout class='monospaced'>
-     $ sudo yum install SDL-devel xterm perl-Thread-Queue
+     $ sudo dnf install SDL-devel xterm
                         </literallayout></para></listitem>
                     <listitem><para><emphasis>Documentation:</emphasis>
                         Packages needed if you are going to build out the
                         Yocto Project documentation manuals:
                         <literallayout class='monospaced'>
-     $ sudo yum install make docbook-style-dsssl docbook-style-xsl \
+     $ sudo dnf install make docbook-style-dsssl docbook-style-xsl \
      docbook-dtds docbook-utils fop libxslt dblatex xmlto xsltproc
                         </literallayout></para></listitem>
                     <listitem><para><emphasis>ADT Installer Extras:</emphasis>
                         Packages needed if you are going to be using the
                         <ulink url='&YOCTO_DOCS_ADT_URL;#using-the-adt-installer'>Application Development Toolkit (ADT) Installer</ulink>:
                         <literallayout class='monospaced'>
-     $ sudo yum install autoconf automake libtool glib2-devel
+     $ sudo dnf install autoconf automake libtool glib2-devel libarchive-devel
                         </literallayout></para></listitem>
+                    <listitem><para><emphasis>OpenEmbedded Self-Test (<filename>oe-selftest</filename>):</emphasis>
+                        Packages needed if you are going to run
+                        <filename>oe-selftest</filename>:
+                        <literallayout class='monospaced'>
+     $ sudo dnf install GitPython
+                        </literallayout>
+                        </para></listitem>
                 </itemizedlist>
             </para>
         </section>
@@ -323,7 +340,13 @@
                         Packages needed if you are going to be using the
                         <ulink url='&YOCTO_DOCS_ADT_URL;#using-the-adt-installer'>Application Development Toolkit (ADT) Installer</ulink>:
                         <literallayout class='monospaced'>
-     $ sudo zypper install autoconf automake libtool glib2-devel
+     $ sudo zypper install autoconf automake libtool glib2-devel libarchive-devel
+                        </literallayout></para></listitem>
+                    <listitem><para><emphasis>OpenEmbedded Self-Test (<filename>oe-selftest</filename>):</emphasis>
+                        Packages needed if you are going to run
+                        <filename>oe-selftest</filename>:
+                        <literallayout class='monospaced'>
+     $ sudo zypper install python-GitPython
                         </literallayout></para></listitem>
                 </itemizedlist>
             </para>
@@ -336,14 +359,14 @@
                 The following list shows the required packages by function
                 given a supported CentOS Linux distribution:
                 <note>
-                    For CentOS 6.x, some of the versions of the components
-                    provided by the distribution are too old (e.g. Git, Python,
-                    and tar).
-                    It is recommended that you install the buildtools in order
-                    to provide versions that will work with the OpenEmbedded
-                    build system.
-                    For information on how to install the buildtools tarball,
-                    see the
+                    For CentOS 6.x, some of the versions
+                    of the components provided by the distribution are
+                    too old (e.g. Git, Python, and tar).
+                    It is recommended that you install the buildtools
+                    in order to provide versions that will work with
+                    the OpenEmbedded build system.
+                    For information on how to install the buildtools
+                    tarball, see the
                     "<link linkend='required-git-tar-and-python-versions'>Required Git, Tar, and Python Versions</link>"
                     section.
                 </note>
@@ -372,8 +395,24 @@
                         Packages needed if you are going to be using the
                         <ulink url='&YOCTO_DOCS_ADT_URL;#using-the-adt-installer'>Application Development Toolkit (ADT) Installer</ulink>:
                         <literallayout class='monospaced'>
-     $ sudo yum install autoconf automake libtool glib2-devel
-                        </literallayout></para></listitem>
+     $ sudo yum install autoconf automake libtool glib2-devel libarchive-devel
+                        </literallayout>
+                        <note>
+                            For CentOS 6.x, in order for the
+                            ADT installer script to work, you must have
+                            installed the <filename>liblzma5</filename>,
+                            <filename>libarchive3.x</filename>, and
+                            <filename>libarchive-devel-3.1.3</filename>
+                            (or older) packages, in that order.
+                        </note>
+                        </para></listitem>
+                    <listitem><para><emphasis>OpenEmbedded Self-Test (<filename>oe-selftest</filename>):</emphasis>
+                        Packages needed if you are going to run
+                        <filename>oe-selftest</filename>:
+                        <literallayout class='monospaced'>
+     $ sudo yum install GitPython
+                        </literallayout>
+                        </para></listitem>
                 </itemizedlist>
             </para>
         </section>
diff --git a/yocto-poky/documentation/ref-manual/migration.xml b/yocto-poky/documentation/ref-manual/migration.xml
index dc75eb8..21763e3 100644
--- a/yocto-poky/documentation/ref-manual/migration.xml
+++ b/yocto-poky/documentation/ref-manual/migration.xml
@@ -980,7 +980,7 @@
         <para>
             A new automated image testing framework has been added
             through the
-            <link linkend='ref-classes-testimage'><filename>testimage*.bbclass</filename></link>
+            <link linkend='ref-classes-testimage*'><filename>testimage.bbclass</filename></link>
             class.
             This framework replaces the older
             <filename>imagetest-qemu</filename> framework.
@@ -1254,7 +1254,7 @@
                     <listitem><para><filename>bb.MalformedUrl</filename>:
                         Use <filename>bb.fetch.MalformedUrl</filename>.
                         </para></listitem>
-                    <listitem><para><filename>bb.fetch.encodeurl</filename>:
+                    <listitem><para><filename>bb.encodeurl</filename>:
                         Use <filename>bb.fetch.encodeurl</filename>.
                         </para></listitem>
                     <listitem><para><filename>bb.decodeurl</filename>:
@@ -1485,8 +1485,9 @@
             Recipes building Autotools-based
             software that fails to build with a separate build directory
             should be changed to inherit from the
-            <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
-            class instead of the <filename>autotools</filename> class.
+            <link linkend='ref-classes-autotools'><filename>autotools-brokensep</filename></link>
+            class instead of the <filename>autotools</filename> or
+            <filename>autotools_stage</filename> classes.
         </para>
     </section>
 
@@ -1794,8 +1795,9 @@
                     need to either patch the software so that it can build
                     separately, or you will need to change the recipe to
                     inherit the
-                    <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
-                    class instead of the <filename>autotools</filename> class.
+                    <link linkend='ref-classes-autotools'><filename>autotools-brokensep</filename></link>
+                    class instead of the <filename>autotools</filename> or
+                    <filename>autotools_stage</filename> classes.
                     </para></listitem>
                 <listitem><para><emphasis>
                     The <filename>--foreign</filename> option is
@@ -2313,6 +2315,427 @@
     </section>
 </section>
 
+<section id='moving-to-the-yocto-project-2.0-release'>
+    <title>Moving to the Yocto Project 2.0 Release</title>
+
+    <para>
+        This section provides migration information for moving to the
+        Yocto Project 2.0 Release from the prior release.
+    </para>
+
+    <section id='migration-2.0-gcc-5'>
+        <title>GCC 5</title>
+
+        <para>
+            The default compiler is now GCC 5.2.
+            This change has required fixes for compilation errors in a number
+            of other recipes.
+        </para>
+
+        <para>
+            One important example is a fix for when the Linux kernel freezes at
+            boot time on ARM when built with GCC 5.
+            If you are using your own kernel recipe or source tree and
+            building for ARM, you will likely need to apply this
+            <ulink url='https://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit?id=a077224fd35b2f7fbc93f14cf67074fc792fbac2'>patch</ulink>.
+            The standard <filename>linux-yocto</filename> kernel source tree
+            already has a workaround for the same issue.
+        </para>
+
+        <para>
+            For further details, see
+            <ulink url='https://gcc.gnu.org/gcc-5/changes.html'></ulink> and
+            the porting guide at
+            <ulink url='https://gcc.gnu.org/gcc-5/porting_to.html'></ulink>.
+        </para>
+
+        <para>
+            Alternatively, you can switch back to GCC 4.9 or 4.8 by
+            setting <filename>GCCVERSION</filename> in your configuration,
+            as follows:
+            <literallayout class='monospaced'>
+     GCCVERSION = "4.9%"
+            </literallayout>
+        </para>
+    </section>
+
+    <section id='migration-2.0-Gstreamer-0.10-removed'>
+        <title>Gstreamer 0.10 Removed</title>
+
+        <para>
+            Gstreamer 0.10 has been removed in favor of Gstreamer 1.x.
+            As part of the change, recipes for Gstreamer 0.10 and related
+            software are now located
+            in <filename>meta-multimedia</filename>.
+            This change results in Qt4 having Phonon and Gstreamer
+            support in QtWebkit disabled by default.
+        </para>
+    </section>
+
+    <section id='migration-2.0-removed-recipes'>
+        <title>Removed Recipes</title>
+
+        <para>
+            The following recipes have been moved or removed:
+            <itemizedlist>
+                <listitem><para>
+                    <filename>bluez4</filename>: The recipe is obsolete and
+                    has been moved due to <filename>bluez5</filename>
+                    becoming fully integrated.
+                    The <filename>bluez4</filename> recipe now resides in
+                    <filename>meta-oe</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>gamin</filename>: The recipe is obsolete and
+                    has been removed.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>gnome-icon-theme</filename>: The recipe's
+                    functionality has been replaced by
+                    <filename>adwaita-icon-theme</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    Gstreamer 0.10 Recipes: Recipes for Gstreamer 0.10 have
+                    been removed in favor of the recipes for Gstreamer 1.x.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>insserv</filename>: The recipe is obsolete and
+                    has been removed.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>libunique</filename>: The recipe is no longer
+                    used and has been moved to <filename>meta-oe</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>midori</filename>: The recipe's functionality
+                    has been replaced by <filename>epiphany</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>python-gst</filename>: The recipe is obsolete
+                    and has been removed since it only contains bindings for
+                    Gstreamer 0.10.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>qt-mobility</filename>: The recipe is obsolete and
+                    has been removed since it requires
+                    <filename>Gstreamer 0.10</filename>, which has been
+                    replaced.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>subversion</filename>: All 1.6.x versions of this
+                    recipe have been removed.
+                    </para></listitem>
+                <listitem><para>
+                    <filename>webkit-gtk</filename>: The older 1.8.3 version
+                    of this recipe has been removed in favor of
+                    <filename>webkitgtk</filename>.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='migration-2.0-bitbake-datastore-improvements'>
+        <title>BitBake datastore improvements</title>
+
+        <para>
+            The method by which BitBake's datastore handles overrides has
+            changed.
+            Overrides are now applied dynamically and
+            <filename>bb.data.update_data()</filename> is now a no-op.
+            Thus, <filename>bb.data.update_data()</filename> is no longer
+            required in order to apply the correct overrides.
+            In practice, this change is unlikely to require any changes to
+            Metadata.
+            However, these minor changes in behavior exist:
+            <itemizedlist>
+                <listitem><para>
+                    All potential overrides are now visible in the variable
+                    history as seen when you run the following:
+                    <literallayout class='monospaced'>
+     $ bitbake -e
+                    </literallayout>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>d.delVar('VARNAME')</filename> and
+                    <filename>d.setVar('VARNAME', None)</filename> result
+                    in the variable and all of its overrides being cleared out.
+                    Before the change, only the non-overridden values
+                    were cleared.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='migration-2.0-shell-message-function-changes'>
+        <title>Shell Message Function Changes</title>
+
+        <para>
+            The shell versions of the BitBake message functions (i.e.
+            <filename>bbdebug</filename>, <filename>bbnote</filename>,
+            <filename>bbwarn</filename>, <filename>bbplain</filename>,
+            <filename>bberror</filename>, and <filename>bbfatal</filename>)
+            are now connected through to their BitBake equivalents
+            <filename>bb.debug()</filename>, <filename>bb.note()</filename>,
+            <filename>bb.warn()</filename>, <filename>bb.plain()</filename>,
+            <filename>bb.error()</filename>, and
+            <filename>bb.fatal()</filename>, respectively.
+            Thus, those message functions that you would expect to be printed
+            by the BitBake UI are now actually printed.
+            In practice, this change means two things:
+            <itemizedlist>
+                <listitem><para>
+                    If you now see messages on the console that you did not
+                    previously see as a result of this change, you might
+                    need to clean up the calls to
+                    <filename>bbwarn</filename>, <filename>bberror</filename>,
+                    and so forth.
+                    Or, you might want to simply remove the calls.
+                    </para></listitem>
+                <listitem><para>
+                    The <filename>bbfatal</filename> message function now
+                    suppresses the full error log in the UI, which means any
+                    calls to <filename>bbfatal</filename> where you still
+                    wish to see the full error log should be replaced by
+                    <filename>die</filename> or
+                    <filename>bbfatal_log</filename>.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='migration-2.0-extra-development-debug-package-cleanup'>
+        <title>Extra Development/Debug Package Cleanup</title>
+
+        <para>
+            The following recipes have had extra
+            <filename>dev/dbg</filename> packages removed:
+            <itemizedlist>
+                <listitem><para>
+                    <filename>acl</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>apmd</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>aspell</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>attr</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>augeas</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>bzip2</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>cogl</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>curl</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>elfutils</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>gcc-target</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>libgcc</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>libtool</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>libxmu</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>opkg</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>pciutils</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>rpm</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>sysfsutils</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>tiff</filename>
+                    </para></listitem>
+                <listitem><para>
+                    <filename>xz</filename>
+                    </para></listitem>
+            </itemizedlist>
+            All of the above recipes now conform to the standard packaging
+            scheme where a single <filename>-dev</filename>,
+            <filename>-dbg</filename>, and <filename>-staticdev</filename>
+            package exists per recipe.
+        </para>
+    </section>
+
+    <section id='migration-2.0-recipe-maintenance-tracking-data-moved-to-oe-core'>
+        <title>Recipe Maintenance Tracking Data Moved to OE-Core</title>
+
+        <para>
+            Maintenance tracking data for recipes that was previously part
+            of <filename>meta-yocto</filename> has been moved to OE-Core.
+            The change includes <filename>package_regex.inc</filename> and
+            <filename>distro_alias.inc</filename>, which are typically enabled
+            when using the
+            <link linkend='ref-classes-distrodata'><filename>distrodata</filename></link>
+            class.
+            Additionally, the contents of
+            <filename>upstream_tracking.inc</filename> have now been split out
+            to the relevant recipes.
+        </para>
+    </section>
+
+    <section id='migration-2.0-automatic-stale-sysroot-file-cleanup'>
+        <title>Automatic Stale Sysroot File Cleanup</title>
+
+        <para>
+            Stale files from recipes that no longer exist in the current
+            configuration are now automatically removed from
+            sysroot as well as removed from
+            any other place managed by shared state.
+            This automatic cleanup means that the build system now properly
+            handles situations such as renaming the build system side of
+            recipes, removal of layers from
+            <filename>bblayers.conf</filename>, and
+            <link linkend='var-DISTRO_FEATURES'><filename>DISTRO_FEATURES</filename></link>
+            changes.
+        </para>
+
+        <para>
+            Additionally, work directories for old versions of recipes are
+            now pruned.
+            If you wish to disable pruning old work directories, you can set
+            the following variable in your configuration:
+            <literallayout class='monospaced'>
+     SSTATE_PRUNE_OBSOLETEWORKDIR = "0"
+            </literallayout>
+        </para>
+    </section>
+
+    <section id='migration-2.0-linux-yocto-kernel-metadata-repository-now-split-from-source'>
+        <title><filename>linux-yocto</filename> Kernel Metadata Repository Now Split from Source</title>
+
+        <para>
+            The <filename>linux-yocto</filename> tree has up to now been a
+            combined set of kernel changes and configuration (meta) data
+            carried in a single tree.
+            While this format is effective at keeping kernel configuration and
+            source modifications synchronized, it is not always obvious to
+            developers how to manipulate the Metadata as compared to the
+            source.
+        </para>
+
+        <para>
+            Metadata processing has now been removed from the
+            <link linkend='ref-classes-kernel-yocto'><filename>kernel-yocto</filename></link>
+            class and the external Metadata repository
+            <filename>yocto-kernel-cache</filename>, which has always been used
+            to seed the <filename>linux-yocto</filename> "meta" branch.
+            This separate <filename>linux-yocto</filename> cache repository
+            is now the primary location for this data.
+            Due to this change, <filename>linux-yocto</filename> is no longer
+            able to process combined trees.
+            Thus, if you need to have your own combined kernel repository,
+            you must do the split there as well and update your recipes
+            accordingly.
+            See the <filename>meta/recipes-kernel/linux/linux-yocto_4.1.bb</filename>
+            recipe for an example.
+        </para>
+    </section>
+
+    <section id='migration-2.0-additional-qa-checks'>
+        <title>Additional QA checks</title>
+
+        <para>
+            The following QA checks have been added:
+            <itemizedlist>
+                <listitem><para>
+                    Added a "host-user-contaminated" check for ownership
+                    issues for packaged files outside of
+                    <filename>/home</filename>.
+                    The check looks for files that are incorrectly owned by the
+                    user that ran BitBake instead of owned by a valid user in
+                    the target system.
+                    </para></listitem>
+                <listitem><para>
+                    Added an "invalid-chars" check for invalid (non-UTF8)
+                    characters in recipe metadata variable values
+                    (i.e.
+                    <link linkend='var-DESCRIPTION'><filename>DESCRIPTION</filename></link>,
+                    <link linkend='var-SUMMARY'><filename>SUMMARY</filename></link>,
+                    <link linkend='var-LICENSE'><filename>LICENSE</filename></link>,
+                    and
+                    <link linkend='var-SECTION'><filename>SECTION</filename></link>).
+                    Some package managers do not support these characters.
+                    </para></listitem>
+                <listitem><para>
+                    Added an "invalid-packageconfig" check for any options
+                    specified in
+                    <link linkend='var-PACKAGECONFIG'><filename>PACKAGECONFIG</filename></link>
+                    that do not match any <filename>PACKAGECONFIG</filename>
+                    option defined for the recipe.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+
+    <section id='migration-2.0-miscellaneous'>
+        <title>Miscellaneous Changes</title>
+
+        <para>
+            These additional changes exist:
+            <itemizedlist>
+                <listitem><para>
+                    <filename>gtk-update-icon-cache</filename> has been
+                    renamed to <filename>gtk-icon-utils</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    The <filename>tools-profile</filename>
+                    <link linkend='var-IMAGE_FEATURES'><filename>IMAGE_FEATURES</filename></link>
+                    item as well as its corresponding packagegroup and
+                    <filename>packagegroup-core-tools-profile</filename> no
+                    longer bring in <filename>oprofile</filename>.
+                    Bringing in <filename>oprofile</filename> was originally
+                    added to aid compilation on resource-constrained
+                    targets.
+                    However, this aid has not been widely used and is not
+                    likely to be used going forward due to the more powerful
+                    target platforms and the existence of better
+                    cross-compilation tools.
+                    </para></listitem>
+                <listitem><para>
+                    The
+                    <link linkend='var-IMAGE_FSTYPES'><filename>IMAGE_FSTYPES</filename></link>
+                    variable's default value now specifies
+                    <filename>ext4</filename> instead of
+                    <filename>ext3</filename>.
+                    </para></listitem>
+                <listitem><para>
+                    All support for the <filename>PRINC</filename>
+                    variable has been removed.
+                    </para></listitem>
+                <listitem><para>
+                    The <filename>packagegroup-core-full-cmdline</filename>
+                    packagegroup no longer brings in
+                    <filename>lighttpd</filename> due to the fact that
+                    bringing in <filename>lighttpd</filename> is not really in
+                    line with the packagegroup's purpose, which is to add full
+                    versions of command-line tools that by default are
+                    provided by <filename>busybox</filename>.
+                    </para></listitem>
+            </itemizedlist>
+        </para>
+    </section>
+</section>
+
+
 </chapter>
 <!--
 vim: expandtab tw=80 ts=4
diff --git a/yocto-poky/documentation/ref-manual/ref-classes.xml b/yocto-poky/documentation/ref-manual/ref-classes.xml
index d87c9ff..b2941b8 100644
--- a/yocto-poky/documentation/ref-manual/ref-classes.xml
+++ b/yocto-poky/documentation/ref-manual/ref-classes.xml
@@ -52,20 +52,22 @@
         and a C library as pre-requisites, and splitting out of debug symbols
         during packaging).
         <note>
-            Unlike e.g. Debian, OpenEmbedded recipes that produce packages
-            which
+            <para>Unlike some distro recipes (e.g. Debian), OpenEmbedded recipes
+            that produce packages that depend on tunings through use of the
             <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
-            on
+            and
             <link linkend='var-TUNE_PKGARCH'><filename>TUNE_PKGARCH</filename></link>
-            packages should never be made <filename>allarch</filename>, even
-            if they do not produce architecture-specific output. This would
-            cause the do_package_write_* tasks to have different signatures
-            for
-            <link linkend='var-MACHINE'><filename>MACHINE</filename></link>s
-            with different
-            <link linkend='var-TUNE_PKGARCH'><filename>TUNE_PKGARCH</filename></link>,
-            thus unnecessary rebuilds every single time an image for a different
-            MACHINE is built (even without any change to the recipe).
+            variables, should never be configured for all architectures
+            using <filename>allarch</filename>.
+            This is the case even if the recipes do not produce
+            architecture-specific output.</para>
+            <para>Configuring such recipes for all architectures causes the
+            <link linkend='ref-tasks-package_write_deb'><filename>do_package_write_*</filename></link>
+            tasks to have different signatures for the machines with different
+            tunings.
+            Additionally, unnecessary rebuilds occur every time an
+            image for a different <filename>MACHINE</filename> is built
+            even when the recipe never changes.</para>
         </note>
     </para>
 
@@ -101,70 +103,44 @@
 </section>
 
 <section id='ref-classes-autotools'>
-    <title><filename>autotools.bbclass</filename></title>
+    <title><filename>autotools*.bbclass</filename></title>
 
     <para>
-        The <filename>autotools</filename> class supports Autotooled
+        The <filename>autotools*</filename> classes support Autotooled
         packages.
     </para>
 
     <para>
         The <filename>autoconf</filename>, <filename>automake</filename>,
-        and <filename>libtool</filename> bring standardization.
-        This class defines a set of tasks (configure, compile etc.) that
+        and <filename>libtool</filename> packages bring standardization.
+        This class defines a set of tasks (e.g.
+        <filename>configure</filename>, <filename>compile</filename> and
+        so forth) that
         work for all Autotooled packages.
         It should usually be enough to define a few standard variables
         and then simply <filename>inherit autotools</filename>.
-        This class can also work with software that emulates Autotools.
+        These classes can also work with software that emulates Autotools.
         For more information, see the
         "<ulink url='&YOCTO_DOCS_DEV_URL;#new-recipe-autotooled-package'>Autotooled Package</ulink>"
         section in the Yocto Project Development Manual.
     </para>
 
     <para>
-        By default, the <filename>autotools</filename> class
-        uses out-of-tree builds
+        By default, the <filename>autotools*</filename> classes
+        (i.e. <filename>autotools.bbclass</filename> and
+        <filename>autotools_stage.bbclass</filename>) use
+        out-of-tree builds
         (<link linkend='var-B'><filename>B</filename></link> <filename>!=</filename>
         <link linkend='var-S'><filename>S</filename></link>).
+    </para>
+
+    <para>
         If the software being built by a recipe does not support
         using out-of-tree builds, you should have the recipe inherit the
-        <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
-        class.
-    </para>
-
-    <para>
-        It's useful to have some idea of how the tasks defined by this class work
-        and what they do behind the scenes.
-        <itemizedlist>
-            <listitem><para><link linkend='ref-tasks-configure'><filename>do_configure</filename></link> -
-                Regenerates the
-                configure script (using <filename>autoreconf</filename>) and then launches it
-                with a standard set of arguments used during cross-compilation.
-                You can pass additional parameters to <filename>configure</filename> through the
-                <filename><link linkend='var-EXTRA_OECONF'>EXTRA_OECONF</link></filename> variable.
-                </para></listitem>
-            <listitem><para><link linkend='ref-tasks-compile'><filename>do_compile</filename></link> - Runs <filename>make</filename> with
-                arguments that specify the compiler and linker.
-                You can pass additional arguments through
-                the <filename><link linkend='var-EXTRA_OEMAKE'>EXTRA_OEMAKE</link></filename> variable.
-                </para></listitem>
-            <listitem><para><link linkend='ref-tasks-install'><filename>do_install</filename></link> - Runs <filename>make install</filename>
-                and passes in
-                <filename>${</filename><link linkend='var-D'><filename>D</filename></link><filename>}</filename>
-                as <filename>DESTDIR</filename>.
-                </para></listitem>
-        </itemizedlist>
-    </para>
-</section>
-
-<section id='ref-classes-autotools-brokensep'>
-    <title><filename>autotools-brokensep.bbclass</filename></title>
-
-    <para>
+        <filename>autotools-brokensep</filename> class.
         The <filename>autotools-brokensep</filename> class behaves the same
-        as the
-        <link linkend='ref-classes-autotools'><filename>autotools</filename></link>
-        class but builds with
+        as the <filename>autotools</filename> and
+        <filename>autotools_stage</filename> classes but builds with
         <link linkend='var-B'><filename>B</filename></link> ==
         <link linkend='var-S'><filename>S</filename></link>.
         This method is useful when out-of-tree build support is either not
@@ -174,6 +150,34 @@
             if at all possible.
         </note>
     </para>
+
+    <para>
+        It's useful to have some idea of how the tasks defined by
+        the <filename>autotools*</filename> classes work and what they do
+        behind the scenes.
+        <itemizedlist>
+            <listitem><para><link linkend='ref-tasks-configure'><filename>do_configure</filename></link> -
+                Regenerates the
+                configure script (using <filename>autoreconf</filename>) and
+                then launches it with a standard set of arguments used during
+                cross-compilation.
+                You can pass additional parameters to
+                <filename>configure</filename> through the
+                <filename><link linkend='var-EXTRA_OECONF'>EXTRA_OECONF</link></filename> variable.
+                </para></listitem>
+            <listitem><para><link linkend='ref-tasks-compile'><filename>do_compile</filename></link> -
+                Runs <filename>make</filename> with arguments that specify the
+                compiler and linker.
+                You can pass additional arguments through
+                the <filename><link linkend='var-EXTRA_OEMAKE'>EXTRA_OEMAKE</link></filename> variable.
+                </para></listitem>
+            <listitem><para><link linkend='ref-tasks-install'><filename>do_install</filename></link> -
+                Runs <filename>make install</filename> and passes in
+                <filename>${</filename><link linkend='var-D'><filename>D</filename></link><filename>}</filename>
+                as <filename>DESTDIR</filename>.
+                </para></listitem>
+        </itemizedlist>
+    </para>
 </section>
 
 <section id='ref-classes-base'>
@@ -211,14 +215,22 @@
         use for this class.
         <note>
             For RPMs and other packages that do not contain a subdirectory,
-            you should specify a "subdir" parameter.
+            you should specify an appropriate fetcher parameter to point to
+            the subdirectory.
+            For example, if BitBake is using the Git fetcher
+            (<filename>git://</filename>), the "subpath" parameter limits
+            the checkout to a specific subpath of the tree.
             Here is an example where <filename>${BP}</filename> is used so that
             the files are extracted into the subdirectory expected by the
             default value of
             <link linkend='var-S'><filename>S</filename></link>:
             <literallayout class='monospaced'>
-     SRC_URI = "http://example.com/downloads/somepackage.rpm;subdir=${BP}"
+     SRC_URI = "git://example.com/downloads/somepackage.rpm;subpath=${BP}"
             </literallayout>
+            See the
+            "<ulink url='&YOCTO_DOCS_BB_URL;#bb-fetchers'>Fetchers</ulink>"
+            section in the BitBake User Manual for more information on
+            supported BitBake Fetchers.
         </note>
     </para>
 </section>
@@ -552,10 +564,10 @@
 </section>
 
 <section id='ref-classes-cpan'>
-    <title><filename>cpan.bbclass</filename></title>
+    <title><filename>cpan*.bbclass</filename></title>
 
     <para>
-        The <filename>cpan</filename> class supports Perl modules.
+        The <filename>cpan*</filename> classes support Perl modules.
     </para>
 
     <para>
@@ -574,6 +586,8 @@
                 using <filename>cpan_build.bbclass</filename> in their recipes.
                 </para></listitem>
         </itemizedlist>
+        Both build methods inherit the <filename>cpan-base</filename> class
+        for basic Perl support.
     </para>
 </section>
 
@@ -707,25 +721,20 @@
 
     <para>
         The class is not included by default.
-        To use it, you must include the following files and set the
+        To use it, you must set the
         <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
         variable:
         <literallayout class='monospaced'>
-     include conf/distro/include/distro_alias.inc
-     include conf/distro/include/recipe_color.inc
-     include conf/distro/include/maintainers.inc
-     include conf/distro/include/upstream_tracking.inc
-     include conf/distro/include/package_regex.inc
      INHERIT+= "distrodata"
         </literallayout>
     </para>
 </section>
 
 <section id='ref-classes-distutils'>
-    <title><filename>distutils.bbclass</filename></title>
+    <title><filename>distutils*.bbclass</filename></title>
 
     <para>
-        The <filename>distutils</filename> class supports recipes for Python
+        The <filename>distutils*</filename> classes support recipes for Python
         version 2.x extensions, which are simple.
         These recipes usually only need to point to the source's archive and
         then inherit the proper class.
@@ -733,8 +742,8 @@
         module authors used.
         <itemizedlist>
             <listitem><para>Extensions that use an Autotools-based build system
-                require Autotools and
-                <filename>distutils</filename>-based classes in their recipes.
+                require Autotools and the classes based on
+                <filename>distutils</filename> in their recipes.
                 </para></listitem>
             <listitem><para>Extensions that use build systems based on
                 <filename>distutils</filename> require
@@ -746,18 +755,26 @@
                 class in their recipes.
                 </para></listitem>
         </itemizedlist>
+        The <filename>distutils-common-base</filename> class is required by
+        some of the <filename>distutils*</filename> classes to provide common
+        Python2 support.
+    </para>
+
+    <para>
+        The <filename>distutils-tools</filename> class supports recipes for
+        additional "distutils" tools.
     </para>
 </section>
 
 <section id='ref-classes-distutils3'>
-    <title><filename>distutils3.bbclass</filename></title>
+    <title><filename>distutils3*.bbclass</filename></title>
 
     <para>
-        The <filename>distutils3</filename> class supports recipes for Python
+        The <filename>distutils3*</filename> classes support recipes for Python
         version 3.x extensions, which are simple.
         These recipes usually only need to point to the source's archive and
         then inherit the proper class.
-        Building is split into two methods depending on which method the
+        Building is split into three methods depending on which method the
         module authors used.
         <itemizedlist>
             <listitem><para>Extensions that use an Autotools-based build system
@@ -774,6 +791,14 @@
                 class in their recipes.
                 </para></listitem>
         </itemizedlist>
+        The <filename>distutils3*</filename> classes either inherit their
+        corresponding <filename>distutils*</filename> class or replicate them
+        using a Python3 version instead (e.g.
+        <filename>distutils3-base</filename> inherits
+        <filename>distutils-common-base</filename>, which is the same as
+        <filename>distutils-base</filename> but inherits
+        <filename>python3native</filename> instead of
+        <filename>pythonnative</filename>).
     </para>
 </section>
 
@@ -905,6 +930,19 @@
     </para>
 </section>
 
+<section id='ref-classes-fs-uuid'>
+    <title><filename>fs-uuid.bbclass</filename></title>
+
+    <para>
+        The <filename>fs-uuid</filename> class extracts the UUID from
+        <filename>${</filename><link linkend='var-ROOTFS'><filename>ROOTFS</filename></link><filename>}</filename>,
+        which must have been built by the time that this function gets called.
+        The <filename>fs-uuid</filename> class only works on
+        <filename>ext</filename> file systems and depends on
+        <filename>tune2fs</filename>.
+    </para>
+</section>
+
 <section id='ref-classes-gconf'>
     <title><filename>gconf.bbclass</filename></title>
 
@@ -1255,6 +1293,15 @@
     </para>
 </section>
 
+<section id='ref-classes-image-buildinfo'>
+    <title><filename>image-buildinfo.bbclass</filename></title>
+
+    <para>
+        The <filename>image-buildinfo</filename> class writes information
+        to the target filesystem in <filename>/etc/build</filename>.
+    </para>
+</section>
+
 <section id='ref-classes-image_types'>
     <title><filename>image_types.bbclass</filename></title>
 
@@ -1364,10 +1411,21 @@
         the OpenEmbedded build process.
         <note>
             This class is currently unmaintained.
+            The <filename>strace</filename> package needs to be installed
+            in the build host as a dependency for this tool.
         </note>
     </para>
 </section>
 
+<section id='ref-classes-image-vm'>
+    <title><filename>image-vm.bbclass</filename></title>
+
+    <para>
+        The <filename>image-vm</filename> class supports building VM
+        images.
+    </para>
+</section>
+
 <section id='ref-classes-image-vmdk'>
     <title><filename>image-vmdk.bbclass</filename></title>
 
@@ -1868,6 +1926,25 @@
     </para>
 </section>
 
+<section id='ref-classes-kernel-fitimage'>
+    <title><filename>kernel-fitimage.bbclass</filename></title>
+
+    <para>
+        The <filename>kernel-fitimage</filename> class provides support to
+        pack zImages.
+    </para>
+</section>
+
+<section id='ref-classes-kernel-grub'>
+    <title><filename>kernel-grub.bbclass</filename></title>
+
+    <para>
+        The <filename>kernel-grub</filename> class updates the boot area and
+        the boot menu with the kernel as the priority boot mechanism while
+        installing an RPM to update the kernel on a deployed target.
+    </para>
+</section>
+
 <section id='ref-classes-kernel-module-split'>
     <title><filename>kernel-module-split.bbclass</filename></title>
 
@@ -1878,6 +1955,24 @@
     </para>
 </section>
 
+<section id='ref-classes-kernel-uboot'>
+    <title><filename>kernel-uboot.bbclass</filename></title>
+
+    <para>
+        The <filename>kernel-uboot</filename> class provides support for
+        building from vmlinux-style kernel sources.
+    </para>
+</section>
+
+<section id='ref-classes-kernel-uimage'>
+    <title><filename>kernel-uimage.bbclass</filename></title>
+
+    <para>
+        The <filename>kernel-uimage</filename> class provides support to
+        pack uImage.
+    </para>
+</section>
+
 <section id='ref-classes-kernel-yocto'>
     <title><filename>kernel-yocto.bbclass</filename></title>
 
@@ -1888,6 +1983,15 @@
     </para>
 </section>
 
+<section id='ref-classes-kernelsrc'>
+    <title><filename>kernelsrc.bbclass</filename></title>
+
+    <para>
+        The <filename>kernelsrc</filename> class sets the Linux kernel
+        source and version.
+    </para>
+</section>
+
 <section id='ref-classes-lib_package'>
     <title><filename>lib_package.bbclass</filename></title>
 
@@ -1902,6 +2006,25 @@
     </para>
 </section>
 
+<section id='ref-classes-libc*'>
+    <title><filename>libc*.bbclass</filename></title>
+
+    <para>
+        The <filename>libc*</filename> classes support recipes that build
+        packages with <filename>libc</filename>:
+        <itemizedlist>
+            <listitem><para>The <filename>libc-common</filename> class
+                provides common support for building with
+                <filename>libc</filename>.
+                </para></listitem>
+            <listitem><para>The <filename>libc-package</filename> class
+                supports packaging up <filename>glibc</filename> and
+                <filename>eglibc</filename>.
+                </para></listitem>
+        </itemizedlist>
+    </para>
+</section>
+
 <section id='ref-classes-license'>
     <title><filename>license.bbclass</filename></title>
 
@@ -1926,6 +2049,16 @@
     </para>
 </section>
 
+<section id='ref-classes-linuxloader'>
+    <title><filename>linuxloader.bbclass</filename></title>
+
+    <para>
+        Provides the function <filename>linuxloader()</filename>, which gives
+        the value of the dynamic loader/linker provided on the platform.
+        This value is used by a number of other classes.
+    </para>
+</section>
+
 <section id='ref-classes-logging'>
     <title><filename>logging.bbclass</filename></title>
 
@@ -1971,6 +2104,15 @@
     </para>
 </section>
 
+<section id='ref-classes-migrate_localcount'>
+    <title><filename>migrate_localcount.bbclass</filename></title>
+
+    <para>
+        The <filename>migrate_localcount</filename> class verifies a recipe's
+        localcount data and increments it appropriately.
+    </para>
+</section>
+
 <section id='ref-classes-mime'>
     <title><filename>mime.bbclass</filename></title>
 
@@ -2487,17 +2629,18 @@
     <title><filename>pkgconfig.bbclass</filename></title>
 
     <para>
-        The <filename>pkg-config</filename> class provides a standard way to get
-        header and library information.
+        The <filename>pkgconfig</filename> class provides a standard way to get
+        header and library information by using <filename>pkg-config</filename>.
         This class aims to smooth integration of
         <filename>pkg-config</filename> into libraries that use it.
     </para>
 
     <para>
-        During staging, BitBake installs <filename>pkg-config</filename> data into the
-        <filename>sysroots/</filename> directory.
-        By making use of sysroot functionality within <filename>pkg-config</filename>,
-        this class no longer has to manipulate the files.
+        During staging, BitBake installs <filename>pkg-config</filename>
+        data into the <filename>sysroots/</filename> directory.
+        By making use of sysroot functionality within
+        <filename>pkg-config</filename>, the <filename>pkgconfig</filename>
+        class no longer has to manipulate the files.
     </para>
 </section>
 
@@ -2536,6 +2679,9 @@
                 Supports creation of the SDK given the opkg (IPK format)
                 package manager.
                 </para></listitem>
+            <listitem><para><emphasis><filename>populate_sdk_ext</filename>:</emphasis>
+                Supports extensible SDK creation under all package managers.
+                </para></listitem>
         </itemizedlist>
     </para>
 
@@ -2692,6 +2838,16 @@
     </para>
 </section>
 
+<section id='ref-classes-python3native'>
+    <title><filename>python3native.bbclass</filename></title>
+
+    <para>
+        The <filename>python3native</filename> class supports using the
+        native version of Python 3 built by the build system rather than
+        the version provided by the build host.
+    </para>
+</section>
+
 <section id='ref-classes-pythonnative'>
     <title><filename>pythonnative.bbclass</filename></title>
 
@@ -2773,6 +2929,16 @@
     </para>
 </section>
 
+<section id='ref-classes-recipe_sanity'>
+    <title><filename>recipe_sanity.bbclass</filename></title>
+
+    <para>
+        The <filename>recipe_sanity</filename> class checks for the presence
+        of any host system recipe prerequisites that might affect the
+        build (e.g. variables that are set or software that is present).
+    </para>
+</section>
+
 <section id='ref-classes-relocatable'>
     <title><filename>relocatable.bbclass</filename></title>
 
@@ -2871,6 +3037,11 @@
                 The <filename>rootfs_ipk</filename> class, which supports
                 creation of root filesystems for images built using
                 <filename>.ipk</filename> packages.</para></listitem>
+            <listitem><para>
+                The <filename>rootfsdebugfiles</filename> class, which installs
+                additional files found on the build host directly into the
+                root filesystem.
+                </para></listitem>
         </itemizedlist>
     </para>
 
@@ -2948,6 +3119,15 @@
     </para>
 </section>
 
+<section id='ref-classes-sign_rpm'>
+    <title><filename>sign_rpm.bbclass</filename></title>
+
+    <para>
+        The <filename>sign_rpm</filename> class supports generating signed
+        RPM packages.
+    </para>
+</section>
+
 <section id='ref-classes-sip'>
     <title><filename>sip.bbclass</filename></title>
 
@@ -3181,28 +3361,40 @@
     </para>
 </section>
 
-<section id='ref-classes-testimage'>
-    <title><filename>testimage.bbclass</filename></title>
+<section id='ref-classes-testimage*'>
+    <title><filename>testimage*.bbclass</filename></title>
 
     <para>
-        The <filename>testimage</filename> class supports running automated
-        tests against images using QEMU and on actual hardware.
-        The class handles loading the tests and starting the image.
+        The <filename>testimage*</filename> classes support running
+        automated tests against images using QEMU and on actual hardware.
+        The classes handle loading the tests and starting the image.
+        To use the classes, you need to perform steps to set up the
+        environment.
     </para>
 
     <para>
-        To use the class, you need to perform steps to set up the
-        environment.
         The tests are commands that run on the target system over
         <filename>ssh</filename>.
-        they are written in Python and make use of the
+        Each test is written in Python and makes use of the
         <filename>unittest</filename> module.
     </para>
 
     <para>
+        The <filename>testimage</filename> class runs tests on an image
+        when called using the following:
+        <literallayout class='monospaced'>
+     $ bitbake -c testimage <replaceable>image</replaceable>
+        </literallayout>
+        The <filename>testimage-auto</filename> class runs tests on an image
+        after the image is constructed (provided
+        <link linkend='var-TEST_IMAGE'><filename>TEST_IMAGE</filename></link>
+        is set to "1").
+    </para>
+
+    <para>
         For information on how to enable, run, and create new tests, see the
         "<ulink url='&YOCTO_DOCS_DEV_URL;#performing-automated-runtime-testing'>Performing Automated Runtime Testing</ulink>"
-        section.
+        section in the Yocto Project Development Manual.
     </para>
 </section>
 
@@ -3385,24 +3577,29 @@
 </section>
 
 <section id='ref-classes-useradd'>
-    <title><filename>useradd.bbclass</filename></title>
+    <title><filename>useradd*.bbclass</filename></title>
 
     <para>
-        The <filename>useradd</filename> class supports the addition of users
+        The <filename>useradd*</filename> classes support the addition of users
         or groups for usage by the package on the target.
         For example, if you have packages that contain system services that
-        should be run under their own user or group, you can use this class to
-        enable creation of the user or group.
+        should be run under their own user or group, you can use these classes
+        to enable creation of the user or group.
         The <filename>meta-skeleton/recipes-skeleton/useradd/useradd-example.bb</filename>
         recipe in the <ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>
         provides a simple example that shows how to add three
         users and groups to two packages.
         See the <filename>useradd-example.bb</filename> recipe for more
-        information on how to use this class.
+        information on how to use these classes.
     </para>
 
     <para>
-        The <filename>useradd</filename> class supports the
+        The <filename>useradd_base</filename> class provides basic
+        functionality for user or group settings.
+    </para>
+
+    <para>
+        The <filename>useradd*</filename> classes support the
         <link linkend='var-USERADD_PACKAGES'><filename>USERADD_PACKAGES</filename></link>,
         <link linkend='var-USERADD_PARAM'><filename>USERADD_PARAM</filename></link>,
         <link linkend='var-GROUPADD_PARAM'><filename>GROUPADD_PARAM</filename></link>,
@@ -3410,10 +3607,6 @@
         <link linkend='var-GROUPMEMS_PARAM'><filename>GROUPMEMS_PARAM</filename></link>
         variables.
     </para>
-</section>
-
-<section id='ref-classes-useradd-staticids'>
-    <title><filename>useradd-staticids.bbclass</filename></title>
 
     <para>
         The <filename>useradd-staticids</filename> class supports the addition
@@ -3457,7 +3650,8 @@
     </para>
 
     <note><title>Notes</title>
-        You do not use this class directly.
+        You do not use the <filename>useradd-staticids</filename>
+        class directly.
         You either enable or disable the class by setting the
         <filename>USERADDEXTENSION</filename> variable.
         If you enable or disable the class in a configured system,
diff --git a/yocto-poky/documentation/ref-manual/ref-manual.xml b/yocto-poky/documentation/ref-manual/ref-manual.xml
index 0b4eddf..a296b9b 100644
--- a/yocto-poky/documentation/ref-manual/ref-manual.xml
+++ b/yocto-poky/documentation/ref-manual/ref-manual.xml
@@ -93,9 +93,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
         </revhistory>
 
diff --git a/yocto-poky/documentation/ref-manual/ref-tasks.xml b/yocto-poky/documentation/ref-manual/ref-tasks.xml
index 59b4d96..21403c0 100644
--- a/yocto-poky/documentation/ref-manual/ref-tasks.xml
+++ b/yocto-poky/documentation/ref-manual/ref-tasks.xml
@@ -619,6 +619,15 @@
         </para>
     </section>
 
+    <section id='ref-tasks-kernel_metadata'>
+        <title><filename>do_kernel_metadata</filename></title>
+
+        <para>
+            Collects kernel metadata for a
+            <filename>linux-yocto</filename> style kernel.
+        </para>
+    </section>
+
     <section id='ref-tasks-menuconfig'>
         <title><filename>do_menuconfig</filename></title>
 
@@ -638,6 +647,14 @@
         </para>
     </section>
 
+    <section id='ref-tasks-shared_workdir'>
+        <title><filename>do_shared_workdir</filename></title>
+
+        <para>
+            Creates the shared working directory for the kernel.
+        </para>
+    </section>
+
     <section id='ref-tasks-sizecheck'>
         <title><filename>do_sizecheck</filename></title>
 
diff --git a/yocto-poky/documentation/ref-manual/ref-variables.xml b/yocto-poky/documentation/ref-manual/ref-variables.xml
index 4c4fc22..0b2c426 100644
--- a/yocto-poky/documentation/ref-manual/ref-variables.xml
+++ b/yocto-poky/documentation/ref-manual/ref-variables.xml
@@ -3533,6 +3533,45 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-EXCLUDE_FROM_SHLIBS'><glossterm>EXCLUDE_FROM_SHLIBS</glossterm>
+            <info>
+                EXCLUDE_FROM_SHLIBS[doc] = "Causes the OpenEmbedded build system's shared libraries resolver to exclude an entire package when scanning for shared libraries."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Triggers the OpenEmbedded build system's shared libraries
+                    resolver to exclude an entire package when scanning for
+                    shared libraries.
+                    <note>
+                        The shared libraries resolver's functionality results
+                        in part from the internal function
+                        <filename>package_do_shlibs</filename>, which is part of
+                        the
+                        <link linkend='ref-tasks-package'><filename>do_package</filename></link>
+                        task.
+                        You should be aware that the shared libraries resolver
+                        might implicitly define some dependencies between
+                        packages.
+                    </note>
+                    The <filename>EXCLUDE_FROM_SHLIBS</filename> variable is
+                    similar to the
+                    <link linkend='var-PRIVATE_LIBS'><filename>PRIVATE_LIBS</filename></link>
+                    variable, which excludes a package's particular libraries
+                    only and not the whole package.
+                </para>
+
+                <para>
+                    Use the
+                    <filename>EXCLUDE_FROM_SHLIBS</filename> variable by
+                    setting it to "1" for a particular package:
+                    <literallayout class='monospaced'>
+     EXCLUDE_FROM_SHLIBS = "1"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-EXCLUDE_FROM_WORLD'><glossterm>EXCLUDE_FROM_WORLD</glossterm>
             <info>
                 EXCLUDE_FROM_WORLD[doc] = "Directs BitBake to exclude a recipe from world builds (i.e. bitbake world)."
@@ -5532,6 +5571,9 @@
                     within the function, you can use
                     <filename>${IMAGE_ROOTFS}</filename>, which points to
                     the directory that becomes the root filesystem image.
+                    See the
+                    <link linkend='var-IMAGE_ROOTFS'><filename>IMAGE_ROOTFS</filename></link>
+                    variable for more information.
                 </para>
             </glossdef>
         </glossentry>
@@ -5557,6 +5599,9 @@
                     within the function, you can use
                     <filename>${IMAGE_ROOTFS}</filename>, which points to
                     the directory that becomes the root filesystem image.
+                    See the
+                    <link linkend='var-IMAGE_ROOTFS'><filename>IMAGE_ROOTFS</filename></link>
+                    variable for more information.
                 </para>
             </glossdef>
         </glossentry>
@@ -5723,32 +5768,45 @@
                     Specifies the complete list of supported image types
                     by default:
                     <literallayout class='monospaced'>
-     jffs2
-     jffs2.sum
-     cramfs
-     ext2
-     ext2.gz
-     ext2.bz2
-     ext3
-     ext3.gz
-     ext2.lzma
      btrfs
-     live
-     squashfs
-     squashfs-xz
-     ubi
-     ubifs
-     tar
-     tar.gz
-     tar.bz2
-     tar.xz
      cpio
      cpio.gz
-     cpio.xz
+     cpio.lz4
      cpio.lzma
+     cpio.xz
+     cramfs
+     elf
+     ext2
+     ext2.bz2
+     ext2.gz
+     ext2.lzma
+     ext3
+     ext3.gz
+     ext4
+     ext4.gz
+     hdddirect
+     hddimg
+     iso
+     jffs2
+     jffs2.sum
+     multiubi
+     qcow2
+     squashfs
+     squashfs-lzo
+     squashfs-xz
+     tar
+     tar.bz2
+     tar.gz
+     tar.lz4
+     tar.xz
+     ubi
+     ubifs
      vdi
      vmdk
-     elf
+     wic
+     wic.bz2
+     wic.gz
+     wic.lzma
                     </literallayout>
                 </para>
 
@@ -6409,6 +6467,21 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-KERNEL_ALT_IMAGETYPE'><glossterm>KERNEL_ALT_IMAGETYPE</glossterm>
+            <info>
+                KERNEL_ALT_IMAGETYPE[doc] = "Specifies an alternate kernel image type for creation."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies an alternate kernel image type for creation in
+                    addition to the kernel image type specified using the
+                    <link linkend='var-KERNEL_IMAGETYPE'><filename>KERNEL_IMAGETYPE</filename></link>
+                    variable.
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-KERNEL_CLASSES'><glossterm>KERNEL_CLASSES</glossterm>
             <info>
                 KERNEL_CLASSES[doc] = "A list of classes defining kernel image types that kernel class should inherit."
@@ -6430,6 +6503,34 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-KERNEL_DEVICETREE'><glossterm>KERNEL_DEVICETREE</glossterm>
+            <info>
+                KERNEL_DEVICETREE[doc] = "Specifies the name of the generated Linux kernel device tree (i.e. the <filename>.dtb</filename>) file."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies the name of the generated Linux kernel device tree
+                    (i.e. the <filename>.dtb</filename>) file.
+                    <note>
+                        Legacy support exists for specifying the full path
+                        to the device tree.
+                        However, providing just the <filename>.dtb</filename>
+                        file is preferred.
+                    </note>
+                    In order to use this variable, you must include one of
+                    the following files in your kernel recipe:
+                    <literallayout class='monospaced'>
+     require recipes-kernel/linux/linux-dtb.inc
+                    </literallayout>
+                    or
+                    <literallayout class='monospaced'>
+     require recipes-kernel/linux/linux-yocto.inc
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-KERNEL_EXTRA_ARGS'><glossterm>KERNEL_EXTRA_ARGS</glossterm>
             <info>
                 KERNEL_EXTRA_ARGS[doc] = "Specifies additional make command-line arguments the OpenEmbedded build system passes on when compiling the kernel."
@@ -6559,6 +6660,12 @@
                     when building the kernel and is passed to <filename>make</filename> as the target to
                     build.
                 </para>
+
+                <para>
+                    If you want to build an alternate kernel image type, use the
+                    <link linkend='var-KERNEL_ALT_IMAGETYPE'><filename>KERNEL_ALT_IMAGETYPE</filename></link>
+                    variable.
+                </para>
             </glossdef>
         </glossentry>
 
@@ -6697,6 +6804,46 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-KERNEL_VERSION'><glossterm>KERNEL_VERSION</glossterm>
+            <info>
+                KERNEL_VERSION[doc] = "Specifies the version of the kernel as extracted from version.h or utsrelease.h within the kernel sources."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies the version of the kernel as extracted from
+                    <filename>version.h</filename> or
+                    <filename>utsrelease.h</filename> within the kernel sources.
+                    The effects of setting this variable do not take effect
+                    until the kernel has been configured.
+                    Consequently, attempting to refer to this variable in
+                    contexts prior to configuration will not work.
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-KERNELDEPMODDEPEND'><glossterm>KERNELDEPMODDEPEND</glossterm>
+            <info>
+                KERNELDEPMODDEPEND[doc] = "Specifies whether or not to use the data referenced through the PKGDATA_DIR directory."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies whether the data referenced through
+                    <link linkend='var-PKGDATA_DIR'><filename>PKGDATA_DIR</filename></link>
+                    is needed or not.
+                    The <filename>KERNELDEPMODDEPEND</filename> variable does
+                    not control whether or not that data exists,
+                    but simply whether or not it is used.
+                    If you do not need to use the data, set the
+                    <filename>KERNELDEPMODDEPEND</filename> variable in your
+                    <filename>initramfs</filename> recipe.
+                    Setting the variable there when the data is not needed
+                    avoids a potential dependency loop.
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-KFEATURE_DESCRIPTION'><glossterm>KFEATURE_DESCRIPTION</glossterm>
             <info>
                 KFEATURE_DESCRIPTION[doc] = "Provides a short description of a configuration fragment. You use this variable in the .scc file that describes a configuration fragment file."
@@ -7376,6 +7523,16 @@
                     <literallayout class='monospaced'>
      MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS += "kernel-module-ab123"
                     </literallayout>
+                    <note>
+                        In this example, the
+                        <filename>kernel-module-ab123</filename> recipe
+                        needs to explicitly set its
+                        <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>
+                        variable to ensure that BitBake does not use the
+                        kernel recipe's
+                        <link linkend='var-PACKAGES_DYNAMIC'><filename>PACKAGES_DYNAMIC</filename></link>
+                        variable to satisfy the dependency.
+                    </note>
                 </para>
 
                 <para>
@@ -8311,6 +8468,32 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-PACKAGE_EXCLUDE_COMPLEMENTARY'><glossterm>PACKAGE_EXCLUDE_COMPLEMENTARY</glossterm>
+            <info>
+                PACKAGE_EXCLUDE_COMPLEMENTARY[doc] = "Prevents specific packages from being installed when you are installing complementary packages."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Prevents specific packages from being installed when
+                    you are installing complementary packages.
+                </para>
+
+                <para>
+                    You might find that you want to prevent installing certain
+                    packages when you are installing complementary packages.
+                    For example, if you are using
+                    <link linkend='var-IMAGE_FEATURES'><filename>IMAGE_FEATURES</filename></link>
+                    to install <filename>dev-pkgs</filename>, you might not want
+                    to install all packages from a particular multilib.
+                    If you find yourself in this situation, you can use the
+                    <filename>PACKAGE_EXCLUDE_COMPLEMENTARY</filename> variable
+                    to specify regular expressions to match the packages you
+                    want to exclude.
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-PACKAGE_EXCLUDE'><glossterm>PACKAGE_EXCLUDE</glossterm>
             <info>
                 PACKAGE_EXCLUDE[doc] = "Packages to exclude from the installation. If a listed package is required, an error is generated."
@@ -8377,6 +8560,144 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-PACKAGE_FEED_ARCHS'><glossterm>PACKAGE_FEED_ARCHS</glossterm>
+            <info>
+                PACKAGE_FEED_ARCHS[doc] = "Specifies user-defined package architectures when constructing package feed URIs."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies the package architectures used as part of the
+                    package feed URIs during the build.
+                    The <filename>PACKAGE_FEED_ARCHS</filename> variable is
+                    appended to the final package feed URI, which is constructed
+                    using the
+                    <link linkend='var-PACKAGE_FEED_URIS'><filename>PACKAGE_FEED_URIS</filename></link>
+                    and
+                    <link linkend='var-PACKAGE_FEED_BASE_PATHS'><filename>PACKAGE_FEED_BASE_PATHS</filename></link>
+                    variables.
+                </para>
+
+                <para>
+                    Consider the following example where the
+                    <filename>PACKAGE_FEED_URIS</filename>,
+                    <filename>PACKAGE_FEED_BASE_PATHS</filename>, and
+                    <filename>PACKAGE_FEED_ARCHS</filename> variables are
+                    defined in your <filename>local.conf</filename> file:
+                    <literallayout class='monospaced'>
+     PACKAGE_FEED_URIS = "https://example.com/packagerepos/release \
+                          https://example.com/packagerepos/updates"
+     PACKAGE_FEED_BASE_PATHS = "rpm rpm-dev"
+     PACKAGE_FEED_ARCHS = "all core2-64"
+                    </literallayout>
+                    Given these settings, the resulting package feeds are
+                    as follows:
+                    <literallayout class='monospaced'>
+     https://example.com/packagerepos/release/rpm/all
+     https://example.com/packagerepos/release/rpm/core2-64
+     https://example.com/packagerepos/release/rpm-dev/all
+     https://example.com/packagerepos/release/rpm-dev/core2-64
+     https://example.com/packagerepos/updates/rpm/all
+     https://example.com/packagerepos/updates/rpm/core2-64
+     https://example.com/packagerepos/updates/rpm-dev/all
+     https://example.com/packagerepos/updates/rpm-dev/core2-64
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PACKAGE_FEED_BASE_PATHS'><glossterm>PACKAGE_FEED_BASE_PATHS</glossterm>
+            <info>
+                PACKAGE_FEED_BASE_PATHS[doc] = "Specifies base path used when constructing package feed URIs."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies the base path used when constructing package feed
+                    URIs.
+                    The <filename>PACKAGE_FEED_BASE_PATHS</filename> variable
+                    makes up the middle portion of a package feed URI used
+                    by the OpenEmbedded build system.
+                    The base path lies between the
+                    <link linkend='var-PACKAGE_FEED_URIS'><filename>PACKAGE_FEED_URIS</filename></link>
+                    and
+                    <link linkend='var-PACKAGE_FEED_ARCHS'><filename>PACKAGE_FEED_ARCHS</filename></link>
+                    variables.
+                </para>
+
+                <para>
+                    Consider the following example where the
+                    <filename>PACKAGE_FEED_URIS</filename>,
+                    <filename>PACKAGE_FEED_BASE_PATHS</filename>, and
+                    <filename>PACKAGE_FEED_ARCHS</filename> variables are
+                    defined in your <filename>local.conf</filename> file:
+                    <literallayout class='monospaced'>
+     PACKAGE_FEED_URIS = "https://example.com/packagerepos/release \
+                          https://example.com/packagerepos/updates"
+     PACKAGE_FEED_BASE_PATHS = "rpm rpm-dev"
+     PACKAGE_FEED_ARCHS = "all core2-64"
+                    </literallayout>
+                    Given these settings, the resulting package feeds are
+                    as follows:
+                    <literallayout class='monospaced'>
+     https://example.com/packagerepos/release/rpm/all
+     https://example.com/packagerepos/release/rpm/core2-64
+     https://example.com/packagerepos/release/rpm-dev/all
+     https://example.com/packagerepos/release/rpm-dev/core2-64
+     https://example.com/packagerepos/updates/rpm/all
+     https://example.com/packagerepos/updates/rpm/core2-64
+     https://example.com/packagerepos/updates/rpm-dev/all
+     https://example.com/packagerepos/updates/rpm-dev/core2-64
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
+        <glossentry id='var-PACKAGE_FEED_URIS'><glossterm>PACKAGE_FEED_URIS</glossterm>
+            <info>
+                PACKAGE_FEED_URIS[doc] = "Specifies the front portion of the package feed URI used by the OpenEmbedded build system."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Specifies the front portion of the package feed URI
+                    used by the OpenEmbedded build system.
+                    Each final package feed URI is comprised of
+                    <filename>PACKAGE_FEED_URIS</filename>,
+                    <link linkend='var-PACKAGE_FEED_BASE_PATHS'><filename>PACKAGE_FEED_BASE_PATHS</filename></link>,
+                    and
+                    <link linkend='var-PACKAGE_FEED_ARCHS'><filename>PACKAGE_FEED_ARCHS</filename></link>
+                    variables.
+                </para>
+
+                <para>
+                    Consider the following example where the
+                    <filename>PACKAGE_FEED_URIS</filename>,
+                    <filename>PACKAGE_FEED_BASE_PATHS</filename>, and
+                    <filename>PACKAGE_FEED_ARCHS</filename> variables are
+                    defined in your <filename>local.conf</filename> file:
+                    <literallayout class='monospaced'>
+     PACKAGE_FEED_URIS = "https://example.com/packagerepos/release \
+                          https://example.com/packagerepos/updates"
+     PACKAGE_FEED_BASE_PATHS = "rpm rpm-dev"
+     PACKAGE_FEED_ARCHS = "all core2-64"
+                    </literallayout>
+                    Given these settings, the resulting package feeds are
+                    as follows:
+                    <literallayout class='monospaced'>
+     https://example.com/packagerepos/release/rpm/all
+     https://example.com/packagerepos/release/rpm/core2-64
+     https://example.com/packagerepos/release/rpm-dev/all
+     https://example.com/packagerepos/release/rpm-dev/core2-64
+     https://example.com/packagerepos/updates/rpm/all
+     https://example.com/packagerepos/updates/rpm/core2-64
+     https://example.com/packagerepos/updates/rpm-dev/all
+     https://example.com/packagerepos/updates/rpm-dev/core2-64
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-PACKAGE_GROUP'><glossterm>PACKAGE_GROUP</glossterm>
             <info>
                 PACKAGE_GROUP[doc] = "Defines one or more packages to include in an image when a specific item is included in IMAGE_FEATURES."
@@ -10865,6 +11186,34 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-SKIP_FILEDEPS'><glossterm>SKIP_FILEDEPS</glossterm>
+            <info>
+                SKIP_FILEDEPS[doc] = "Enables you to remove all files from
+                the 'Provides' section of an RPM package."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Enables removal of all files from the "Provides" section of
+                    an RPM package.
+                    Removal of these files is required for packages containing
+                    prebuilt binaries and libraries such as
+                    <filename>libstdc++</filename> and
+                    <filename>glibc</filename>.
+                </para>
+
+                <para>
+                    To enable file removal, set the variable to "1" in your
+                    <filename>conf/local.conf</filename> configuration file
+                    in your
+                    <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
+                    <literallayout class='monospaced'>
+     SKIP_FILEDEPS = "1"
+                    </literallayout>
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-SOC_FAMILY'><glossterm>SOC_FAMILY</glossterm>
             <info>
                 SOC_FAMILY[doc] = "Groups together machines based upon the same family of SOC (System On Chip). You typically set this variable in a common .inc file that you include in the configuration files of all the machines."
@@ -11052,7 +11401,14 @@
                 </para>
 
                 <para>
-                    The following list explains the available URI protocols:
+                    The following list explains the available URI protocols.
+                    URI protocols are highly dependent on particular BitBake
+                    Fetcher submodules.
+                    Depending on the fetcher BitBake uses, various URL
+                    parameters are employed.
+                    For specifics on the supported Fetchers, see the
+                    "<ulink url='&YOCTO_DOCS_BB_URL;#bb-fetchers'>Fetchers</ulink>"
+                    section in the BitBake User Manual.
                     <itemizedlist>
                         <listitem><para><emphasis><filename>file://</filename> -</emphasis>
                             Fetches files, which are usually files shipped with
@@ -11180,11 +11536,25 @@
                         <listitem><para><emphasis><filename>unpack</filename> -</emphasis> Controls
                             whether or not to unpack the file if it is an archive.
                             The default action is to unpack the file.</para></listitem>
+                        <listitem><para><emphasis><filename>destsuffix</filename> -</emphasis> Places the file
+                            (or extracts its contents) into the specified
+                            subdirectory of <link linkend='var-WORKDIR'><filename>WORKDIR</filename></link>
+                            when the Git fetcher is used.
+                            </para></listitem>
                         <listitem><para><emphasis><filename>subdir</filename> -</emphasis> Places the file
                             (or extracts its contents) into the specified
-                            subdirectory of <link linkend='var-WORKDIR'><filename>WORKDIR</filename></link>.
-                            This option is useful for unusual tarballs or other archives that
-                            do not have their files already in a subdirectory within the archive.
+                            subdirectory of <link linkend='var-WORKDIR'><filename>WORKDIR</filename></link>
+                            when the local (<filename>file://</filename>)
+                            fetcher is used.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>localdir</filename> -</emphasis> Places the file
+                            (or extracts its contents) into the specified
+                            subdirectory of <link linkend='var-WORKDIR'><filename>WORKDIR</filename></link>
+                            when the CVS fetcher is used.
+                            </para></listitem>
+                        <listitem><para><emphasis><filename>subpath</filename> -</emphasis>
+                            Limits the checkout to a specific subpath of the
+                            tree when the Git fetcher is used.
                             </para></listitem>
                         <listitem><para><emphasis><filename>name</filename> -</emphasis> Specifies a
                             name to be used for association with <filename>SRC_URI</filename> checksums
@@ -11636,6 +12006,25 @@
             </glossdef>
         </glossentry>
 
+        <glossentry id='var-STAGING_KERNEL_BUILDDIR'><glossterm>STAGING_KERNEL_BUILDDIR</glossterm>
+            <info>
+                STAGING_KERNEL_BUILDDIR[doc] = "Points to the directory containing the kernel build artifacts."
+            </info>
+            <glossdef>
+                <para role="glossdeffirst">
+<!--                <para role="glossdeffirst"><imagedata fileref="figures/define-generic.png" /> -->
+                    Points to the directory containing the kernel build
+                    artifacts.
+                    Recipes building software that needs to access kernel
+                    build artifacts
+                    (e.g. <filename>systemtap-uprobes</filename>) can look in
+                    the directory specified with the
+                    <filename>STAGING_KERNEL_BUILDDIR</filename> variable to
+                    find these artifacts after the kernel has been built.
+                </para>
+            </glossdef>
+        </glossentry>
+
         <glossentry id='var-STAGING_KERNEL_DIR'><glossterm>STAGING_KERNEL_DIR</glossterm>
             <info>
                 STAGING_KERNEL_DIR[doc] = "The directory with kernel headers that are required to build out-of-tree modules."
@@ -12569,7 +12958,7 @@
                     these tests, see the
                     "<ulink url='&YOCTO_DOCS_DEV_URL;#performing-automated-runtime-testing'>Performing Automated Runtime Testing</ulink>"
                     section in the Yocto Project Development Manual and the
-                    "<link linkend='ref-classes-testimage'><filename>testimage.bbclass</filename></link>"
+                    "<link linkend='ref-classes-testimage*'><filename>testimage*.bbclass</filename></link>"
                     section.
                 </para>
             </glossdef>
@@ -13948,7 +14337,7 @@
                         <filename>uid</filename> and <filename>gid</filename>
                         values causes the OpenEmbedded build system to employ
                         the
-                        <link linkend='ref-classes-useradd-staticids'><filename>useradd-staticids</filename></link>
+                        <link linkend='ref-classes-useradd'><filename>useradd-staticids</filename></link>
                         class.
                     </note>
                 </para>
diff --git a/yocto-poky/documentation/toaster-manual/figures/add-variable.png b/yocto-poky/documentation/toaster-manual/figures/add-variable.png
new file mode 100644
index 0000000..6bdcca7
--- /dev/null
+++ b/yocto-poky/documentation/toaster-manual/figures/add-variable.png
Binary files differ
diff --git a/yocto-poky/documentation/toaster-manual/figures/bash-oecore.png b/yocto-poky/documentation/toaster-manual/figures/bash-oecore.png
new file mode 100644
index 0000000..801a5d9
--- /dev/null
+++ b/yocto-poky/documentation/toaster-manual/figures/bash-oecore.png
Binary files differ
diff --git a/yocto-poky/documentation/toaster-manual/figures/set-variable.png b/yocto-poky/documentation/toaster-manual/figures/set-variable.png
new file mode 100644
index 0000000..d36b527
--- /dev/null
+++ b/yocto-poky/documentation/toaster-manual/figures/set-variable.png
Binary files differ
diff --git a/yocto-poky/documentation/toaster-manual/figures/variable-added.png b/yocto-poky/documentation/toaster-manual/figures/variable-added.png
new file mode 100644
index 0000000..518f25f
--- /dev/null
+++ b/yocto-poky/documentation/toaster-manual/figures/variable-added.png
Binary files differ
diff --git a/yocto-poky/documentation/toaster-manual/toaster-manual-intro.xml b/yocto-poky/documentation/toaster-manual/toaster-manual-intro.xml
index ad9e08b..9f4c38b 100644
--- a/yocto-poky/documentation/toaster-manual/toaster-manual-intro.xml
+++ b/yocto-poky/documentation/toaster-manual/toaster-manual-intro.xml
@@ -39,7 +39,7 @@
         <para>
             You can use Toaster in Analysis Mode or Build Mode:
             <itemizedlist>
-                <listitem><para><emphasis>Analysis Mode:</emphasis>
+                <listitem><para id='toaster-analysis-mode'><emphasis>Analysis Mode:</emphasis>
                     In Analysis Mode, you can record builds and statistics.
                     In this Mode, you directly access the
                     <filename>bitbake</filename> command, which you then use to
@@ -82,7 +82,7 @@
                             </para></listitem>
                     </itemizedlist>
                     </para></listitem>
-                <listitem><para><emphasis>Build Mode:</emphasis>
+                <listitem><para id='toaster-build-mode'><emphasis>Build Mode:</emphasis>
                     In Build Mode, Toaster handles the build configuration,
                     scheduling and execution.
                     In this mode, all your interaction with the build system
diff --git a/yocto-poky/documentation/toaster-manual/toaster-manual-reference.xml b/yocto-poky/documentation/toaster-manual/toaster-manual-reference.xml
index 0c9401f..faca4ca 100644
--- a/yocto-poky/documentation/toaster-manual/toaster-manual-reference.xml
+++ b/yocto-poky/documentation/toaster-manual/toaster-manual-reference.xml
@@ -214,48 +214,48 @@
                         In the file, you will find a section for layer sources
                         such as the following:
                         <literallayout class='monospaced'>
-     "layersources": [
-         {
-             "name": "Local Yocto Project",
-             "sourcetype": "local",
-             "apiurl": "../../",
-             "branches": ["HEAD", "master", "fido", "dizzy"],
-             "layers": [
-                 {
-                     "name": "openembedded-core",
-                     "local_path": "meta",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta"
-                 },
-                 {
-                     "name": "meta-yocto",
-                     "local_path": "meta-yocto",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta-yocto"
-                 },
-                 {
-                     "name": "meta-yocto-bsp",
-                     "local_path": "meta-yocto-bsp",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta-yocto-bsp"
-                 }
+    "layersources": [
+        {
+            "name": "Local Yocto Project",
+            "sourcetype": "local",
+            "apiurl": "../../",
+            "branches": ["HEAD" ],
+            "layers": [
+                {
+                    "name": "openembedded-core",
+                    "local_path": "meta",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta"
+                },
+                {
+                    "name": "meta-yocto",
+                    "local_path": "meta-yocto",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta-yocto"
+                },
+                {
+                    "name": "meta-yocto-bsp",
+                    "local_path": "meta-yocto-bsp",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta-yocto-bsp"
+                }
 
-             ]
-         },
-         {
-             "name": "OpenEmbedded",
-             "sourcetype": "layerindex",
-             "apiurl": "http://layers.openembedded.org/layerindex/api/",
-             "branches": ["master", "fido", "dizzy"]
-         },
-         {
-             "name": "Imported layers",
-             "sourcetype": "imported",
-             "apiurl": "",
-             "branches": ["master", "fido", "dizzy", "HEAD"]
+            ]
+        },
+        {
+            "name": "OpenEmbedded",
+            "sourcetype": "layerindex",
+            "apiurl": "http://layers.openembedded.org/layerindex/api/",
+            "branches": ["master", "jethro" ,"fido"]
+        },
+        {
+            "name": "Imported layers",
+            "sourcetype": "imported",
+            "apiurl": "",
+            "branches": ["master", "jethro","fido", "HEAD"]
 
-         }
-     ],
+        }
+    ],
                         </literallayout>
                         You should add your own layer source to this section by
                         following the same format used for the "OpenEmbedded"
@@ -268,7 +268,7 @@
                         indicate which branches from your layer source you want
                         to make available through Toaster.
                         For example, the OpenEmbedded layer source makes
-                        available only its "master", "fido", and "dizzy"
+                        available only its "master", "fido", and "jethro"
                         branches.
                     </para>
 
@@ -373,12 +373,12 @@
                 As shipped, Toaster is configured to work with the following
                 releases:
                 <itemizedlist>
-                    <listitem><para><emphasis>Yocto Project 1.7 "Dizzy" or OpenEmbedded "Dizzy":</emphasis>
+                    <listitem><para><emphasis>Yocto Project 2.0 "Jethro" or OpenEmbedded "Jethro":</emphasis>
                         This release causes your Toaster projects to
-                        build against the head of the dizzy branch at
-                        <ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/poky/log/?h=dizzy'></ulink>
+                        build against the head of the jethro branch at
+                        <ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/poky/log/?h=jethro'></ulink>
                         or
-                        <ulink url='http://git.openembedded.org/openembedded-core/commit/?h=dizzy'></ulink>.
+                        <ulink url='http://git.openembedded.org/openembedded-core/commit/?h=jethro'></ulink>.
                         </para></listitem>
                     <listitem><para><emphasis>Yocto Project 1.8 "Fido" or OpenEmbedded "Fido":</emphasis>
                         This release causes your Toaster projects to
@@ -489,7 +489,7 @@
                         The branch for the layer source
                         (<filename>branch</filename>) used with the release.
                         For example, for the OpenEmbedded layer source, the
-                        "master", "fido", and "dizzy" branches are available.
+                        "master", "fido", and "jethro" branches are available.
                         </para></listitem>
                     <listitem><para><emphasis>Default Layers:</emphasis>
                         The set of default layers
@@ -538,7 +538,7 @@
              "branch": "master",
              "defaultlayers": [ "openembedded-core" ],
              "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" :  0 },
-             "helptext": "Toaster will run your builds using the OpenEmbedded master branch, where active development takes place. This is not a stable branch, so your builds might not work as expected."
+             "helptext": "Toaster will run your builds using the tip of the &lt;a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/\"&gt;Yocto Project master branch&lt;/a&gt;, where active development takes place. This is not a stable branch, so your builds might not work as expected."
          }
      ]
                 </literallayout>
@@ -662,7 +662,6 @@
          "IMAGE_FSTYPES": "ext3 jffs2 tar.bz2",
          "IMAGE_INSTALL_append": "",
          "PACKAGE_CLASSES": "package_rpm",
-         "SDKMACHINE"   : "x86_64"
      },
                     </literallayout>
                 </para>
@@ -710,48 +709,48 @@
                 <para>
                     Here is the default <filename>layersources</filename> area:
                     <literallayout class='monospaced'>
-     "layersources": [
-         {
-             "name": "Local Yocto Project",
-             "sourcetype": "local",
-             "apiurl": "../../",
-             "branches": ["HEAD", "master", "fido", "dizzy"],
-             "layers": [
-                 {
-                     "name": "openembedded-core",
-                     "local_path": "meta",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta"
-                 },
-                 {
-                     "name": "meta-yocto",
-                     "local_path": "meta-yocto",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta-yocto"
-                 },
-                 {
-                     "name": "meta-yocto-bsp",
-                     "local_path": "meta-yocto-bsp",
-                     "vcs_url": "remote:origin",
-                     "dirpath": "meta-yocto-bsp"
-                 }
+    "layersources": [
+        {
+            "name": "Local Yocto Project",
+            "sourcetype": "local",
+            "apiurl": "../../",
+            "branches": ["HEAD" ],
+            "layers": [
+                {
+                    "name": "openembedded-core",
+                    "local_path": "meta",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta"
+                },
+                {
+                    "name": "meta-yocto",
+                    "local_path": "meta-yocto",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta-yocto"
+                },
+                {
+                    "name": "meta-yocto-bsp",
+                    "local_path": "meta-yocto-bsp",
+                    "vcs_url": "remote:origin",
+                    "dirpath": "meta-yocto-bsp"
+                }
 
-             ]
-         },
-         {
-             "name": "OpenEmbedded",
-             "sourcetype": "layerindex",
-             "apiurl": "http://layers.openembedded.org/layerindex/api/",
-             "branches": ["master", "fido", "dizzy"]
-         },
-         {
-             "name": "Imported layers",
-             "sourcetype": "imported",
-             "apiurl": "",
-             "branches": ["master", "fido", "dizzy", "HEAD"]
+            ]
+        },
+        {
+            "name": "OpenEmbedded",
+            "sourcetype": "layerindex",
+            "apiurl": "http://layers.openembedded.org/layerindex/api/",
+            "branches": ["master", "jethro" ,"fido"]
+        },
+        {
+            "name": "Imported layers",
+            "sourcetype": "imported",
+            "apiurl": "",
+            "branches": ["master", "jethro","fido", "HEAD"]
 
-         }
-     ],
+        }
+    ],
                     </literallayout>
                 </para>
             </section>
@@ -763,7 +762,7 @@
                     This area of the JSON file defines the version of
                     BitBake Toaster uses.
                     As shipped, Toaster is configured to recognize four
-                    versions of BitBake: master, fido, dizzy, and HEAD.
+                    versions of BitBake: master, fido, jethro, and HEAD.
                     <note>
                         HEAD is a special option that builds whatever is
                         available on disk, without checking out any remote
@@ -781,18 +780,18 @@
              "branch": "master",
              "dirpath": "bitbake"
          },
+        {
+             "name": "jethro",
+             "giturl": "remote:origin",
+             "branch": "jethro",
+             "dirpath": "bitbake"
+         },
          {
              "name": "fido",
              "giturl": "remote:origin",
              "branch": "fido",
             "dirpath": "bitbake"
         },
-        {
-             "name": "dizzy",
-             "giturl": "remote:origin",
-             "branch": "dizzy",
-             "dirpath": "bitbake"
-         },
          {
              "name": "HEAD",
              "giturl": "remote:origin",
@@ -848,6 +847,15 @@
              "helptext": "Toaster will run your builds using the tip of the &lt;a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/\"&gt;Yocto Project master branch&lt;/a&gt;, where active development takes place. This is not a stable branch, so your builds might not work as expected."
          },
          {
+             "name": "jethro",
+             "description": "Yocto Project 2.0 Jethro",
+             "bitbake": "jethro",
+             "branch": "jethro",
+             "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
+             "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" :  0 },
+             "helptext": "Toaster will run your builds with the tip of the &lt;a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=jethro\"&gt;Yocto Project 2.0 \"Jethro\"&lt;/a&gt; branch."
+         },
+         {
              "name": "fido",
              "description": "Yocto Project 1.8 Fido",
              "bitbake": "fido",
@@ -857,15 +865,6 @@
              "helptext": "Toaster will run your builds with the tip of the &lt;a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=fido\"&gt;Yocto Project 1.8 \"Fido\"&lt;/a&gt; branch."
          },
          {
-             "name": "dizzy",
-             "description": "Yocto Project 1.7 Dizzy",
-             "bitbake": "dizzy",
-             "branch": "dizzy",
-             "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
-             "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" :  0 },
-             "helptext": "Toaster will run your builds with the tip of the &lt;a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dizzy\"&gt;Yocto Project 1.7 \"Dizzy\"&lt;/a&gt; branch."
-         },
-         {
              "name": "local",
              "description": "Local Yocto Project",
              "bitbake": "HEAD",
diff --git a/yocto-poky/documentation/toaster-manual/toaster-manual-setup-and-use.xml b/yocto-poky/documentation/toaster-manual/toaster-manual-setup-and-use.xml
index 869d249..2693569 100644
--- a/yocto-poky/documentation/toaster-manual/toaster-manual-setup-and-use.xml
+++ b/yocto-poky/documentation/toaster-manual/toaster-manual-setup-and-use.xml
@@ -6,9 +6,488 @@
 
 <title>Setting Up and Using Toaster</title>
 
-    <section id='using-toaster-in-analysis-mode'>
+    <section id='starting-toaster-for-local-development'>
+        <title>Starting Toaster for Local Development</title>
+
+        <para>
+            Once you have set up the Yocto Project and installed the
+            Toaster system dependencies as described in
+            "<link linkend='toaster-manual-start'>Preparing to Use Toaster</link>",
+            you are ready to start Toaster.
+        </para>
+
+        <para>
+            If you want to configure and start your builds using the
+            Toaster web interface
+            (i.e. "<link linkend='toaster-build-mode'>Build Mode</link>"),
+            navigate to the root of your
+            <ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>
+            (e.g. <filename>poky</filename>):
+            <literallayout class='monospaced'>
+     $ cd poky
+            </literallayout>
+            Next, start Toaster:
+            <literallayout class='monospaced'>
+     $ bitbake/bin/toaster
+            </literallayout>
+            Open your favourite browser and enter the following:
+            <literallayout class='monospaced'>
+     http://127.0.0.1:8000
+            </literallayout>
+            If you would rather configure and start your builds
+            using the command line
+            (i.e. <link linkend='toaster-analysis-mode'>Analysis Mode</link>),
+            you can get Toaster to "listen"
+            to your builds and collect information about them.
+            To do that, navigate to the root of your Source Directory:
+            <literallayout class='monospaced'>
+     $ cd poky
+            </literallayout>
+            Once in that directory, source the build environment script:
+            <literallayout class='monospaced'>
+     $ source oe-init-build-env
+            </literallayout>
+            Next, from the build directory (e.g.
+            <filename>poky/build</filename>), start Toaster using this
+            command:
+            <literallayout class='monospaced'>
+     $ source ../bitbake/bin/toaster
+            </literallayout>
+            You can now run builds normally.
+        </para>
+
+        <para>
+            To see the build information provided by Toaster, open your
+            favorite browser and enter the following:
+            <literallayout class='monospaced'>
+     http://127.0.0.1:8000
+            </literallayout>
+        </para>
+    </section>
+
+    <section id='setting-a-different-port'>
+        <title>Setting a Different Port</title>
+
+        <para>
+            By default, Toaster starts on port 8000.
+            You can use the <filename>WEBPORT</filename> parameter to
+            set a different port.
+            For example, either of the following commands sets the
+            port to "8400":
+            <literallayout class='monospaced'>
+     $ bitbake/bin/toaster webport=8400
+            </literallayout>
+            or
+            <literallayout class='monospaced'>
+     $ source ../bitbake/bin/toaster webport=8400
+            </literallayout>
+        </para>
+    </section>
+
+    <section id='the-directory-for-cloning-layers'>
+        <title>The Directory for Cloning Layers</title>
+
+        <para>
+            If you are running Toaster in
+            <link linkend='toaster-build-mode'>Build Mode</link>,
+            Toaster creates a <filename>_toaster_clones</filename>
+            directory inside your Source Directory
+            (i.e. <filename>poky</filename>).
+            For example, suppose you use this command to start Toaster:
+            <literallayout class='monospaced'>
+     poky/bitbake/bin/toaster
+            </literallayout>
+            In this example, Toaster creates and uses the
+            <filename>poky/_toaster_clones</filename>
+            directory to clone any layers needed for your builds.
+        </para>
+
+        <para>
+            Alternatively, if you would like all of your Toaster related
+            files and directories to be in a particular location other than
+            the default, you can set the <filename>TOASTER_DIR</filename>
+            environment variable, which takes precedence over your current
+            working directory.
+            Setting this environment variable causes Toaster to create and use
+            <filename>$TOASTER_DIR./_toaster_clones</filename>.
+        </para>
+    </section>
+
+    <section id='toaster-the-build-directory'>
+        <title>The Build Directory</title>
+
+        <para>
+            If you are running Toaster in
+            <link linkend='toaster-build-mode'>Build Mode</link>,
+            Toaster creates a build directory within your Source
+            Directory (e.g. <filename>poky</filename>).
+            For example, suppose you use this command to start Toaster:
+            <literallayout class='monospaced'>
+     poky/bitbake/bin/toaster
+            </literallayout>
+            In this example, Toaster creates and uses the
+            <filename>poky/build</filename>
+            directory to execute the builds.
+        </para>
+
+        <para>
+            Alternatively, if you would like all of your Toaster related files
+            and directories to be in a particular location, you can set
+            the <filename>TOASTER_DIR</filename> environment variable,
+            which takes precedence over your current working directory.
+            Setting this environment variable causes Toaster to use
+            <filename>$TOASTER_DIR./build</filename> as the build directory.
+        </para>
+    </section>
+
+    <section id='toaster-creating-a-django-super-user'>
+        <title>Creating a Django Superuser</title>
+
+        <para>
+            Toaster is built on the
+            <ulink url='https://www.djangoproject.com/'>Django framework</ulink>.
+            Django provides an administration interface you can use
+            to edit Toaster configuration parameters.
+        </para>
+
+        <para>
+            To access the Django administration interface, you must
+            create a superuser by following these steps:
+            <orderedlist>
+                <listitem><para>
+                    If you used <filename>virtualenv</filename>, which is
+                    recommended, to set up the Toaster system dependencies,
+                    you need be sure the virtual environment is activated.
+                    To activate this environment, use the following:
+                    <literallayout class='monospaced'>
+     $ source venv/bin/activate
+                    </literallayout>
+                    </para></listitem>
+                <listitem><para>
+                    From the root of your checkout directory, invoke the
+                    following command from <filename>manage.py</filename>:
+                    <literallayout class='monospaced'>
+     $ ./bitbake/lib/toaster/manage.py createsuperuser
+                    </literallayout>
+                    </para></listitem>
+                <listitem><para>
+                    Django prompts you for the username, which you need to
+                    provide.
+                    </para></listitem>
+                <listitem><para>
+                    Django prompts you for an email address, which is
+                    optional.
+                    </para></listitem>
+                <listitem><para>
+                    Django prompts you for a password, which you must provide.
+                    </para></listitem>
+                <listitem><para>
+                    Django prompts you to re-enter your password for verification.
+                    </para></listitem>
+            </orderedlist>
+            After completing these steps, the following confirmation message
+            appears:
+            <literallayout class='monospaced'>
+     Superuser created successfully.
+            </literallayout>
+        </para>
+
+        <para>
+            Creating a superuser allows you to access the Django administration
+            interface through a browser.
+            The URL for this interface is the same as the URL used for the
+            Toaster instance with "/admin" on the end.
+            For example, if you are running Toaster locally, use the
+            following URL:
+            <literallayout class='monospaced'>
+     http://127.0.0.1:8000/admin
+            </literallayout>
+            You can use the Django administration interface to set Toaster
+            configuration parameters such as the build directory, layer sources,
+            default variable values, and BitBake versions.
+        </para>
+    </section>
+
+    <section id='toaster-setting-up-a-production-instance-of-toaster'>
+        <title>Setting Up a Production Instance of Toaster</title>
+
+        <para>
+            You can use a production instance of Toaster to share the
+            Toaster instance with remote users, multiple users, or both.
+            The production instance is also the setup that can cope with
+            heavier loads on the web service.
+            Use the instructions in the following sections to set up
+            Toaster in
+            <link linkend='toaster-build-mode'>Build Mode</link>
+            where builds and projects are run,
+            viewed, and defined through the Toaster web interface.
+        </para>
+
+        <section id='toaster-production-instance-requirements'>
+            <title>Requirements</title>
+
+            <para>
+                Be sure you meet the following requirements:
+                <note>
+                    You must comply with all Apache,
+                    <filename>mod-wsgi</filename>, and Mysql requirements.
+                </note>
+                <itemizedlist>
+                    <listitem><para>
+                        Have all the build requirements as described in
+                        "<link linkend='toaster-setting-up-the-basic-system-requirements'>Setting Up the Basic System Requirements</link>"
+                        chapter.
+                        </para></listitem>
+                    <listitem><para>
+                        Have an Apache webserver.
+                        </para></listitem>
+                    <listitem><para>
+                        Have <filename>mod-wsgi</filename> for the Apache
+                        webserver.
+                        </para></listitem>
+                    <listitem><para>
+                        Use the Mysql database server.
+                        </para></listitem>
+                    <listitem><para>
+                        If you are using Ubuntu 14.04.3, run the following:
+                        <literallayout class='monospaced'>
+     $ sudo apt-get install apache2 libapache2-mod-wsgi mysql-server virtualenv libmysqlclient-dev
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        If you are using Fedora 22 or a RedHat distribution, run
+                        the following:
+                        <literallayout class='monospaced'>
+     $ sudo dnf install httpd mod_wsgi python-virtualenv gcc mysql-devel
+                        </literallayout>
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
+        <section id='toaster-installation-steps'>
+            <title>Installation</title>
+
+            <para>
+                Perform the following steps to install Toaster:
+                <orderedlist>
+                    <listitem><para>
+                        Checkout a copy of <filename>poky</filename>
+                        into the web server directory.
+                        You will be using <filename>/var/www/toaster</filename>:
+                        <literallayout class='monospaced'>
+     $ mkdir -p /var/www/toaster
+     $ cd /var/www/toaster/
+     $ git clone git://git.yoctoproject.org/poky
+     $ git checkout &DISTRO_NAME;
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        Initialize a virtual environment and install Toaster
+                        dependencies.
+                        Using a virtual environment keeps the Python packages
+                        isolated from your system-provided packages:
+                        <literallayout class='monospaced'>
+     $ cd /var/www/toaster/
+     $ virtualenv venv
+     $ source ./venv/bin/activate
+     $ pip install -r ./poky/bitbake/toaster-requirements.txt
+     $ pip install mysql
+     $ pip install MySQL-python
+                        </literallayout>
+                        <note>
+                            Isolating these packages is not required but is
+                            recommended.
+                            Alternatively, you can use your operating system's
+                            package manager to install the packages.
+                        </note>
+                        </para></listitem>
+                    <listitem><para>
+                        Configure Toaster by editing
+                        <filename>/var/www/toaster/poky/bitbake/lib/toaster/toastermain/settings.py</filename>
+                        as follows:
+                        <itemizedlist>
+                            <listitem><para>
+                                Edit the <filename>DATABASE</filename> settings:
+                                <literallayout class='monospaced'>
+     DATABASES = {
+         'default': {
+             'ENGINE': 'django.db.backends.mysql',
+             'NAME': 'toaster_data',
+             'USER': 'toaster',
+             'PASSWORD': 'yourpasswordhere',
+             'HOST': 'localhost',
+             'PORT': '3306',
+        }
+     }
+                                </literallayout>
+                                </para></listitem>
+                            <listitem><para>
+                                Edit the <filename>SECRET_KEY</filename>:
+                                <literallayout class='monospaced'>
+     SECRET_KEY = '<replaceable>your_secret_key</replaceable>'
+                                </literallayout>
+                                </para></listitem>
+                            <listitem><para>
+                                Edit the <filename>STATIC_ROOT</filename>:
+                                <literallayout class='monospaced'>
+     STATIC_ROOT = '/var/www/toaster/static_files/'
+                                </literallayout>
+                                </para></listitem>
+                            <listitem><para>
+                                Enable Build Mode by adding the following
+                                line to <filename>settings.py</filename>:
+                                <literallayout class='monospaced'>
+     BUILD_MODE=True
+                                </literallayout>
+                                </para></listitem>
+                        </itemizedlist>
+                        </para></listitem>
+                    <listitem><para>
+                        Add the database and user to the <filename>mysql</filename>
+                        server defined earlier:
+                        <literallayout class='monospaced'>
+     $ mysql -u root -p
+     mysql> CREATE DATABASE toaster_data;
+     mysql> CREATE USER 'toaster'@'localhost' identified by 'yourpasswordhere';
+     mysql> GRANT all on toaster_data.* to 'toaster'@'localhost';
+     mysql> quit
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        Get Toaster to create the database schema,
+                        default data, and gather the statically-served files:
+                        <literallayout class='monospaced'>
+     $ cd  /var/www/toaster/poky/
+     $ ./bitbake/lib/toaster/manage.py syncdb
+     $ ./bitbake/lib/toaster/manage.py migrate
+     $ TOASTER_DIR=`pwd` TOASTER_CONF=./meta-yocto/conf/toasterconf.json ./bitbake/lib/toaster/manage.py checksettings
+     $ ./bitbake/lib/toaster/manage.py collectstatic
+                        </literallayout>
+                        </para>
+
+                        <para>
+                            For the above set of commands, after moving to the
+                            <filename>poky</filename> directory,
+                            the <filename>syncdb</filename> and <filename>migrate</filename>
+                            commands ensure the database
+                            schema has had changes propagated correctly (i.e.
+                            migrations).
+                        </para>
+
+                        <para>
+                            The next line sets the Toaster root directory
+                            <filename>TOASTER_DIR</filename> and the location of
+                            the Toaster configuration file
+                            <filename>TOASTER_CONF</filename>, which is
+                            relative to the Toaster root directory
+                            <filename>TOASTER_DIR</filename>.
+                            For more information on the Toaster configuration file
+                            <filename>TOASTER_CONF</filename>, see the
+                            <link linkend='toaster-json-files'>JSON Files</link>
+                            section of this manual.
+                        </para>
+
+                        <para>
+                            This line also runs the <filename>checksettings</filename>
+                            command, which configures the location of the Toaster
+                            <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build directory</ulink>.
+                            The Toaster root directory <filename>TOASTER_DIR</filename>
+                            determines where the Toaster build directory
+                            is created on the file system.
+                            In the example above,
+                            <filename>TOASTER_DIR</filename> is set as follows:
+                            <literallayout class="monospaced">
+     /var/www/toaster/poky
+                            </literallayout>
+                            This setting causes the Toaster build directory to be:
+                            <literallayout class="monospaced">
+     /var/www/toaster/poky/build
+                            </literallayout>
+                        </para>
+
+                        <para>
+                            Finally, the <filename>collectstatic</filename> command
+                            is a Django framework command that collects all the
+                            statically served files into a designated directory to
+                            be served up by the Apache web server.
+                        </para></listitem>
+                    <listitem><para>
+                        Add an Apache configuration file for Toaster to your Apache web
+                        server's configuration directory.
+                        If you are using Ubuntu or Debian, put the file here:
+                        <literallayout class='monospaced'>
+     /etc/apache2/conf-available/toaster.conf
+                        </literallayout>
+                        If you are using Fedora or RedHat, put it here:
+                        <literallayout class='monospaced'>
+     /etc/httpd/conf.d/toaster.conf
+                        </literallayout>
+                        Following is a sample Apache configuration for Toaster
+                        you can follow:
+                        <literallayout class='monospaced'>
+     Alias /static /var/www/toaster/static_files
+     &lt;Directory /var/www/toaster/static_files&gt;
+             Order allow,deny
+             Allow from all
+             Require all granted
+     &lt;/Directory&gt;
+
+     WSGIDaemonProcess toaster_wsgi python-path=/var/www/toaster/poky/bitbake/lib/toaster:/var/www/toaster/venv/lib/python2.7/site-packages
+
+     WSGIScriptAlias / "/var/www/toaster/poky/bitbake/lib/toaster/toastermain/wsgi.py"
+     &lt;Location /&gt;
+         WSGIProcessGroup toastern_wsgi
+     &lt;/Location&gt;
+                        </literallayout>
+                        If you are using Ubuntu or Debian,
+                        you will need to enable the config and module for Apache:
+                        <literallayout class='monospaced'>
+     $ sudo a2enmod wsgi
+     $ sudo a2enconf toaster
+     $ chmod +x bitbake/lib/toaster/toastermain/wsgi.py
+                        </literallayout>
+                        Finally, restart Apache to make sure all new configuration
+                        is loaded.
+                        For Ubuntu and Debian use:
+                        <literallayout class='monospaced'>
+     $ sudo service apache2 restart
+                        </literallayout>
+                        For Fedora and RedHat use:
+                        <literallayout class='monospaced'>
+     $ sudo service httpd restart
+                        </literallayout>
+                        </para></listitem>
+                    <listitem><para>
+                        Install the build runner service.
+                        This service needs to be running in order to dispatch
+                        builds.
+                        Use this command:
+                        <literallayout class='monospaced'>
+     /var/www/toaster/poky/bitbake/lib/toaster/manage.py runbuilds
+                        </literallayout>
+                        Here is an example:
+                        <literallayout class='monospaced'>
+     #!/bin/sh
+     # toaster run builds dispatcher
+     cd /var/www/toaster/
+     source ./venv/bin/activate
+     ./bitbake/lib/toaster/manage.py runbuilds
+                        </literallayout>
+                        </para></listitem>
+                </orderedlist>
+                You can now open up a browser and start using Toaster.
+            </para>
+        </section>
+    </section>
+
+
+
+
+<!--    <section id='using-toaster-in-analysis-mode'>
         <title>Using Toaster in Analysis Mode</title>
 
+
         <para>
             This section describes how to use Toaster in Analysis Mode
             after setting Toaster up as a local instance or as a hosted
@@ -324,14 +803,14 @@
                     <listitem><para><emphasis>Start the BitBake Server:</emphasis>
                         Start the BitBake server using the following command:
                         <literallayout class='monospaced'>
-     $ bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:0 &amp;&amp; export BBSERVER=localhost:-1
+     $ bitbake &dash;&dash;postread conf/toaster.conf &dash;&dash;server-only -t xmlrpc -B localhost:0 &amp;&amp; export BBSERVER=localhost:-1
                         </literallayout>
                         </para></listitem>
                     <listitem><para><emphasis>Start the Logging Server:</emphasis>
                         Start the Toaster Logging Interface using the following
                         command:
                         <literallayout class='monospaced'>
-     $ nohup bitbake --observe-only -u toasterui >toaster_ui.log &amp;
+     $ nohup bitbake &dash;&dash;observe-only -u toasterui >toaster_ui.log &amp;
                         </literallayout>
                         <note>
                             No hard-coded ports are used in the BitBake options
@@ -810,6 +1289,7 @@
             </para>
         </section>
     </section>
+-->
 
     <section id='using-the-toaster-web-interface'>
         <title>Using the Toaster Web Interface</title>
@@ -867,58 +1347,119 @@
                     CPU usage, and disk I/O.
                     </para></listitem>
             </itemizedlist>
-            Following are several videos that show how to use the Toaster GUI:
-            <itemizedlist>
-                <listitem><para><emphasis>Build Configuration:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=qYgDZ8YzV6w'>video</ulink>
-                    overviews and demonstrates build configuration for Toaster.
-                    </para></listitem>
-                <listitem><para><emphasis>Toaster Homepage and Table Controls:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=QEARDnrR1Xw'>video</ulink>
-                    goes over the Toaster entry page, and provides
-                    an overview of the data manipulation capabilities of
-                    Toaster, which include search, sorting and filtering by
-                    different criteria.
-                    </para></listitem>
-                <listitem><para><emphasis>Build Dashboard:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=KKqHYcnp2gE'>video</ulink>
-                    shows you the build dashboard, a page providing an
-                    overview of the information available for a selected build.
-                    </para></listitem>
-                <listitem><para><emphasis>Image Information:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=XqYGFsmA0Rw'>video</ulink>
-                    walks through the information Toaster provides
-                    about images: packages installed and root file system.
-                    </para></listitem>
-                <listitem><para><emphasis>Configuration:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=UW-j-T2TzIg'>video</ulink>
-                    provides Toaster build configuration information.
-                    </para></listitem>
-                <listitem><para><emphasis>Tasks:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=D4-9vGSxQtw'>video</ulink>
-                    shows the information Toaster provides about the
-                    tasks run by the build system.
-                    </para></listitem>
-                <listitem><para><emphasis>Recipes and Packages Built:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=x-6dx4huNnw'>video</ulink>
-                    shows the information Toaster provides about recipes
-                    and packages built.
-                    </para></listitem>
-                <listitem><para><emphasis>Performance Data:</emphasis>
-                    This
-                    <ulink url='https://www.youtube.com/watch?v=qWGMrJoqusQ'>video</ulink>
-                    shows the build performance data provided by
-                    Toaster.
-                    </para></listitem>
-            </itemizedlist>
         </para>
+
+        <section id='web-interface-videos'>
+            <title>Toaster Web Interface Videos</title>
+
+            <para>
+                Following are several videos that show how to use the Toaster GUI:
+                <itemizedlist>
+                    <listitem><para><emphasis>Build Configuration:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=qYgDZ8YzV6w'>video</ulink>
+                        overviews and demonstrates build configuration for Toaster.
+                        </para></listitem>
+                    <listitem><para><emphasis>Build Custom Layers:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=QJzaE_XjX5c'>video</ulink>
+                        shows you how to build custom layers that are used with
+                        Toaster.
+                        </para></listitem>
+                    <listitem><para><emphasis>Toaster Homepage and Table Controls:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=QEARDnrR1Xw'>video</ulink>
+                        goes over the Toaster entry page, and provides
+                        an overview of the data manipulation capabilities of
+                        Toaster, which include search, sorting and filtering by
+                        different criteria.
+                        </para></listitem>
+                    <listitem><para><emphasis>Build Dashboard:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=KKqHYcnp2gE'>video</ulink>
+                        shows you the build dashboard, a page providing an
+                        overview of the information available for a selected build.
+                        </para></listitem>
+                    <listitem><para><emphasis>Image Information:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=XqYGFsmA0Rw'>video</ulink>
+                        walks through the information Toaster provides
+                        about images: packages installed and root file system.
+                        </para></listitem>
+                    <listitem><para><emphasis>Configuration:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=UW-j-T2TzIg'>video</ulink>
+                        provides Toaster build configuration information.
+                        </para></listitem>
+                    <listitem><para><emphasis>Tasks:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=D4-9vGSxQtw'>video</ulink>
+                        shows the information Toaster provides about the
+                        tasks run by the build system.
+                        </para></listitem>
+                    <listitem><para><emphasis>Recipes and Packages Built:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=x-6dx4huNnw'>video</ulink>
+                        shows the information Toaster provides about recipes
+                        and packages built.
+                        </para></listitem>
+                    <listitem><para><emphasis>Performance Data:</emphasis>
+                        This
+                        <ulink url='https://www.youtube.com/watch?v=qWGMrJoqusQ'>video</ulink>
+                        shows the build performance data provided by
+                        Toaster.
+                        </para></listitem>
+                </itemizedlist>
+            </para>
+        </section>
+
+        <section id='toaster-web-interface-preferred-version'>
+            <title>Building a Specific Recipe Given Multiple Versions</title>
+
+            <para>
+                Occasionally, a layer might provide more than one version of
+                the same recipe.
+                For example, the <filename>openembedded-core</filename> layer
+                provides two versions of the <filename>bash</filename> recipe
+                (i.e. 3.2.48 and 4.3.30-r0) and two versions of the
+                <filename>which</filename> recipe (i.e. 2.21 and 2.18).
+                The following figure shows this exact scenario:
+                <imagedata fileref="figures/bash-oecore.png" align="center" width="9in" depth="6in" />
+            </para>
+
+            <para>
+                By default, the OpenEmbedded build system builds one of the
+                two recipes.
+                For the <filename>bash</filename> case, version 4.3.30-r0 is
+                built by default.
+                Unfortunately, Toaster, as it exists, is not able to override
+                the default recipe version.
+                If you would like to build bash 3.2.48, you need to set the
+                <ulink url='&YOCTO_DOCS_REF_URL;#var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></ulink>
+                variable.
+                You can do so from Toaster, using the "Add variable" form,
+                which is available in the "BitBake variables" page of the
+                project configuration section as shown in the following screen:
+                <imagedata fileref="figures/add-variable.png" align="center" width="9in" depth="6in" />
+            </para>
+
+            <para>
+                To specify <filename>bash</filename> 3.2.48 as the version to build,
+                enter "PREFERRED_VERSION_bash" in the "Variable" field, and "3.2.48"
+                in the "Value" field.
+                Next, click the "Add variable" button:
+                <imagedata fileref="figures/set-variable.png" align="center" width="9in" depth="6in" />
+            </para>
+
+            <para>
+                After clicking the "Add variable" button, the settings for
+                <filename>PREFERRED_VERSION</filename> are added to the bottom
+                of the BitBake variables list.
+                With these settings, the OpenEmbedded build system builds the
+                desired version of the recipe rather than the default version:
+                <imagedata fileref="figures/variable-added.png" align="center" width="9in" depth="6in" />
+            </para>
+        </section>
     </section>
 
 <!--
diff --git a/yocto-poky/documentation/toaster-manual/toaster-manual-start.xml b/yocto-poky/documentation/toaster-manual/toaster-manual-start.xml
index fbdb5ec..daefa79 100644
--- a/yocto-poky/documentation/toaster-manual/toaster-manual-start.xml
+++ b/yocto-poky/documentation/toaster-manual/toaster-manual-start.xml
@@ -15,12 +15,13 @@
         <title>Setting Up the Basic System Requirements</title>
 
         <para>
-            You first need to be sure your build system is set up to run
-            the Yocto Project.
-            See the
-            "<ulink url='&YOCTO_DOCS_QS_URL;#yp-resources'>Setting Up to Use the Yocto Project</ulink>"
-            section in the Yocto Project Quick Start for information on how
-            to set up your system for the Yocto Project.
+            Before you can use Toaster, you need to first set up your
+            build system to run the Yocto Project.
+            To do this, follow the instructions in the
+            "<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Build Host Packages</ulink>"
+            and
+            "<ulink url='&YOCTO_DOCS_QS_URL;#releases'>Yocto Project Release</ulink>"
+            sections in the Yocto Project Quick Start.
         </para>
     </section>
 
@@ -41,16 +42,21 @@
             install-compatible format.
         </para>
 
-        <section id='toaster-optional-virtual-environment'>
-            <title>Optionally Setting Up a Python Virtual Environment</title>
+        <section id='toaster-virtual-environment'>
+            <title>Set Up a Python Virtual Environment</title>
 
             <para>
-                It is highly recommended that you use a Python virtual
-                environment that allows you to maintain a dedicated Python
-                executable and its own set of installed modules.
-                Doing so separates the executable from the Python and modules
-                provided by the operating system and therefore avoids any
-                version conflicts.
+                Set up a Python virtual environment that allows you
+                to maintain a dedicated Python executable and its own
+                set of installed modules.
+                Doing so separates the executable from Python and the
+                modules provided by the operating system.
+                This separation avoids any version conflicts.
+                <note>
+                    Creating a virtual environment is not absolutely
+                    necessary.
+                    However, doing so is highly recommended.
+                </note>
             </para>
 
             <para>
@@ -73,7 +79,7 @@
                         </para></listitem>
                 </orderedlist>
                 <note>
-                    If you do choose to set up a virtual environment in
+                    After setting up a virtual environment in
                     which to run Toaster, you must initialize that
                     virtual environment each time you want to start
                     Toaster.
diff --git a/yocto-poky/documentation/toaster-manual/toaster-manual.xml b/yocto-poky/documentation/toaster-manual/toaster-manual.xml
index 9dac6d9..59dca8f 100644
--- a/yocto-poky/documentation/toaster-manual/toaster-manual.xml
+++ b/yocto-poky/documentation/toaster-manual/toaster-manual.xml
@@ -26,7 +26,7 @@
                 <affiliation>
                     <orgname>Intel Corporation</orgname>
                 </affiliation>
-                <email>scott.m.rifenbark@intel.com</email>
+                <email>srifenbark@gmail.com</email>
             </author>
         </authorgroup>
 
@@ -37,9 +37,9 @@
                 <revremark>Released with the Yocto Project 1.8 Release.</revremark>
             </revision>
             <revision>
-                <revnumber>1.9</revnumber>
+                <revnumber>2.0</revnumber>
                 <date>October 2015</date>
-                <revremark>Released with the Yocto Project 1.9 Release.</revremark>
+                <revremark>Released with the Yocto Project 2.0 Release.</revremark>
             </revision>
        </revhistory>
 
diff --git a/yocto-poky/documentation/tools/mega-manual.sed b/yocto-poky/documentation/tools/mega-manual.sed
index bec40b3..088a99b 100644
--- a/yocto-poky/documentation/tools/mega-manual.sed
+++ b/yocto-poky/documentation/tools/mega-manual.sed
@@ -2,32 +2,32 @@
 # This style is for manual folders like "yocto-project-qs" and "poky-ref-manual".
 # This is the old way that did it.  Can't do that now that we have "bitbake-user-manual" strings
 # in the mega-manual.
-# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/[a-z]*-[a-z]*-[a-z]*\/[a-z]*-[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/poky-ref-manual\/poky-ref-manual.html#/\"link\" href=\"#/g
+# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/[a-z]*-[a-z]*-[a-z]*\/[a-z]*-[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/poky-ref-manual\/poky-ref-manual.html#/\"link\" href=\"#/g
 
 # Processes all other manuals (<word>-<word> style) except for the BitBake User Manual because
 # it is not included in the mega-manual.
 # This style is for manual folders that use two word, which is the standard now (e.g. "ref-manual").
 # This was the one-liner that worked before we introduced the BitBake User Manual, which is
 # not in the mega-manual.
-# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/[a-z]*-[a-z]*\/[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
+# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/[a-z]*-[a-z]*\/[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
 
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/adt-manual\/adt-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/bsp-guide\/bsp-guide.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/dev-manual\/dev-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/kernel-dev\/kernel-dev.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/profile-manual\/profile-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/ref-manual\/ref-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/toaster-manual\/toaster-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/adt-manual\/adt-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/bsp-guide\/bsp-guide.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/dev-manual\/dev-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/kernel-dev\/kernel-dev.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/profile-manual\/profile-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/ref-manual\/ref-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/toaster-manual\/toaster-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
 
 # Process cases where just an external manual is referenced without an id anchor
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/yocto-project-qs\/yocto-project-qs.html\" target=\"_top\">Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/dev-manual\/dev-manual.html\" target=\"_top\">Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/adt-manual\/adt-manual.html\" target=\"_top\">Yocto Project Application Developer's Guide<\/a>/Yocto Project Application Developer's Guide/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/bsp-guide\/bsp-guide.html\" target=\"_top\">Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/profile-manual\/profile-manual.html\" target=\"_top\">Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/kernel-dev\/kernel-dev.html\" target=\"_top\">Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/ref-manual\/ref-manual.html\" target=\"_top\">Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.9\/toaster-manual\/toaster-manual.html\" target=\"_top\">Toaster User Manual<\/a>/Toaster User Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/yocto-project-qs\/yocto-project-qs.html\" target=\"_top\">Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/dev-manual\/dev-manual.html\" target=\"_top\">Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/adt-manual\/adt-manual.html\" target=\"_top\">Yocto Project Application Developer's Guide<\/a>/Yocto Project Application Developer's Guide/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/bsp-guide\/bsp-guide.html\" target=\"_top\">Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/profile-manual\/profile-manual.html\" target=\"_top\">Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/kernel-dev\/kernel-dev.html\" target=\"_top\">Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/ref-manual\/ref-manual.html\" target=\"_top\">Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/2.0\/toaster-manual\/toaster-manual.html\" target=\"_top\">Toaster User Manual<\/a>/Toaster User Manual/g
diff --git a/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml b/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml
index 5da7314..5315dfe 100644
--- a/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml
+++ b/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml
@@ -308,7 +308,7 @@
                         </para></listitem>
                     <listitem><para><emphasis>Fedora</emphasis>
                         <literallayout class='monospaced'>
-     $ sudo yum install &FEDORA_HOST_PACKAGES_ESSENTIAL; SDL-devel xterm
+     $ sudo dnf install &FEDORA_HOST_PACKAGES_ESSENTIAL; SDL-devel xterm
                         </literallayout>
                         </para></listitem>
                     <listitem><para><emphasis>OpenSUSE</emphasis>
diff --git a/yocto-poky/meta-selftest/conf/machine/qemux86copy.conf b/yocto-poky/meta-selftest/conf/machine/qemux86copy.conf
new file mode 100644
index 0000000..76c13fd
--- /dev/null
+++ b/yocto-poky/meta-selftest/conf/machine/qemux86copy.conf
@@ -0,0 +1,3 @@
+require conf/machine/qemux86.conf
+
+MACHINEOVERRIDES .= ":qemux86"
diff --git a/yocto-poky/meta-selftest/lib/devtool/__init__.py b/yocto-poky/meta-selftest/lib/devtool/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/yocto-poky/meta-selftest/lib/devtool/__init__.py
diff --git a/yocto-poky/meta-selftest/lib/devtool/test.py b/yocto-poky/meta-selftest/lib/devtool/test.py
new file mode 100644
index 0000000..b7474b5
--- /dev/null
+++ b/yocto-poky/meta-selftest/lib/devtool/test.py
@@ -0,0 +1,11 @@
+import argparse
+
+def selftest_reverse(args, config, basepath, workspace):
+    """Reverse the value passed to verify the plugin is executing."""
+    print args.value[::-1]
+
+def register_commands(subparsers, context):
+    parser_build = subparsers.add_parser('selftest-reverse', help='Reverse value (for selftest)',
+                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser_build.add_argument('value', help='Value to reverse')
+    parser_build.set_defaults(func=selftest_reverse)
diff --git a/yocto-poky/meta-selftest/recipes-test/error/error.bb b/yocto-poky/meta-selftest/recipes-test/error/error.bb
new file mode 100644
index 0000000..a7bdecf
--- /dev/null
+++ b/yocto-poky/meta-selftest/recipes-test/error/error.bb
@@ -0,0 +1,11 @@
+SUMMARY = "Error Test case that fails on do_compile"
+DESCRIPTION = "This generates a compile time error to be used for testing."
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+
+INHIBIT_DEFAULT_DEPS = "1"
+EXCLUDE_FROM_WORLD = "1"
+
+do_compile() {
+        bbfatal "Failing as expected.";
+}
diff --git a/yocto-poky/meta-selftest/recipes-test/images/error-image.bb b/yocto-poky/meta-selftest/recipes-test/images/error-image.bb
new file mode 100644
index 0000000..13d9cc0
--- /dev/null
+++ b/yocto-poky/meta-selftest/recipes-test/images/error-image.bb
@@ -0,0 +1,8 @@
+SUMMARY = "An image that includes the error recipe and will therefore fail"
+DESCRIPTION = "This generates an error. Not currently used by oe-selftest"
+
+IMAGE_INSTALL = "error"
+
+IMAGE_LINGUAS = " "
+
+inherit core-image
diff --git a/yocto-poky/meta-selftest/recipes-test/images/test-empty-image.bb b/yocto-poky/meta-selftest/recipes-test/images/test-empty-image.bb
new file mode 100644
index 0000000..88d8d61
--- /dev/null
+++ b/yocto-poky/meta-selftest/recipes-test/images/test-empty-image.bb
@@ -0,0 +1,6 @@
+SUMMARY = "An empty image."
+IMAGE_INSTALL = ""
+IMAGE_LINGUAS = ""
+PACKAGE_INSTALL = ""
+
+inherit image
diff --git a/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb b/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb
index 073c569..89451bd 100644
--- a/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb
+++ b/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb
@@ -1,10 +1,14 @@
 SUMMARY = "An example of partitioned image."
 
+SRC_URI = "file://${FILE_DIRNAME}/${BPN}.wks"
+
 IMAGE_INSTALL = "packagegroup-core-boot ${ROOTFS_PKGMANAGE_BOOTSTRAP}"
 
 IMAGE_FSTYPES = "wic.bz2"
 RM_OLD_IMAGE = "1"
 
+DEPENDS = "syslinux syslinux-native parted-native dosfstools-native mtools-native"
+
 # core-image-minimal is referenced in .wks, so we need its rootfs
 # to be ready before our rootfs
 do_rootfs[depends] += "core-image-minimal:do_rootfs"
diff --git a/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks b/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks
index 29cd8f2..8f9be09 100644
--- a/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks
+++ b/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks
@@ -3,8 +3,8 @@
 # created from core-image-minimal and wic-image-minimal image recipes.
 
 part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext2 --label platform --align 1024
-part /core --source rootfs --rootfs-dir=core-image-minimal --ondisk sda --fstype=ext2 --label core --align 1024
-part /backup --source rootfs --rootfs-dir=wic-image-minimal --ondisk sda --fstype=ext2 --label backup --align 1024
+part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024
+part /core --source rootfs --rootfs-dir=core-image-minimal --ondisk sda --fstype=ext4 --label core --align 1024
+part /backup --source rootfs --rootfs-dir=wic-image-minimal --ondisk sda --fstype=ext4 --label backup --align 1024
 
 bootloader  --timeout=0  --append="rootwait console=tty0"
diff --git a/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb b/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
index cce44d9..6194d4f 100644
--- a/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
+++ b/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom.bb
@@ -43,9 +43,9 @@
 #   
 #   example configuration addition:
 #            SRC_URI += "file://smp.cfg"
-#   example patch addition (for kernel v3.4 only):
-#            SRC_URI += "file://0001-linux-version-tweak.patch
-#   example feature addition (for kernel v3.4 only):
+#   example patch addition (for kernel v4.x only):
+#            SRC_URI += "file://0001-linux-version-tweak.patch"
+#   example feature addition (for kernel v4.x only):
 #            SRC_URI += "file://feature.scc"
 #
 
@@ -56,15 +56,14 @@
 # tree if you do not want to build from Linus' tree.
 SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git;protocol=git;nocheckout=1;name=machine"
 
-LINUX_VERSION ?= "3.4"
-LINUX_VERSION_EXTENSION ?= "-custom"
+LINUX_VERSION ?= "4.2"
+LINUX_VERSION_EXTENSION_append = "-custom"
 
 # Modify SRCREV to a different commit hash in a copy of this recipe to
 # build a different release of the Linux kernel.
-# tag: v3.4 76e10d158efb6d4516018846f60c2ab5501900bc
-SRCREV_machine="76e10d158efb6d4516018846f60c2ab5501900bc"
+# tag: v4.2 64291f7db5bd8150a74ad2036f1037e6a0428df2
+SRCREV_machine="64291f7db5bd8150a74ad2036f1037e6a0428df2"
 
-PR = "r1"
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
 # Override COMPATIBLE_MACHINE to include your machine in a copy of this recipe
diff --git a/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom/0001-linux-version-tweak.patch b/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom/0001-linux-version-tweak.patch
index c9562da..1c88315 100644
--- a/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom/0001-linux-version-tweak.patch
+++ b/yocto-poky/meta-skeleton/recipes-kernel/linux/linux-yocto-custom/0001-linux-version-tweak.patch
@@ -1,26 +1,29 @@
-From 5a55943a6bbb10a79994a0b18071b2427dffb15f Mon Sep 17 00:00:00 2001
+From fb2c401374d4efe89e8da795e21d96fac038639d Mon Sep 17 00:00:00 2001
 From: Bruce Ashfield <bruce.ashfield@windriver.com>
 Date: Mon, 11 Jun 2012 15:31:42 -0400
 Subject: [PATCH] linux: version tweak
 
+Upstream-Status: Inappropriate [example code]
+
 Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
 ---
- Makefile |    2 +-
- 1 files changed, 1 insertions(+), 1 deletions(-)
+ Makefile | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
 
 diff --git a/Makefile b/Makefile
-index a687963..f5b1ac3 100644
+index c361593..099e8ff 100644
 --- a/Makefile
 +++ b/Makefile
-@@ -2,7 +2,7 @@ VERSION = 3
- PATCHLEVEL = 4
+@@ -2,7 +2,7 @@ VERSION = 4
+ PATCHLEVEL = 2
  SUBLEVEL = 0
  EXTRAVERSION =
--NAME = Saber-toothed Squirrel
-+NAME = Saber-toothed Squirrel-custom
+-NAME = Hurr durr I'ma sheep
++NAME = Hurr durr I'ma customized sheep 
  
  # *DOCUMENTATION*
  # To see a list of typical targets execute "make help"
 -- 
-1.7.5.4
+2.1.4
 
diff --git a/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf b/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf
index 0daebf3..ff46fb2 100644
--- a/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf
+++ b/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf
@@ -22,7 +22,7 @@
 SERIAL_CONSOLE = "115200 ttyO0"
 
 PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
-PREFERRED_VERSION_linux-yocto ?= "3.19%"
+PREFERRED_VERSION_linux-yocto ?= "4.1%"
 
 KERNEL_IMAGETYPE = "zImage"
 KERNEL_DEVICETREE = "am335x-bone.dtb am335x-boneblack.dtb"
diff --git a/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf b/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf
index 45ef237..476e690 100644
--- a/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf
+++ b/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf
@@ -11,7 +11,7 @@
 KERNEL_IMAGE_STRIP_EXTRA_SECTIONS  = ".comment"
 
 PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
-PREFERRED_VERSION_linux-yocto ?= "3.19%"
+PREFERRED_VERSION_linux-yocto ?= "4.1%"
 
 SERIAL_CONSOLE = "115200 ttyS0"
 USE_VT ?= "0"
diff --git a/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc b/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
index 7c10b56..1588556 100644
--- a/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
+++ b/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc
@@ -13,7 +13,7 @@
             ${XSERVER_X86_MODESETTING} \
            "
 
-MACHINE_EXTRA_RRECOMMENDS += "linux-firmware v86d eee-acpi-scripts"
+MACHINE_EXTRA_RRECOMMENDS += "linux-firmware eee-acpi-scripts"
 
 GLIBC_ADDONS = "nptl"
 
diff --git a/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf b/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf
index 2beef48..036b05f 100644
--- a/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf
+++ b/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf
@@ -14,7 +14,7 @@
 
 MACHINE_FEATURES = "keyboard pci ext2 ext3 serial"
 
-PREFERRED_VERSION_linux-yocto ?= "3.19%"
+PREFERRED_VERSION_linux-yocto ?= "4.1%"
 PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
 
 PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
diff --git a/yocto-poky/meta-yocto-bsp/lib/oeqa/yoctobsp.py b/yocto-poky/meta-yocto-bsp/lib/oeqa/yoctobsp.py
new file mode 100644
index 0000000..4c539a1
--- /dev/null
+++ b/yocto-poky/meta-yocto-bsp/lib/oeqa/yoctobsp.py
@@ -0,0 +1,39 @@
+import unittest
+import os
+import logging
+import tempfile
+import shutil
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd
+from oeqa.utils.decorators import skipUnlessPassed
+
+class YoctoBSP(oeSelfTest):
+
+    @classmethod
+    def setUpClass(self):
+        result = runCmd("yocto-bsp list karch")
+        self.karchs = [karch.lstrip() for karch in result.output.splitlines()][1:]
+
+    def test_yoctobsp_listproperties(self):
+        for karch in self.karchs:
+            result = runCmd("yocto-bsp list %s properties" % karch)
+            self.assertEqual(0, result.status, msg="Properties from %s architecture could not be listed" % karch)
+
+    def test_yoctobsp_create(self):
+        # Generate a temporal file and folders for each karch
+        json = "{\"use_default_kernel\":\"yes\"}\n"
+        fd = tempfile.NamedTemporaryFile(delete=False)
+        fd.write(json)
+        fd.close()
+        tmpfolders = dict([(karch, tempfile.mkdtemp()) for karch in self.karchs])
+        # Create BSP
+        for karch in self.karchs:
+            result = runCmd("yocto-bsp create test %s -o %s -i %s" % (karch, "%s/unitest" % tmpfolders[karch], fd.name))
+            self.assertEqual(0, result.status, msg="Could not create a BSP with architecture %s using %s " % (karch, fd.name))
+        # Remove tmp file/folders
+        os.unlink(fd.name)
+        self.assertFalse(os.path.exists(fd.name), msg = "Temporal file %s could not be removed" % fd.name)
+        for tree in tmpfolders.values():
+            shutil.rmtree(tree)
+            self.assertFalse(os.path.exists(tree), msg = "Temporal folder %s could not be removed" % tree)
diff --git a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 589ece7..310aeb8 100644
--- a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -7,8 +7,8 @@
 KMACHINE_genericx86 ?= "common-pc"
 KMACHINE_genericx86-64 ?= "common-pc-64"
 
-SRCREV_machine_genericx86 ?= "af1f7f586bd32d39c057f17606991b887eadb389"
-SRCREV_machine_genericx86-64 ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
+SRCREV_machine_genericx86 ?= "d9bf859dfae6f88b88b157119c20ae4d5e51420a"
+SRCREV_machine_genericx86-64 ?= "93b2b800d85c1565af7d96f3776dc38c85ae1902"
 SRCREV_machine_edgerouter ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
 SRCREV_machine_beaglebone ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
 SRCREV_machine_mpc8315e-rdb ?= "1cb1bbaf63cecc918cf36c89819a7464af4c4b13"
@@ -18,3 +18,6 @@
 COMPATIBLE_MACHINE_edgerouter = "edgerouter"
 COMPATIBLE_MACHINE_beaglebone = "beaglebone"
 COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb"
+
+LINUX_VERSION_genericx86 = "3.14.39"
+LINUX_VERSION_genericx86-64 = "3.14.39"
diff --git a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.19.bbappend
index c87f840..396af14 100644
--- a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -7,8 +7,8 @@
 KMACHINE_genericx86 ?= "common-pc"
 KMACHINE_genericx86-64 ?= "common-pc-64"
 
-SRCREV_machine_genericx86 ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
-SRCREV_machine_genericx86-64 ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
+SRCREV_machine_genericx86 ?= "1583bf79b946cd5581d84d8c369b819a5ecb94b4"
+SRCREV_machine_genericx86-64 ?= "1583bf79b946cd5581d84d8c369b819a5ecb94b4"
 SRCREV_machine_edgerouter ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
 SRCREV_machine_beaglebone ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
 SRCREV_machine_mpc8315e-rdb ?= "2893f3e8ece72f6f47329714d6afe4c9c545bbf9"
diff --git a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend
index a5fe0e6..571da39 100644
--- a/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend
+++ b/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -1,11 +1,23 @@
 KBRANCH_genericx86  = "standard/base"
 KBRANCH_genericx86-64  = "standard/base"
+KBRANCH_edgerouter = "standard/edgerouter"
+KBRANCH_beaglebone = "standard/beaglebone"
+KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb"
 
 KMACHINE_genericx86 ?= "common-pc"
 KMACHINE_genericx86-64 ?= "common-pc-64"
 
-SRCREV_machine_genericx86 ?= "4e30e64c44df9e59bd13239951bb8d2b5b276e6f"
-SRCREV_machine_genericx86-64 ?= "4e30e64c44df9e59bd13239951bb8d2b5b276e6f"
+SRCREV_machine_genericx86 ?= "2e0ac7b6c4e3ada23a84756287e9b7051ace939a"
+SRCREV_machine_genericx86-64 ?= "2e0ac7b6c4e3ada23a84756287e9b7051ace939a"
+SRCREV_machine_edgerouter ?= "79a31b9d23db126f8a6be3eb88fd683056a213f1"
+SRCREV_machine_beaglebone ?= "efb6ffb2ca96a364f916c9890ad023fc595e0e6e"
+SRCREV_machine_mpc8315e-rdb ?= "79a31b9d23db126f8a6be3eb88fd683056a213f1"
 
 COMPATIBLE_MACHINE_genericx86 = "genericx86"
 COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64"
+COMPATIBLE_MACHINE_edgerouter = "edgerouter"
+COMPATIBLE_MACHINE_beaglebone = "beaglebone"
+COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb"
+
+LINUX_VERSION_genericx86 = "4.1.17"
+LINUX_VERSION_genericx86-64 = "4.1.17"
diff --git a/yocto-poky/meta-yocto/conf/distro/poky.conf b/yocto-poky/meta-yocto/conf/distro/poky.conf
index 7ac17fb..f654f9e 100644
--- a/yocto-poky/meta-yocto/conf/distro/poky.conf
+++ b/yocto-poky/meta-yocto/conf/distro/poky.conf
@@ -1,7 +1,7 @@
 DISTRO = "poky"
 DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
-DISTRO_VERSION = "1.8+snapshot-${DATE}"
-DISTRO_CODENAME = "master"
+DISTRO_VERSION = "2.0.1"
+DISTRO_CODENAME = "jethro"
 SDK_VENDOR = "-pokysdk"
 SDK_VERSION := "${@'${DISTRO_VERSION}'.replace('snapshot-${DATE}','snapshot')}"
 
@@ -72,10 +72,13 @@
 SANITY_TESTED_DISTROS ?= " \
             poky-1.7 \n \
             poky-1.8 \n \
+            poky-2.0 \n \
             Ubuntu-14.04 \n \
             Ubuntu-14.10 \n \
             Ubuntu-15.04 \n \
+            Ubuntu-15.10 \n \
             Fedora-21 \n \
+            Fedora-22 \n \
             CentOS-6.* \n \
             CentOS-7.* \n \
             Debian-7.* \n \
diff --git a/yocto-poky/meta-yocto/conf/local.conf.sample b/yocto-poky/meta-yocto/conf/local.conf.sample
index 371349d..bf6dd90 100644
--- a/yocto-poky/meta-yocto/conf/local.conf.sample
+++ b/yocto-poky/meta-yocto/conf/local.conf.sample
@@ -151,7 +151,9 @@
 #   - 'image-swab' to perform host system intrusion detection
 # NOTE: if listing mklibs & prelink both, then make sure mklibs is before prelink
 # NOTE: mklibs also needs to be explicitly enabled for a given image, see local.conf.extended
-USER_CLASSES ?= "buildstats image-mklibs image-prelink"
+# NOTE: image-prelink is currently broken due to problems with the prelinker.  It is advised
+# that you do NOT run the prelinker at this time. 
+USER_CLASSES ?= "buildstats image-mklibs"
 
 #
 # Runtime testing of images
diff --git a/yocto-poky/meta-yocto/conf/local.conf.sample.extended b/yocto-poky/meta-yocto/conf/local.conf.sample.extended
index ccdd326..bc765a1 100644
--- a/yocto-poky/meta-yocto/conf/local.conf.sample.extended
+++ b/yocto-poky/meta-yocto/conf/local.conf.sample.extended
@@ -165,6 +165,15 @@
 # currently an example class is image_types_uboot
 # IMAGE_CLASSES = " image_types_uboot"
 
+# The following options will build a companion 'debug filesystem' in addition
+# to the normal deployable filesystem.  This companion system allows a
+# debugger to know the symbols and related sources.  It can be used to
+# debug a remote 'production' system without having to add the debug symbols
+# and sources to remote system.  If IMAGE_FSTYPES_DEBUGFS is not defined, it
+# defaults to IMAGE_FSTYPES.
+#IMAGE_GEN_DEBUGFS = "1"
+#IMAGE_FSTYPES_DEBUGFS = "tar.gz"
+
 # Incremental rpm image generation, the rootfs would be totally removed
 # and re-created in the second generation by default, but with
 # INC_RPM_IMAGE_GEN = "1", the rpm based rootfs would be kept, and will
diff --git a/yocto-poky/meta-yocto/conf/toasterconf.json b/yocto-poky/meta-yocto/conf/toasterconf.json
index c455276..3df1703 100644
--- a/yocto-poky/meta-yocto/conf/toasterconf.json
+++ b/yocto-poky/meta-yocto/conf/toasterconf.json
@@ -12,7 +12,7 @@
             "name": "Local Yocto Project",
             "sourcetype": "local",
             "apiurl": "../../",
-            "branches": ["HEAD", "master", "fido", "dizzy"],
+            "branches": ["HEAD" ],
             "layers": [
                 {
                     "name": "openembedded-core",
@@ -39,13 +39,13 @@
             "name": "OpenEmbedded",
             "sourcetype": "layerindex",
             "apiurl": "http://layers.openembedded.org/layerindex/api/",
-            "branches": ["master", "fido", "dizzy"]
+            "branches": ["master", "jethro" ,"fido"]
         },
         {
             "name": "Imported layers",
             "sourcetype": "imported",
             "apiurl": "",
-            "branches": ["master", "fido", "dizzy", "HEAD"]
+            "branches": ["master", "jethro","fido", "HEAD"]
 
         }
     ],
@@ -57,15 +57,15 @@
             "dirpath": "bitbake"
         },
         {
-            "name": "fido",
+            "name": "jethro",
             "giturl": "remote:origin",
-            "branch": "fido",
+            "branch": "jethro",
             "dirpath": "bitbake"
         },
         {
-            "name": "dizzy",
+            "name": "fido",
             "giturl": "remote:origin",
-            "branch": "dizzy",
+            "branch": "fido",
             "dirpath": "bitbake"
         },
         {
@@ -89,6 +89,15 @@
             "helptext": "Toaster will run your builds using the tip of the <a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/\">Yocto Project master branch</a>, where active development takes place. This is not a stable branch, so your builds might not work as expected."
         },
         {
+            "name": "jethro",
+            "description": "Yocto Project 2.0 Jethro",
+            "bitbake": "jethro",
+            "branch": "jethro",
+            "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
+            "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" :  0 },
+            "helptext": "Toaster will run your builds with the tip of the <a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=jethro\">Yocto Project 2.0 \"Jethro\"</a> branch."
+        },
+        {
             "name": "fido",
             "description": "Yocto Project 1.8 Fido",
             "bitbake": "fido",
@@ -98,15 +107,6 @@
             "helptext": "Toaster will run your builds with the tip of the <a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=fido\">Yocto Project 1.8 \"Fido\"</a> branch."
         },
         {
-            "name": "dizzy",
-            "description": "Yocto Project 1.7 Dizzy",
-            "bitbake": "dizzy",
-            "branch": "dizzy",
-            "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
-            "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" :  0 },
-            "helptext": "Toaster will run your builds with the tip of the <a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dizzy\">Yocto Project 1.7 \"Dizzy\"</a> branch."
-        },
-        {
             "name": "local",
             "description": "Local Yocto Project",
             "bitbake": "HEAD",
diff --git a/yocto-poky/meta/classes/allarch.bbclass b/yocto-poky/meta/classes/allarch.bbclass
index 2fea7c0..4af38d7 100644
--- a/yocto-poky/meta/classes/allarch.bbclass
+++ b/yocto-poky/meta/classes/allarch.bbclass
@@ -27,6 +27,10 @@
         d.setVar("PACKAGE_EXTRA_ARCHS", "")
         d.setVar("SDK_ARCH", "none")
         d.setVar("SDK_CC_ARCH", "none")
+        d.setVar("TARGET_CPPFLAGS", "none")
+        d.setVar("TARGET_CFLAGS", "none")
+        d.setVar("TARGET_CXXFLAGS", "none")
+        d.setVar("TARGET_LDFLAGS", "none")
 
         # Avoid this being unnecessarily different due to nuances of
         # the target machine that aren't important for "all" arch
diff --git a/yocto-poky/meta/classes/archiver.bbclass b/yocto-poky/meta/classes/archiver.bbclass
index 089d707..41a552c 100644
--- a/yocto-poky/meta/classes/archiver.bbclass
+++ b/yocto-poky/meta/classes/archiver.bbclass
@@ -99,27 +99,6 @@
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
             elif ar_src == "configured":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
-
-    # The gcc staff uses shared source
-    flag = d.getVarFlag("do_unpack", "stamp-base", True)
-    if flag:
-        if ar_src in [ 'original', 'patched' ]:
-            ar_outdir = os.path.join(d.getVar('ARCHIVER_TOPDIR', True), 'work-shared')
-            d.setVar('ARCHIVER_OUTDIR', ar_outdir)
-        d.setVarFlag('do_ar_original', 'stamp-base', flag)
-        d.setVarFlag('do_ar_patched', 'stamp-base', flag)
-        d.setVarFlag('do_unpack_and_patch', 'stamp-base', flag)
-        d.setVarFlag('do_ar_original', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
-        d.setVarFlag('do_unpack_and_patch', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
-        d.setVarFlag('do_ar_patched', 'vardepsexclude', 'PN PF ARCHIVER_OUTDIR WORKDIR')
-        d.setVarFlag('create_diff_gz', 'vardepsexclude', 'PF')
-        d.setVarFlag('create_tarball', 'vardepsexclude', 'PF')
-
-        flag_clean = d.getVarFlag('do_unpack', 'stamp-base-clean', True)
-        if flag_clean:
-            d.setVarFlag('do_ar_original', 'stamp-base-clean', flag_clean)
-            d.setVarFlag('do_ar_patched', 'stamp-base-clean', flag_clean)
-            d.setVarFlag('do_unpack_and_patch', 'stamp-base-clean', flag_clean)
 }
 
 # Take all the sources for a recipe and puts them in WORKDIR/archiver-work/.
@@ -178,13 +157,8 @@
     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
     ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
     bb.note('Archiving the patched source...')
-    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
-    # The gcc staff uses shared source
-    flag = d.getVarFlag('do_unpack', 'stamp-base', True)
-    if flag:
-        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir, 'gcc')
-    else:
-        create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    d.setVar('WORKDIR', ar_outdir)
+    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
 }
 
 python do_ar_configured() {
@@ -222,17 +196,18 @@
         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
 
-def create_tarball(d, srcdir, suffix, ar_outdir, pf=None):
+def create_tarball(d, srcdir, suffix, ar_outdir):
     """
     create the tarball from srcdir
     """
     import tarfile
 
+    # Make sure we are only creating a single tarball for gcc sources
+    if d.getVar('SRC_URI', True) == "" and 'gcc' in d.getVar('PN', True):
+        return
+
     bb.utils.mkdirhier(ar_outdir)
-    if pf:
-        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % (pf, suffix))
-    else:
-        tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % \
+    tarname = os.path.join(ar_outdir, '%s-%s.tar.gz' % \
             (d.getVar('PF', True), suffix))
 
     srcdir = srcdir.rstrip('/')
@@ -275,11 +250,9 @@
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-
     # Change the WORKDIR to make do_unpack do_patch run in another dir.
-    d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    d.setVar('WORKDIR', ar_outdir)
 
     # The changed 'WORKDIR' also casued 'B' changed, create dir 'B' for the
     # possibly requiring of the following tasks (such as some recipes's
@@ -299,7 +272,11 @@
         src = d.getVar('S', True).rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)
-    bb.build.exec_func('do_patch', d)
+
+    # Make sure gcc sources are patched only once
+    if not ((d.getVar('SRC_URI', True) == "" and 'gcc' in d.getVar('PN', True))):
+        bb.build.exec_func('do_patch', d)
+
     # Create the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
         bb.note('Creating diff gz...')
@@ -370,7 +347,6 @@
 do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
 
 addtask do_ar_original after do_unpack
-addtask do_unpack_and_patch after do_patch
 addtask do_ar_patched after do_unpack_and_patch
 addtask do_ar_configured after do_unpack_and_patch
 addtask do_dumpdata
@@ -383,3 +359,11 @@
 do_deploy_all_archives() {
         :
 }
+
+python () {
+    # Add tasks in the correct order, specifically for linux-yocto to avoid race condition
+    if bb.data.inherits_class('kernel-yocto', d):
+        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
+    else:
+        bb.build.addtask('do_unpack_and_patch', None, 'do_patch', d)
+}
diff --git a/yocto-poky/meta/classes/autotools.bbclass b/yocto-poky/meta/classes/autotools.bbclass
index 819045a..d546a5c 100644
--- a/yocto-poky/meta/classes/autotools.bbclass
+++ b/yocto-poky/meta/classes/autotools.bbclass
@@ -77,16 +77,20 @@
 		  ${@append_libtool_sysroot(d)}"
 CONFIGUREOPT_DEPTRACK ?= "--disable-dependency-tracking"
 
+AUTOTOOLS_SCRIPT_PATH ?= "${S}"
+CONFIGURE_SCRIPT ?= "${AUTOTOOLS_SCRIPT_PATH}/configure"
+
+AUTOTOOLS_AUXDIR ?= "${AUTOTOOLS_SCRIPT_PATH}"
 
 oe_runconf () {
-	cfgscript="${S}/configure"
+	cfgscript="${CONFIGURE_SCRIPT}"
 	if [ -x "$cfgscript" ] ; then
 		bbnote "Running $cfgscript ${CONFIGUREOPTS} ${EXTRA_OECONF} $@"
 		set +e
 		${CACHED_CONFIGUREVARS} $cfgscript ${CONFIGUREOPTS} ${EXTRA_OECONF} "$@"
 		if [ "$?" != "0" ]; then
 			echo "Configure failed. The contents of all config.log files follows to aid debugging"
-			find ${S} -ignore_readdir_race -name config.log -print -exec cat {} \;
+			find ${B} -ignore_readdir_race -name config.log -print -exec cat {} \;
 			die "oe_runconf failed"
 		fi
 		set -e
@@ -95,8 +99,6 @@
 	fi
 }
 
-AUTOTOOLS_AUXDIR ?= "${S}"
-
 CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
 
 autotools_preconfigure() {
@@ -134,7 +136,7 @@
 ACLOCALDIR = "${B}/aclocal-copy"
 
 python autotools_copy_aclocals () {
-    s = d.getVar("S", True)
+    s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True)
     if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
         if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
             return
@@ -168,9 +170,9 @@
             for datadep in data[3]:
                 if datadep in done:
                     continue
-                done.append(datadep)
                 if (not data[0].endswith("-native")) and taskdepdata[datadep][0].endswith("-native") and dep != start:
                     continue
+                done.append(datadep)
                 new.append(datadep)
                 if taskdepdata[datadep][1] == "do_configure":
                     configuredeps.append(taskdepdata[datadep][0])
@@ -228,13 +230,13 @@
 	( for ac in `find ${S} -ignore_readdir_race -name configure.in -o -name configure.ac`; do
 		rm -f `dirname $ac`/configure
 		done )
-	if [ -e ${S}/configure.in -o -e ${S}/configure.ac ]; then
+	if [ -e ${AUTOTOOLS_SCRIPT_PATH}/configure.in -o -e ${AUTOTOOLS_SCRIPT_PATH}/configure.ac ]; then
 		olddir=`pwd`
-		cd ${S}
+		cd ${AUTOTOOLS_SCRIPT_PATH}
 		ACLOCAL="aclocal --system-acdir=${ACLOCALDIR}/"
 		if [ x"${acpaths}" = xdefault ]; then
 			acpaths=
-			for i in `find ${S} -ignore_readdir_race -maxdepth 2 -name \*.m4|grep -v 'aclocal.m4'| \
+			for i in `find ${AUTOTOOLS_SCRIPT_PATH} -ignore_readdir_race -maxdepth 2 -name \*.m4|grep -v 'aclocal.m4'| \
 				grep -v 'acinclude.m4' | grep -v 'aclocal-copy' | sed -e 's,\(.*/\).*$,\1,'|sort -u`; do
 				acpaths="$acpaths -I $i"
 			done
@@ -265,21 +267,20 @@
 				bbnote Executing glib-gettextize --force --copy
 				echo "no" | glib-gettextize --force --copy
 			fi
-		else if grep "^[[:space:]]*AM_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
+		elif grep "^[[:space:]]*AM_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
 			# We'd call gettextize here if it wasn't so broken...
-				cp ${STAGING_DATADIR_NATIVE}/gettext/config.rpath ${AUTOTOOLS_AUXDIR}/
-				if [ -d ${S}/po/ ]; then
-					cp -f ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in ${S}/po/
-					if [ ! -e ${S}/po/remove-potcdate.sin ]; then
-						cp ${STAGING_DATADIR_NATIVE}/gettext/po/remove-potcdate.sin ${S}/po/
-					fi
+			cp ${STAGING_DATADIR_NATIVE}/gettext/config.rpath ${AUTOTOOLS_AUXDIR}/
+			if [ -d ${S}/po/ ]; then
+				cp -f ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in ${S}/po/
+				if [ ! -e ${S}/po/remove-potcdate.sin ]; then
+					cp ${STAGING_DATADIR_NATIVE}/gettext/po/remove-potcdate.sin ${S}/po/
 				fi
-				for i in gettext.m4 iconv.m4 lib-ld.m4 lib-link.m4 lib-prefix.m4 nls.m4 po.m4 progtest.m4; do
-					for j in `find ${S} -ignore_readdir_race -name $i | grep -v aclocal-copy`; do
-						rm $j
-					done
-				done
 			fi
+			for i in gettext.m4 iconv.m4 lib-ld.m4 lib-link.m4 lib-prefix.m4 nls.m4 po.m4 progtest.m4; do
+				for j in `find ${S} -ignore_readdir_race -name $i | grep -v aclocal-copy`; do
+					rm $j
+				done
+			done
 		fi
 		mkdir -p m4
 		if grep "^[[:space:]]*[AI][CT]_PROG_INTLTOOL" $CONFIGURE_AC >/dev/null; then
@@ -290,7 +291,7 @@
 		ACLOCAL="$ACLOCAL" autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || die "autoreconf execution failed."
 		cd $olddir
 	fi
-	if [ -e ${S}/configure ]; then
+	if [ -e ${CONFIGURE_SCRIPT} ]; then
 		oe_runconf
 	else
 		bbnote "nothing to configure"
diff --git a/yocto-poky/meta/classes/base.bbclass b/yocto-poky/meta/classes/base.bbclass
index f078001..9bd5499 100644
--- a/yocto-poky/meta/classes/base.bbclass
+++ b/yocto-poky/meta/classes/base.bbclass
@@ -391,7 +391,8 @@
             items = flagval.split(",")
             num = len(items)
             if num > 4:
-                bb.error("Only enable,disable,depend,rdepend can be specified!")
+                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
+                    % (d.getVar('PN', True), flag))
 
             if flag in pkgconfig:
                 if num >= 3 and items[2]:
@@ -512,7 +513,8 @@
                 if unskipped_pkgs:
                     for pkg in skipped_pkgs:
                         bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + recipe_license)
-                        d.setVar('LICENSE_EXCLUSION-' + pkg, 1)
+                        mlprefix = d.getVar('MLPREFIX', True)
+                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                     for pkg in unskipped_pkgs:
                         bb.debug(1, "INCLUDING the package " + pkg)
                 elif all_skipped or incompatible_license(d, bad_licenses):
diff --git a/yocto-poky/meta/classes/buildhistory.bbclass b/yocto-poky/meta/classes/buildhistory.bbclass
index 4db0441..5e2581f 100644
--- a/yocto-poky/meta/classes/buildhistory.bbclass
+++ b/yocto-poky/meta/classes/buildhistory.bbclass
@@ -521,7 +521,7 @@
 POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host ;\
                                           buildhistory_get_sdk_installed_host ; "
 
-SDK_POSTPROCESS_COMMAND += "buildhistory_get_sdkinfo ; "
+SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; "
 
 def buildhistory_get_build_id(d):
     if d.getVar('BB_WORKERCONTEXT', True) != '1':
diff --git a/yocto-poky/meta/classes/cpan-base.bbclass b/yocto-poky/meta/classes/cpan-base.bbclass
index d9817ba..7810a4d 100644
--- a/yocto-poky/meta/classes/cpan-base.bbclass
+++ b/yocto-poky/meta/classes/cpan-base.bbclass
@@ -49,7 +49,11 @@
 FILES_${PN}-dbg += "${PERLLIBDIRS}/auto/*/.debug \
                     ${PERLLIBDIRS}/auto/*/*/.debug \
                     ${PERLLIBDIRS}/auto/*/*/*/.debug \
+                    ${PERLLIBDIRS}/auto/*/*/*/*/.debug \
+                    ${PERLLIBDIRS}/auto/*/*/*/*/*/.debug \
                     ${PERLLIBDIRS}/vendor_perl/${PERLVERSION}/auto/*/.debug \
                     ${PERLLIBDIRS}/vendor_perl/${PERLVERSION}/auto/*/*/.debug \
                     ${PERLLIBDIRS}/vendor_perl/${PERLVERSION}/auto/*/*/*/.debug \
+                    ${PERLLIBDIRS}/vendor_perl/${PERLVERSION}/auto/*/*/*/*/.debug \
+                    ${PERLLIBDIRS}/vendor_perl/${PERLVERSION}/auto/*/*/*/*/*/.debug \
                     "
diff --git a/yocto-poky/meta/classes/cpan.bbclass b/yocto-poky/meta/classes/cpan.bbclass
index e2bbd2f..8e079e0 100644
--- a/yocto-poky/meta/classes/cpan.bbclass
+++ b/yocto-poky/meta/classes/cpan.bbclass
@@ -17,7 +17,7 @@
 
 cpan_do_configure () {
 	export PERL5LIB="${PERL_ARCHLIB}"
-	yes '' | perl ${EXTRA_PERLFLAGS} Makefile.PL ${EXTRA_CPANFLAGS}
+	yes '' | perl ${EXTRA_PERLFLAGS} Makefile.PL INSTALLDIRS=vendor ${EXTRA_CPANFLAGS}
 
 	# Makefile.PLs can exit with success without generating a
 	# Makefile, e.g. in cases of missing configure time
diff --git a/yocto-poky/meta/classes/cpan_build.bbclass b/yocto-poky/meta/classes/cpan_build.bbclass
index 4f648a6..fac074d 100644
--- a/yocto-poky/meta/classes/cpan_build.bbclass
+++ b/yocto-poky/meta/classes/cpan_build.bbclass
@@ -8,6 +8,7 @@
 # Env var which tells perl if it should use host (no) or target (yes) settings
 export PERLCONFIGTARGET = "${@is_target(d)}"
 export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
+export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${@get_perl_version(d)}/"
 export LD = "${CCLD}"
 
 cpan_build_do_configure () {
@@ -16,22 +17,24 @@
 		. ${STAGING_LIBDIR}/perl/config.sh
 	fi
 
-	perl Build.PL --installdirs vendor \
-				--destdir ${D} \
-				--install_path arch="${libdir}/perl" \
-				--install_path script=${bindir} \
-				--install_path bin=${bindir} \
-				--install_path bindoc=${mandir}/man1 \
-				--install_path libdoc=${mandir}/man3 \
-                                ${EXTRA_CPAN_BUILD_FLAGS}
+	perl Build.PL --installdirs vendor --destdir ${D} \
+			${EXTRA_CPAN_BUILD_FLAGS}
+
+	# Build.PLs can exit with success without generating a
+	# Build, e.g. in cases of missing configure time
+	# dependencies. This is considered a best practice by
+	# cpantesters.org. See:
+	#  * http://wiki.cpantesters.org/wiki/CPANAuthorNotes
+	#  * http://www.nntp.perl.org/group/perl.qa/2008/08/msg11236.html
+	[ -e Build ] || bbfatal "No Build was generated by Build.PL"
 }
 
 cpan_build_do_compile () {
-        perl Build
+        perl Build verbose=1
 }
 
 cpan_build_do_install () {
-	perl Build install
+	perl Build install --destdir ${D}
 }
 
 EXPORT_FUNCTIONS do_configure do_compile do_install
diff --git a/yocto-poky/meta/classes/cross-canadian.bbclass b/yocto-poky/meta/classes/cross-canadian.bbclass
index d30a168..ea17f09 100644
--- a/yocto-poky/meta/classes/cross-canadian.bbclass
+++ b/yocto-poky/meta/classes/cross-canadian.bbclass
@@ -67,7 +67,7 @@
         d.appendVar("CANADIANEXTRAOS", " linux-gnuspe linux-uclibcspe linux-muslspe")
     elif tarch == "mips64":
         d.appendVar("CANADIANEXTRAOS", " linux-gnun32 linux-uclibcn32 linux-musln32")
-    if tarch == "arm":
+    if tarch == "arm" or tarch == "armeb":
         d.setVar("TARGET_OS", "linux-gnueabi")
     else:
         d.setVar("TARGET_OS", "linux")
diff --git a/yocto-poky/meta/classes/deploy.bbclass b/yocto-poky/meta/classes/deploy.bbclass
index 78f5e4a..8ad07da 100644
--- a/yocto-poky/meta/classes/deploy.bbclass
+++ b/yocto-poky/meta/classes/deploy.bbclass
@@ -8,3 +8,4 @@
 }
 addtask do_deploy_setscene
 do_deploy[dirs] = "${DEPLOYDIR} ${B}"
+do_deploy[stamp-extra-info] = "${MACHINE}"
diff --git a/yocto-poky/meta/classes/distrodata.bbclass b/yocto-poky/meta/classes/distrodata.bbclass
index 4168e43..44c06e1 100644
--- a/yocto-poky/meta/classes/distrodata.bbclass
+++ b/yocto-poky/meta/classes/distrodata.bbclass
@@ -33,7 +33,7 @@
         tmpdir = d.getVar('TMPDIR', True)
         distro_check_dir = os.path.join(tmpdir, "distro_check")
         datetime = localdata.getVar('DATETIME', True)
-        dist_check.update_distro_data(distro_check_dir, datetime)
+        dist_check.update_distro_data(distro_check_dir, datetime, localdata)
 
         if pn.find("-native") != -1:
             pnstripped = pn.split("-native")
@@ -118,7 +118,7 @@
         tmpdir = d.getVar('TMPDIR', True)
         distro_check_dir = os.path.join(tmpdir, "distro_check")
         datetime = localdata.getVar('DATETIME', True)
-        dist_check.update_distro_data(distro_check_dir, datetime)
+        dist_check.update_distro_data(distro_check_dir, datetime, localdata)
 
         pn = d.getVar("PN", True)
         bb.note("Package Name: %s" % pn)
@@ -271,10 +271,11 @@
         from bb.fetch2 import FetchError, NoMethodError, decodeurl
 
         """first check whether a uri is provided"""
-        src_uri = d.getVar('SRC_URI', True)
-        if not src_uri:
-                return
-        uri_type, _, _, _, _, _ = decodeurl(src_uri)
+        src_uri = (d.getVar('SRC_URI', True) or '').split()
+        if src_uri:
+            uri_type, _, _, _, _, _ = decodeurl(src_uri[0])
+        else:
+            uri_type = "none"
 
         """initialize log files."""
         logpath = d.getVar('LOG_DIR', True)
@@ -354,7 +355,10 @@
             elif cmp == 0:
                 pstatus = "MATCH"
 
-        psrcuri = psrcuri.split()[0]
+        if psrcuri:
+            psrcuri = psrcuri.split()[0]
+        else:
+            psrcuri = "none"
         pdepends = "".join(pdepends.split("\t"))
         pdesc = "".join(pdesc.split("\t"))
         no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True)
@@ -402,7 +406,7 @@
     bb.utils.mkdirhier(logpath)
     result_file = os.path.join(logpath, "distrocheck.csv")
     datetime = localdata.getVar('DATETIME', True)
-    dc.update_distro_data(distro_check_dir, datetime)
+    dc.update_distro_data(distro_check_dir, datetime, localdata)
 
     # do the comparison
     result = dc.compare_in_distro_packages_list(distro_check_dir, d)
diff --git a/yocto-poky/meta/classes/distutils3.bbclass b/yocto-poky/meta/classes/distutils3.bbclass
index e909ef4..443bf3a 100644
--- a/yocto-poky/meta/classes/distutils3.bbclass
+++ b/yocto-poky/meta/classes/distutils3.bbclass
@@ -21,6 +21,7 @@
         build ${DISTUTILS_BUILD_ARGS} || \
         bbfatal "${PYTHON_PN} setup.py build_ext execution failed."
 }
+distutils3_do_compile[vardepsexclude] = "MACHINE"
 
 distutils3_stage_headers() {
         install -d ${STAGING_DIR_HOST}${PYTHON_SITEPACKAGES_DIR}
@@ -33,6 +34,7 @@
         ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install_headers ${DISTUTILS_STAGE_HEADERS_ARGS} || \
         bbfatal "${PYTHON_PN} setup.py install_headers execution failed."
 }
+distutils3_stage_headers[vardepsexclude] = "MACHINE"
 
 distutils3_stage_all() {
         if [ ${BUILD_SYS} != ${HOST_SYS} ]; then
@@ -48,6 +50,7 @@
         ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py install ${DISTUTILS_STAGE_ALL_ARGS} || \
         bbfatal "${PYTHON_PN} setup.py install (stage) execution failed."
 }
+distutils3_stage_all[vardepsexclude] = "MACHINE"
 
 distutils3_do_install() {
         install -d ${D}${PYTHON_SITEPACKAGES_DIR}
@@ -90,6 +93,7 @@
             rmdir ${D}${datadir}/share
         fi
 }
+distutils3_do_install[vardepsexclude] = "MACHINE"
 
 EXPORT_FUNCTIONS do_compile do_install
 
diff --git a/yocto-poky/meta/classes/externalsrc.bbclass b/yocto-poky/meta/classes/externalsrc.bbclass
index 0fa5817..f7ed66d 100644
--- a/yocto-poky/meta/classes/externalsrc.bbclass
+++ b/yocto-poky/meta/classes/externalsrc.bbclass
@@ -73,7 +73,8 @@
 
         fetch_tasks = ['do_fetch', 'do_unpack']
         # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
-        d.appendVarFlag('do_configure', 'deps', ['do_unpack'])
+        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
+        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
 
         for task in d.getVar("SRCTREECOVEREDTASKS", True).split():
             if local_srcuri and task in fetch_tasks:
@@ -88,5 +89,5 @@
 
 python externalsrc_compile_prefunc() {
     # Make it obvious that this is happening, since forgetting about it could lead to much confusion
-    bb.warn('Compiling %s from external source %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True)))
+    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True)))
 }
diff --git a/yocto-poky/meta/classes/fontcache.bbclass b/yocto-poky/meta/classes/fontcache.bbclass
index d122387..8ebdfc4 100644
--- a/yocto-poky/meta/classes/fontcache.bbclass
+++ b/yocto-poky/meta/classes/fontcache.bbclass
@@ -9,12 +9,23 @@
 FONT_PACKAGES ??= "${PN}"
 FONT_EXTRA_RDEPENDS ?= "fontconfig-utils"
 FONTCONFIG_CACHE_DIR ?= "${localstatedir}/cache/fontconfig"
+FONTCONFIG_CACHE_PARAMS ?= "-v"
+# You can change this to e.g. FC_DEBUG=16 to debug fc-cache issues,
+# something has to be set, because qemuwrapper is using this variable after -E
+# multiple variables aren't allowed because for qemu they are separated
+# by comma and in -n "$D" case they should be separated by space
+FONTCONFIG_CACHE_ENV ?= "FC_DEBUG=1"
 fontcache_common() {
-if [ "x$D" != "x" ] ; then
-	$INTERCEPT_DIR/postinst_intercept update_font_cache ${PKG} mlprefix=${MLPREFIX} bindir=${bindir} \
-		libdir=${libdir} base_libdir=${base_libdir} fontconfigcachedir=${FONTCONFIG_CACHE_DIR}
+if [ -n "$D" ] ; then
+	$INTERCEPT_DIR/postinst_intercept update_font_cache ${PKG} mlprefix=${MLPREFIX} \
+		'bindir="${bindir}"' \
+		'libdir="${libdir}"' \
+		'base_libdir="${base_libdir}"' \
+		'fontconfigcachedir="${FONTCONFIG_CACHE_DIR}"' \
+		'fontconfigcacheparams="${FONTCONFIG_CACHE_PARAMS}"' \
+		'fontconfigcacheenv="${FONTCONFIG_CACHE_ENV}"'
 else
-	fc-cache
+	${FONTCONFIG_CACHE_ENV} fc-cache ${FONTCONFIG_CACHE_PARAMS}
 fi
 }
 
diff --git a/yocto-poky/meta/classes/grub-efi.bbclass b/yocto-poky/meta/classes/grub-efi.bbclass
index 4ddc2bb..9a4220a 100644
--- a/yocto-poky/meta/classes/grub-efi.bbclass
+++ b/yocto-poky/meta/classes/grub-efi.bbclass
@@ -52,7 +52,8 @@
 	mkdir -p ${EFIIMGDIR}/${EFIDIR}
 	cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR}
 	cp $iso_dir/vmlinuz ${EFIIMGDIR}
-	echo "${GRUB_IMAGE}" > ${EFIIMGDIR}/startup.nsh
+	EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g')
+	echo "fs0:${EFIPATH}\\${GRUB_IMAGE}" > ${EFIIMGDIR}/startup.nsh
 	if [ -f "$iso_dir/initrd" ] ; then
 		cp $iso_dir/initrd ${EFIIMGDIR}
 	fi
diff --git a/yocto-poky/meta/classes/gtk-icon-cache.bbclass b/yocto-poky/meta/classes/gtk-icon-cache.bbclass
index 12358e3..0f1052b 100644
--- a/yocto-poky/meta/classes/gtk-icon-cache.bbclass
+++ b/yocto-poky/meta/classes/gtk-icon-cache.bbclass
@@ -4,12 +4,13 @@
 
 gtk_icon_cache_postinst() {
 if [ "x$D" != "x" ]; then
-	$INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} mlprefix=${MLPREFIX} libdir=${libdir} \
-		base_libdir=${base_libdir}
+	$INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} \
+		mlprefix=${MLPREFIX} \
+		libdir_native=${libdir_native}
 else
 
 	# Update the pixbuf loaders in case they haven't been registered yet
-	GDK_PIXBUF_MODULEDIR=${libdir}/gdk-pixbuf-2.0/2.10.0/loaders gdk-pixbuf-query-loaders --update-cache
+	${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache
 
 	for icondir in /usr/share/icons/* ; do
 		if [ -d $icondir ] ; then
@@ -21,8 +22,9 @@
 
 gtk_icon_cache_postrm() {
 if [ "x$D" != "x" ]; then
-	$INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} mlprefix=${MLPREFIX} libdir=${libdir} \
-		base_libdir=${base_libdir}
+	$INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} \
+		mlprefix=${MLPREFIX} \
+		libdir=${libdir}
 else
 	for icondir in /usr/share/icons/* ; do
 		if [ -d $icondir ] ; then
diff --git a/yocto-poky/meta/classes/gummiboot.bbclass b/yocto-poky/meta/classes/gummiboot.bbclass
index 3d9c08b..9a97ac1 100644
--- a/yocto-poky/meta/classes/gummiboot.bbclass
+++ b/yocto-poky/meta/classes/gummiboot.bbclass
@@ -46,7 +46,8 @@
         mkdir -p ${EFIIMGDIR}/${EFIDIR}
         cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR}
         cp $iso_dir/vmlinuz ${EFIIMGDIR}
-        echo "${DEST_EFI_IMAGE}" > ${EFIIMGDIR}/startup.nsh
+        EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g')
+        echo "fs0:${EFIPATH}\\${DEST_EFI_IMAGE}" > ${EFIIMGDIR}/startup.nsh
         if [ -f "$iso_dir/initrd" ] ; then
             cp $iso_dir/initrd ${EFIIMGDIR}
         fi
diff --git a/yocto-poky/meta/classes/image-live.bbclass b/yocto-poky/meta/classes/image-live.bbclass
index fa7a131..23e4a5c 100644
--- a/yocto-poky/meta/classes/image-live.bbclass
+++ b/yocto-poky/meta/classes/image-live.bbclass
@@ -2,7 +2,7 @@
 AUTO_SYSLINUXCFG = "1"
 INITRD_IMAGE ?= "core-image-minimal-initramfs"
 INITRD ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE}-${MACHINE}.cpio.gz"
-SYSLINUX_ROOT = "root=/dev/ram0"
+SYSLINUX_ROOT ?= "root=/dev/ram0"
 SYSLINUX_TIMEOUT ?= "50"
 SYSLINUX_LABELS ?= "boot install"
 LABELS_append = " ${SYSLINUX_LABELS} "
diff --git a/yocto-poky/meta/classes/image-mklibs.bbclass b/yocto-poky/meta/classes/image-mklibs.bbclass
index c455a8e..cfb3ffc 100644
--- a/yocto-poky/meta/classes/image-mklibs.bbclass
+++ b/yocto-poky/meta/classes/image-mklibs.bbclass
@@ -25,7 +25,7 @@
 		x86_64)
 			dynamic_loader="${base_libdir}/ld-linux-x86-64.so.2"
 			;;
-		i586 )
+		i*86 )
 			dynamic_loader="${base_libdir}/ld-linux.so.2"
 			;;
 		arm )
diff --git a/yocto-poky/meta/classes/image-vm.bbclass b/yocto-poky/meta/classes/image-vm.bbclass
index 0632667..5ddd1cb 100644
--- a/yocto-poky/meta/classes/image-vm.bbclass
+++ b/yocto-poky/meta/classes/image-vm.bbclass
@@ -18,7 +18,8 @@
 IMAGE_TYPEDEP_vmdk = "ext4"
 IMAGE_TYPEDEP_vdi = "ext4"
 IMAGE_TYPEDEP_qcow2 = "ext4"
-IMAGE_TYPES_MASKED += "vmdk vdi qcow2"
+IMAGE_TYPEDEP_hdddirect = "ext4"
+IMAGE_TYPES_MASKED += "vmdk vdi qcow2 hdddirect"
 
 create_vmdk_image () {
     qemu-img convert -O vmdk ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.hdddirect ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.vmdk
diff --git a/yocto-poky/meta/classes/image.bbclass b/yocto-poky/meta/classes/image.bbclass
index 86a98bb..d2f8105 100644
--- a/yocto-poky/meta/classes/image.bbclass
+++ b/yocto-poky/meta/classes/image.bbclass
@@ -150,7 +150,7 @@
 IMAGE_TYPE_live = "${@build_live(d)}"
 inherit ${IMAGE_TYPE_live}
 
-IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow2"], "image-vm", "", d)}'
+IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow2", "hdddirect"], "image-vm", "", d)}'
 inherit ${IMAGE_TYPE_vm}
 
 python () {
@@ -239,6 +239,29 @@
 	# Tweak the mount option and fs_passno for rootfs in fstab
 	sed -i -e '/^[#[:space:]]*\/dev\/root/{s/defaults/ro/;s/\([[:space:]]*[[:digit:]]\)\([[:space:]]*\)[[:digit:]]$/\1\20/}' ${IMAGE_ROOTFS}/etc/fstab
 
+	# If we're using openssh and the /etc/ssh directory has no pre-generated keys,
+	# we should configure openssh to use the configuration file /etc/ssh/sshd_config_readonly
+	# and the keys under /var/run/ssh.
+	if [ -d ${IMAGE_ROOTFS}/etc/ssh ]; then
+		if [ -e ${IMAGE_ROOTFS}/etc/ssh/ssh_host_rsa_key ]; then
+			echo "SYSCONFDIR=/etc/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh
+			echo "SSHD_OPTS=" >> ${IMAGE_ROOTFS}/etc/default/ssh
+		else
+			echo "SYSCONFDIR=/var/run/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh
+			echo "SSHD_OPTS='-f /etc/ssh/sshd_config_readonly'" >> ${IMAGE_ROOTFS}/etc/default/ssh
+		fi
+	fi
+
+	# Also tweak the key location for dropbear in the same way.
+	if [ -d ${IMAGE_ROOTFS}/etc/dropbear ]; then
+		if [ -e ${IMAGE_ROOTFS}/etc/dropbear/dropbear_rsa_host_key ]; then
+			echo "DROPBEAR_RSAKEY_DIR=/etc/dropbear" >> ${IMAGE_ROOTFS}/etc/default/dropbear
+		else
+			echo "DROPBEAR_RSAKEY_DIR=/var/lib/dropbear" >> ${IMAGE_ROOTFS}/etc/default/dropbear
+		fi
+	fi
+
+
 	if ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "true", "false", d)}; then
 		# Change the value of ROOTFS_READ_ONLY in /etc/default/rcS to yes
 		if [ -e ${IMAGE_ROOTFS}/etc/default/rcS ]; then
@@ -249,18 +272,6 @@
 		if [ -x ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh ]; then
 			${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh
 		fi
-		# If we're using openssh and the /etc/ssh directory has no pre-generated keys,
-		# we should configure openssh to use the configuration file /etc/ssh/sshd_config_readonly
-		# and the keys under /var/run/ssh.
-		if [ -d ${IMAGE_ROOTFS}/etc/ssh ]; then
-			if [ -e ${IMAGE_ROOTFS}/etc/ssh/ssh_host_rsa_key ]; then
-				echo "SYSCONFDIR=/etc/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh
-				echo "SSHD_OPTS=" >> ${IMAGE_ROOTFS}/etc/default/ssh
-			else
-				echo "SYSCONFDIR=/var/run/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh
-				echo "SSHD_OPTS='-f /etc/ssh/sshd_config_readonly'" >> ${IMAGE_ROOTFS}/etc/default/ssh
-			fi
-		fi
 	fi
 
 	if ${@bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)}; then
diff --git a/yocto-poky/meta/classes/image_types.bbclass b/yocto-poky/meta/classes/image_types.bbclass
index 306403e..5036919 100644
--- a/yocto-poky/meta/classes/image_types.bbclass
+++ b/yocto-poky/meta/classes/image_types.bbclass
@@ -13,7 +13,7 @@
     deps = []
     ctypes = d.getVar('COMPRESSIONTYPES', True).split()
     for type in (d.getVar('IMAGE_FSTYPES', True) or "").split():
-        if type in ["vmdk", "vdi", "qcow2", "live", "iso", "hddimg"]:
+        if type in ["vmdk", "vdi", "qcow2", "hdddirect", "live", "iso", "hddimg"]:
             type = "ext4"
         basetype = type
         for ctype in ctypes:
@@ -139,17 +139,19 @@
 	# Cleanup cfg file
 	mv ubinize${vname}.cfg ${DEPLOY_DIR_IMAGE}/
 
-	# Create own symlink
-	cd ${DEPLOY_DIR_IMAGE}
-	if [ -e ${IMAGE_NAME}${vname}.rootfs.ubifs ]; then
-		ln -sf ${IMAGE_NAME}${vname}.rootfs.ubifs \
-		${IMAGE_LINK_NAME}${vname}.ubifs
+	# Create own symlinks for 'named' volumes
+	if [ -n "$vname" ]; then
+		cd ${DEPLOY_DIR_IMAGE}
+		if [ -e ${IMAGE_NAME}${vname}.rootfs.ubifs ]; then
+			ln -sf ${IMAGE_NAME}${vname}.rootfs.ubifs \
+			${IMAGE_LINK_NAME}${vname}.ubifs
+		fi
+		if [ -e ${IMAGE_NAME}${vname}.rootfs.ubi ]; then
+			ln -sf ${IMAGE_NAME}${vname}.rootfs.ubi \
+			${IMAGE_LINK_NAME}${vname}.ubi
+		fi
+		cd -
 	fi
-	if [ -e ${IMAGE_NAME}${vname}.rootfs.ubi ]; then
-		ln -sf ${IMAGE_NAME}${vname}.rootfs.ubi \
-		${IMAGE_LINK_NAME}${vname}.ubi
-	fi
-	cd -
 }
 
 IMAGE_CMD_multiubi () {
@@ -225,6 +227,7 @@
     vmdk \
     vdi \
     qcow2 \
+    hdddirect \
     elf \
     wic wic.gz wic.bz2 wic.lzma \
 "
@@ -252,7 +255,7 @@
 IMAGE_EXTENSION_live = "hddimg iso"
 
 # The IMAGE_TYPES_MASKED variable will be used to mask out from the IMAGE_FSTYPES,
-# images that will not be built at do_rootfs time: vmdk, vdi, qcow2, hddimg, iso, etc.
+# images that will not be built at do_rootfs time: vmdk, vdi, qcow2, hdddirect, hddimg, iso, etc.
 IMAGE_TYPES_MASKED ?= ""
 
 # The WICVARS variable is used to define list of bitbake variables used in wic code
diff --git a/yocto-poky/meta/classes/insane.bbclass b/yocto-poky/meta/classes/insane.bbclass
index 5c8629a..a77438d 100644
--- a/yocto-poky/meta/classes/insane.bbclass
+++ b/yocto-poky/meta/classes/insane.bbclass
@@ -32,7 +32,7 @@
             installed-vs-shipped compile-host-path install-host-path \
             pn-overrides infodir build-deps file-rdeps \
             unknown-configure-option symlink-to-sysroot multilib \
-            invalid-pkgconfig host-user-contaminated \
+            invalid-packageconfig host-user-contaminated \
             "
 ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
             perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
@@ -86,6 +86,7 @@
                         "mipsel":     ( 8,     0,    0,          True,          32),
                         "mips64":     ( 8,     0,    0,          False,         64),
                         "mips64el":   ( 8,     0,    0,          True,          64),
+                        "nios2":      (113,    0,    0,          True,          32),
                         "s390":       (22,     0,    0,          False,         32),
                         "sh4":        (42,     0,    0,          True,          32),
                         "sparc":      ( 2,     0,    0,          False,         32),
@@ -166,7 +167,7 @@
 
 def package_qa_clean_path(path,d):
     """ Remove the common prefix from the path. In this case it is the TMPDIR"""
-    return path.replace(d.getVar('TMPDIR',True),"")
+    return path.replace(d.getVar("TMPDIR", True) + "/", "")
 
 def package_qa_write_error(type, error, d):
     logfile = d.getVar('QA_LOGFILE', True)
@@ -980,6 +981,7 @@
         return
 
     dest = d.getVar('PKGDEST', True)
+    pn = d.getVar('PN', True)
     home = os.path.join(dest, 'home')
     if path == home or path.startswith(home + os.sep):
         return
@@ -991,14 +993,15 @@
         if exc.errno != errno.ENOENT:
             raise
     else:
+        rootfs_path = path[len(dest):]
         check_uid = int(d.getVar('HOST_USER_UID', True))
         if stat.st_uid == check_uid:
-            messages["host-user-contaminated"] = "%s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (path, check_uid)
+            messages["host-user-contaminated"] = "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid)
             return False
 
         check_gid = int(d.getVar('HOST_USER_GID', True))
         if stat.st_gid == check_gid:
-            messages["host-user-contaminated"] = "%s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (path, check_gid)
+            messages["host-user-contaminated"] = "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid)
             return False
     return True
 
@@ -1089,7 +1092,7 @@
         # Check package name
         if not pkgname_pattern.match(package):
             package_qa_handle_error("pkgname",
-                    "%s doesn't match the [a-z0-9.+-]+ regex\n" % package, d)
+                    "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
 
         path = "%s/%s" % (pkgdest, package)
         if not package_qa_walk(path, warnchecks, errorchecks, skip, package, d):
@@ -1143,7 +1146,7 @@
         if "config.log" in files:
             if subprocess.call(statement, shell=True) == 0:
                 bb.fatal("""This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
-Rerun configure task after fixing this. The path was '%s'""" % root)
+Rerun configure task after fixing this.""")
 
         if "configure.ac" in files:
             configs.append(os.path.join(root,"configure.ac"))
@@ -1207,7 +1210,7 @@
             if pconfig not in pkgconfigflags:
                 pn = d.getVar('PN', True)
                 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
-                package_qa_handle_error("invalid-pkgconfig", error_msg, d)
+                package_qa_handle_error("invalid-packageconfig", error_msg, d)
 }
 
 python do_qa_unpack() {
diff --git a/yocto-poky/meta/classes/kernel-arch.bbclass b/yocto-poky/meta/classes/kernel-arch.bbclass
index 211b72b..d8b180e 100644
--- a/yocto-poky/meta/classes/kernel-arch.bbclass
+++ b/yocto-poky/meta/classes/kernel-arch.bbclass
@@ -13,14 +13,17 @@
                sh sh64 um h8300   \
                parisc s390  v850 \
                avr32 blackfin \
-               microblaze"
+               microblaze \
+               nios2"
 
 def map_kernel_arch(a, d):
     import re
 
     valid_archs = d.getVar('valid_archs', True).split()
 
-    if   re.match('(i.86|athlon|x86.64)$', a):  return 'x86'
+    if   re.match('i.86$', a):                  return 'i386'
+    elif re.match('x86.64$', a):                return 'x86_64'
+    elif re.match('athlon$', a):                return 'x86'
     elif re.match('armeb$', a):                 return 'arm'
     elif re.match('aarch64$', a):               return 'arm64'
     elif re.match('aarch64_be$', a):            return 'arm64'
diff --git a/yocto-poky/meta/classes/kernel-yocto.bbclass b/yocto-poky/meta/classes/kernel-yocto.bbclass
index 325f94c..c2d0d30 100644
--- a/yocto-poky/meta/classes/kernel-yocto.bbclass
+++ b/yocto-poky/meta/classes/kernel-yocto.bbclass
@@ -52,7 +52,9 @@
         parm = urldata.parm
         if "branch" in parm:
             branches = urldata.parm.get("branch").split(',')
-            return branches[0]
+            btype = urldata.parm.get("type")
+            if btype != "kmeta":
+                return branches[0]
 	    
     return default
 
@@ -182,11 +184,18 @@
 	source_dir=`echo ${S} | sed 's%/$%%'`
 	source_workdir="${WORKDIR}/git"
 	if [ -d "${WORKDIR}/git/" ]; then
-		# case: git repository (bare or non-bare)
+		# case: git repository
 		# if S is WORKDIR/git, then we shouldn't be moving or deleting the tree.
 		if [ "${source_dir}" != "${source_workdir}" ]; then
-			rm -rf ${S}
-			mv ${WORKDIR}/git ${S}
+			if [ -d "${source_workdir}/.git" ]; then
+				# regular git repository with .git
+				rm -rf ${S}
+				mv ${WORKDIR}/git ${S}
+			else
+				# create source for bare cloned git repository
+				git clone ${WORKDIR}/git ${S}
+				rm -rf ${WORKDIR}/git
+			fi
 		fi
 		cd ${S}
 	else
diff --git a/yocto-poky/meta/classes/kernel.bbclass b/yocto-poky/meta/classes/kernel.bbclass
index dfbdfd2..ee3e9a0 100644
--- a/yocto-poky/meta/classes/kernel.bbclass
+++ b/yocto-poky/meta/classes/kernel.bbclass
@@ -309,9 +309,18 @@
 		cp -fR include/generated/* $kerneldir/include/generated/
 	fi
 
-	if [ -d arch/${ARCH}/include/generated ]; then
-		mkdir -p $kerneldir/arch/${ARCH}/include/generated/
-		cp -fR arch/${ARCH}/include/generated/* $kerneldir/arch/${ARCH}/include/generated/
+	# When ARCH is set to i386 or x86_64, we need to map ARCH to the real name of src
+	# dir (x86) under arch/ of kenrel tree, so that we can find correct source to copy.
+
+	if [ "${ARCH}" = "i386" ] || [ "${ARCH}" = "x86_64" ]; then
+		KERNEL_SRCARCH=x86
+	else
+		KERNEL_SRCARCH=${ARCH}
+	fi
+
+	if [ -d arch/${KERNEL_SRCARCH}/include/generated ]; then
+		mkdir -p $kerneldir/arch/${KERNEL_SRCARCH}/include/generated/
+		cp -fR arch/${KERNEL_SRCARCH}/include/generated/* $kerneldir/arch/${KERNEL_SRCARCH}/include/generated/
 	fi
 }
 
@@ -413,7 +422,7 @@
 			  gawk '{print $1}'`
 
 		for str in ${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}; do {
-			if [ "$headers" != *"$str"* ]; then
+			if ! (echo "$headers" | grep -q "^$str$"); then
 				bbwarn "Section not found: $str";
 			fi
 
diff --git a/yocto-poky/meta/classes/libc-package.bbclass b/yocto-poky/meta/classes/libc-package.bbclass
index 47be691..adb4230 100644
--- a/yocto-poky/meta/classes/libc-package.bbclass
+++ b/yocto-poky/meta/classes/libc-package.bbclass
@@ -236,8 +236,8 @@
                 supported[locale] = charset
 
     def output_locale_source(name, pkgname, locale, encoding):
-        d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
-        (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
+        d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \
+        (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
         d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
         % (locale, encoding, locale))
         d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
diff --git a/yocto-poky/meta/classes/license.bbclass b/yocto-poky/meta/classes/license.bbclass
index c616a20..8ad4614 100644
--- a/yocto-poky/meta/classes/license.bbclass
+++ b/yocto-poky/meta/classes/license.bbclass
@@ -474,6 +474,7 @@
 do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
 
 ROOTFS_POSTPROCESS_COMMAND_prepend = "write_package_manifest; license_create_manifest; "
+do_rootfs[recrdeptask] += "do_populate_lic"
 
 do_populate_lic_setscene[dirs] = "${LICSSTATEDIR}/${PN}"
 do_populate_lic_setscene[cleandirs] = "${LICSSTATEDIR}"
diff --git a/yocto-poky/meta/classes/metadata_scm.bbclass b/yocto-poky/meta/classes/metadata_scm.bbclass
index 237e618..0f7f423 100644
--- a/yocto-poky/meta/classes/metadata_scm.bbclass
+++ b/yocto-poky/meta/classes/metadata_scm.bbclass
@@ -65,18 +65,19 @@
     return revision
 
 def base_get_metadata_git_branch(path, d):
-    branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()
+    import bb.process
 
-    if len(branch) != 0:
-        return branch
-    return "<unknown>"
+    try:
+        rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
+    except bb.process.ExecutionError:
+        rev = '<unknown>'
+    return rev.strip()
 
 def base_get_metadata_git_revision(path, d):
-    f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
-    data = f.read()
-    if f.close() is None:        
-        rev = data.split(" ")[0]
-        if len(rev) != 0:
-            return rev
-    return "<unknown>"
+    import bb.process
 
+    try:
+        rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
+    except bb.process.ExecutionError:
+        rev = '<unknown>'
+    return rev.strip()
diff --git a/yocto-poky/meta/classes/multilib.bbclass b/yocto-poky/meta/classes/multilib.bbclass
index 8f61d8d..052f911 100644
--- a/yocto-poky/meta/classes/multilib.bbclass
+++ b/yocto-poky/meta/classes/multilib.bbclass
@@ -26,6 +26,7 @@
     if bb.data.inherits_class('image', e.data):
         e.data.setVar("MLPREFIX", variant + "-")
         e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
+        e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT', True))
         target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
         if target_vendor:
             e.data.setVar("TARGET_VENDOR", target_vendor)
@@ -93,10 +94,6 @@
         # FIXME, we need to map this to something, not delete it!
         d.setVar("PACKAGE_INSTALL_ATTEMPTONLY", "")
 
-    if bb.data.inherits_class('populate_sdk_base', d):
-        clsextend.map_depends_variable("TOOLCHAIN_TARGET_TASK")
-        clsextend.map_depends_variable("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY")
-
     if bb.data.inherits_class('image', d):
         return
 
diff --git a/yocto-poky/meta/classes/multilib_global.bbclass b/yocto-poky/meta/classes/multilib_global.bbclass
index 612cfb6..67dc72b 100644
--- a/yocto-poky/meta/classes/multilib_global.bbclass
+++ b/yocto-poky/meta/classes/multilib_global.bbclass
@@ -93,20 +93,38 @@
         if prov != provexp and d.getVar(prov, False):
             d.renameVar(prov, provexp)
 
+    def translate_provide(prefix, prov):
+        if not prov.startswith("virtual/"):
+            return prefix + "-" + prov
+        if prov == "virtual/kernel":
+            return prov
+        prov = prov.replace("virtual/", "")
+        return "virtual/" + prefix + "-" + prov
 
     mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
     extramp = []
     for p in mp:
         if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p:
             continue
-        virt = ""
-        if p.startswith("virtual/"):
-            p = p.replace("virtual/", "")
-            virt = "virtual/"
         for pref in prefixes:
-            extramp.append(virt + pref + "-" + p)
+            extramp.append(translate_provide(pref, p))
     d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp))
 
+    abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
+    extras = []
+    for p in prefixes:
+        for a in abisafe:
+            extras.append(p + "-" + a)
+    d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras))
+
+    siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+    extras = []
+    for p in prefixes:
+        for a in siggen_exclude:
+            a1, a2 = a.split("->")
+            extras.append(translate_provide(p, a1) + "->" + translate_provide(p, a2))
+    d.appendVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", " " + " ".join(extras))
+
 python multilib_virtclass_handler_vendor () {
     if isinstance(e, bb.event.ConfigParsed):
         for v in e.data.getVar("MULTILIB_VARIANTS", True).split():
diff --git a/yocto-poky/meta/classes/package.bbclass b/yocto-poky/meta/classes/package.bbclass
index cd92beb..a86b680 100644
--- a/yocto-poky/meta/classes/package.bbclass
+++ b/yocto-poky/meta/classes/package.bbclass
@@ -39,7 +39,6 @@
 #    packaging steps
 
 inherit packagedata
-inherit prserv
 inherit chrpath
 
 # Need the package_qa_handle_error() in insane.bbclass
@@ -1146,7 +1145,8 @@
         else:
             for f in unshipped:
                 msg = msg + "\n  " + f
-            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install."
+            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
+            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
             package_qa_handle_error("installed-vs-shipped", msg, d)
 }
 populate_packages[dirs] = "${D}"
diff --git a/yocto-poky/meta/classes/pixbufcache.bbclass b/yocto-poky/meta/classes/pixbufcache.bbclass
index 349967d..dbe11e1 100644
--- a/yocto-poky/meta/classes/pixbufcache.bbclass
+++ b/yocto-poky/meta/classes/pixbufcache.bbclass
@@ -15,7 +15,7 @@
 else
 
 	# Update the pixbuf loaders in case they haven't been registered yet
-	GDK_PIXBUF_MODULEDIR=${libdir}/gdk-pixbuf-2.0/2.10.0/loaders gdk-pixbuf-query-loaders --update-cache
+	${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache
 
 	if [ -x ${bindir}/gtk-update-icon-cache ] && [ -d ${datadir}/icons ]; then
 		for icondir in /usr/share/icons/*; do
@@ -46,7 +46,7 @@
 }
 
 gdkpixbuf_complete() {
-	GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_BINDIR_NATIVE}/gdk-pixbuf-query-loaders --update-cache || exit 1
+	GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1
 }
 
 #
diff --git a/yocto-poky/meta/classes/populate_sdk_base.bbclass b/yocto-poky/meta/classes/populate_sdk_base.bbclass
index aa7a9a5..35e129b 100644
--- a/yocto-poky/meta/classes/populate_sdk_base.bbclass
+++ b/yocto-poky/meta/classes/populate_sdk_base.bbclass
@@ -80,6 +80,7 @@
 
 POPULATE_SDK_POST_TARGET_COMMAND_append = " write_target_sdk_manifest ; "
 POPULATE_SDK_POST_HOST_COMMAND_append = " write_host_sdk_manifest; "
+SDK_POSTPROCESS_COMMAND = " create_sdk_files; tar_sdk; ${SDK_PACKAGING_FUNC}; "
 
 # Some archs override this, we need the nativesdk version
 # turns out this is hard to get from the datastore due to TRANSLATED_TARGET_ARCH
@@ -108,15 +109,6 @@
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_TARGET)
 
     populate_sdk(d)
-
-    # Process DEFAULTTUNE
-    bb.build.exec_func("create_sdk_files", d)
-
-    bb.build.exec_func("tar_sdk", d)
-
-    sdk_packaging_func = d.getVar("SDK_PACKAGING_FUNC", True) or ""
-    if sdk_packaging_func.strip():
-        bb.build.exec_func(d.getVar("SDK_PACKAGING_FUNC", True), d)
 }
 
 fakeroot create_sdk_files() {
@@ -196,7 +188,7 @@
 	done
 }
 
-do_populate_sdk[dirs] = "${TOPDIR}"
+do_populate_sdk[dirs] = "${PKGDATA_DIR} ${TOPDIR}"
 do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS', True).split()])}  ${@d.getVarFlag('do_rootfs', 'depends', False)}"
 do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS', True).split()])}"
 do_populate_sdk[recrdeptask] += "do_packagedata do_package_write_rpm do_package_write_ipk do_package_write_deb"
diff --git a/yocto-poky/meta/classes/populate_sdk_ext.bbclass b/yocto-poky/meta/classes/populate_sdk_ext.bbclass
index 4ef8838..b9808bb 100644
--- a/yocto-poky/meta/classes/populate_sdk_ext.bbclass
+++ b/yocto-poky/meta/classes/populate_sdk_ext.bbclass
@@ -51,7 +51,7 @@
     core_meta_subdir = ''
 
     # Copy in all metadata layers + bitbake (as repositories)
-    buildsystem = oe.copy_buildsystem.BuildSystem(d)
+    buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
     baseoutpath = d.getVar('SDK_OUTPUT', True) + '/' + d.getVar('SDKPATH', True)
     layers_copied = buildsystem.copy_bitbake_and_layers(baseoutpath + '/layers')
 
@@ -155,10 +155,16 @@
         f.write('NATIVELSBSTRING_forcevariable = "%s"\n\n' % fixedlsbstring)
 
         # Ensure locked sstate cache objects are re-used without error
-        f.write('SIGGEN_LOCKEDSIGS_CHECK_LEVEL = "warn"\n\n')
+        f.write('SIGGEN_LOCKEDSIGS_CHECK_LEVEL = "none"\n\n')
+
+        # If you define a sdk_extraconf() function then it can contain additional config
+        extraconf = (d.getVar('sdk_extraconf', True) or '').strip()
+        if extraconf:
+            # Strip off any leading / trailing spaces
+            for line in extraconf.splitlines():
+                f.write(line.strip() + '\n')
 
         f.write('require conf/locked-sigs.inc\n')
-        f.write('require conf/work-config.inc\n')
 
     sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
@@ -178,17 +184,10 @@
                                                    d.getVar('SSTATE_DIR', True),
                                                    sstate_out, d,
                                                    fixedlsbstring)
-
-    # Create a dummy config file for additional settings
-    with open(baseoutpath + '/conf/work-config.inc', 'w') as f:
-        pass
 }
 
 def extsdk_get_buildtools_filename(d):
-    # This is somewhat of a hack
-    localdata = bb.data.createCopy(d)
-    localdata.setVar('PN', 'buildtools-tarball')
-    return localdata.expand('${SDK_NAME}-buildtools-nativesdk-standalone-*.sh')
+    return '*-buildtools-nativesdk-standalone-*.sh'
 
 install_tools() {
 	install -d ${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}
@@ -201,6 +200,8 @@
 	install $buildtools_path ${SDK_OUTPUT}/${SDKPATH}
 
 	install ${SDK_DEPLOY}/${BUILD_ARCH}-nativesdk-libc.tar.bz2 ${SDK_OUTPUT}/${SDKPATH}
+
+	install -m 0755 ${COREBASE}/meta/files/ext-sdk-prepare.sh ${SDK_OUTPUT}/${SDKPATH}
 }
 
 # Since bitbake won't run as root it doesn't make sense to try and install
@@ -218,29 +219,37 @@
 sdk_ext_postinst() {
 	printf "\nExtracting buildtools...\n"
 	cd $target_sdk_dir
-	printf "buildtools\ny" | ./*buildtools-nativesdk-standalone* > /dev/null
+	printf "buildtools\ny" | ./*buildtools-nativesdk-standalone* > /dev/null || ( printf 'ERROR: buildtools installation failed\n' ; exit 1 )
 
 	# Make sure when the user sets up the environment, they also get
 	# the buildtools-tarball tools in their path.
-	echo ". $target_sdk_dir/buildtools/environment-setup*" >> $target_sdk_dir/environment-setup*
+	env_setup_script="$target_sdk_dir/environment-setup-${REAL_MULTIMACH_TARGET_SYS}"
+	echo ". $target_sdk_dir/buildtools/environment-setup*" >> $env_setup_script
 
 	# Allow bitbake environment setup to be ran as part of this sdk.
-	echo "export OE_SKIP_SDK_CHECK=1" >> $target_sdk_dir/environment-setup*
+	echo "export OE_SKIP_SDK_CHECK=1" >> $env_setup_script
 
 	# A bit of another hack, but we need this in the path only for devtool
 	# so put it at the end of $PATH.
-	echo "export PATH=\$PATH:$target_sdk_dir/sysroots/${SDK_SYS}/${bindir_nativesdk}" >> $target_sdk_dir/environment-setup*
+	echo "export PATH=\$PATH:$target_sdk_dir/sysroots/${SDK_SYS}/${bindir_nativesdk}" >> $env_setup_script
+
+	echo "printf 'SDK environment now set up; additionally you may now run devtool to perform development tasks.\nRun devtool --help for further details.\n'" >> $env_setup_script
+
+	# Warn if trying to use external bitbake and the ext SDK together
+	echo "(which bitbake > /dev/null 2>&1 && echo 'WARNING: attempting to use the extensible SDK in an environment set up to run bitbake - this may lead to unexpected results. Please source this script in a new shell session instead.') || true" >> $env_setup_script
 
 	# For now this is where uninative.bbclass expects the tarball
 	mv *-nativesdk-libc.tar.* $target_sdk_dir/`dirname ${oe_init_build_env_path}`
 
 	if [ "$prepare_buildsystem" != "no" ]; then
-	    printf "Preparing build system...\n"
-	    # dash which is /bin/sh on Ubuntu will not preserve the
-	    # current working directory when first ran, nor will it set $1 when
-	    # sourcing a script. That is why this has to look so ugly.
-	    sh -c ". buildtools/environment-setup* > preparing_build_system.log && cd $target_sdk_dir/`dirname ${oe_init_build_env_path}` && set $target_sdk_dir && . $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> preparing_build_system.log && bitbake ${SDK_TARGETS} >> preparing_build_system.log" || { echo "SDK preparation failed: see `pwd`/preparing_build_system.log" ; exit 1 ; }
+		printf "Preparing build system...\n"
+		# dash which is /bin/sh on Ubuntu will not preserve the
+		# current working directory when first ran, nor will it set $1 when
+		# sourcing a script. That is why this has to look so ugly.
+		LOGFILE="$target_sdk_dir/preparing_build_system.log"
+		sh -c ". buildtools/environment-setup* > $LOGFILE && cd $target_sdk_dir/`dirname ${oe_init_build_env_path}` && set $target_sdk_dir && . $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE && $target_sdk_dir/ext-sdk-prepare.sh $target_sdk_dir '${SDK_TARGETS}' >> $LOGFILE 2>&1" || { echo "ERROR: SDK preparation failed: see $LOGFILE"; echo "printf 'ERROR: this SDK was not fully installed and needs reinstalling\n'" >> $env_setup_script ; exit 1 ; }
 	fi
+	rm -f $target_sdk_dir/ext-sdk-prepare.sh
 	echo done
 }
 
@@ -249,6 +258,11 @@
 SDK_POSTPROCESS_COMMAND_prepend_task-populate-sdk-ext = "copy_buildsystem; install_tools; "
 
 fakeroot python do_populate_sdk_ext() {
+    # FIXME hopefully we can remove this restriction at some point, but uninative
+    # currently forces this upon us
+    if d.getVar('SDK_ARCH', True) != d.getVar('BUILD_ARCH', True):
+        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH', True), d.getVar('BUILD_ARCH', True)))
+
     bb.build.exec_func("do_populate_sdk", d)
 }
 
diff --git a/yocto-poky/meta/classes/prserv.bbclass b/yocto-poky/meta/classes/prserv.bbclass
deleted file mode 100644
index 139597f..0000000
--- a/yocto-poky/meta/classes/prserv.bbclass
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/yocto-poky/meta/classes/ptest.bbclass b/yocto-poky/meta/classes/ptest.bbclass
index b5f470f..4dc5dbe 100644
--- a/yocto-poky/meta/classes/ptest.bbclass
+++ b/yocto-poky/meta/classes/ptest.bbclass
@@ -39,12 +39,12 @@
 do_install_ptest_base() {
     if [ -f ${WORKDIR}/run-ptest ]; then
         install -D ${WORKDIR}/run-ptest ${D}${PTEST_PATH}/run-ptest
-        if grep -q install-ptest: Makefile; then
-            oe_runmake DESTDIR=${D}${PTEST_PATH} install-ptest
-        fi
-        do_install_ptest
-        chown -R root:root ${D}${PTEST_PATH}
     fi
+    if grep -q install-ptest: Makefile; then
+        oe_runmake DESTDIR=${D}${PTEST_PATH} install-ptest
+    fi
+    do_install_ptest
+    chown -R root:root ${D}${PTEST_PATH}
 }
 
 do_configure_ptest_base[dirs] = "${B}"
diff --git a/yocto-poky/meta/classes/report-error.bbclass b/yocto-poky/meta/classes/report-error.bbclass
index 040c29e..82b5bcd 100644
--- a/yocto-poky/meta/classes/report-error.bbclass
+++ b/yocto-poky/meta/classes/report-error.bbclass
@@ -9,22 +9,25 @@
 ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
 
 def errorreport_getdata(e):
+    import codecs
     logpath = e.data.getVar('ERR_REPORT_DIR', True)
     datafile = os.path.join(logpath, "error-report.txt")
-    with open(datafile) as f:
+    with codecs.open(datafile, 'r', 'utf-8') as f:
         data = f.read()
     return data
 
 def errorreport_savedata(e, newdata, file):
     import json
+    import codecs
     logpath = e.data.getVar('ERR_REPORT_DIR', True)
     datafile = os.path.join(logpath, file)
-    with open(datafile, "w") as f:
+    with codecs.open(datafile, 'w', 'utf-8') as f:
         json.dump(newdata, f, indent=4, sort_keys=True)
     return datafile
 
 python errorreport_handler () {
         import json
+        import codecs
 
         logpath = e.data.getVar('ERR_REPORT_DIR', True)
         datafile = os.path.join(logpath, "error-report.txt")
@@ -53,8 +56,8 @@
             taskdata['task'] = task
             if log:
                 try:
-                    logFile = open(log, 'r')
-                    logdata = logFile.read().decode('utf-8')
+                    logFile = codecs.open(log, 'r', 'utf-8')
+                    logdata = logFile.read()
                     logFile.close()
                 except:
                     logdata = "Unable to read log file"
diff --git a/yocto-poky/meta/classes/sanity.bbclass b/yocto-poky/meta/classes/sanity.bbclass
index 2eb744f..ae86d26 100644
--- a/yocto-poky/meta/classes/sanity.bbclass
+++ b/yocto-poky/meta/classes/sanity.bbclass
@@ -3,7 +3,7 @@
 #
 
 SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \
-    gzip gawk chrpath wget cpio perl"
+    gzip gawk chrpath wget cpio perl file"
 
 def bblayers_conf_file(d):
     return os.path.join(d.getVar('TOPDIR', True), 'conf/bblayers.conf')
@@ -839,9 +839,12 @@
     else:
         bb.utils.mkdirhier(tmpdir)
         # Remove setuid, setgid and sticky bits from TMPDIR
-        os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
-        os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
-        os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
+        try:
+            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
+            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
+            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
+        except OSError as exc:
+            bb.warn("Unable to chmod TMPDIR: %s" % exc)
         with open(checkfile, "w") as f:
             f.write(tmpdir)
 
diff --git a/yocto-poky/meta/classes/sign_package_feed.bbclass b/yocto-poky/meta/classes/sign_package_feed.bbclass
new file mode 100644
index 0000000..4263810
--- /dev/null
+++ b/yocto-poky/meta/classes/sign_package_feed.bbclass
@@ -0,0 +1,31 @@
+# Class for signing package feeds
+#
+# Related configuration variables that will be used after this class is
+# inherited:
+# PACKAGE_FEED_GPG_PASSPHRASE_FILE
+#           Path to a file containing the passphrase of the signing key.
+# PACKAGE_FEED_GPG_NAME
+#           Name of the key to sign with. May be key id or key name.
+# GPG_BIN
+#           Optional variable for specifying the gpg binary/wrapper to use for
+#           signing.
+# GPG_PATH
+#           Optional variable for specifying the gnupg "home" directory:
+#
+inherit sanity
+
+PACKAGE_FEED_SIGN = '1'
+
+python () {
+    # Check sanity of configuration
+    for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'):
+        if not d.getVar(var, True):
+            raise_sanity_error("You need to define %s in the config" % var, d)
+
+    # Set expected location of the public key
+    d.setVar('PACKAGE_FEED_GPG_PUBKEY',
+             os.path.join(d.getVar('STAGING_ETCDIR_NATIVE'),
+                                   'PACKAGE-FEED-GPG-PUBKEY'))
+}
+
+do_package_index[depends] += "signing-keys:do_export_public_keys"
diff --git a/yocto-poky/meta/classes/sign_rpm.bbclass b/yocto-poky/meta/classes/sign_rpm.bbclass
index 0aa4cd8..f0c3dc9 100644
--- a/yocto-poky/meta/classes/sign_rpm.bbclass
+++ b/yocto-poky/meta/classes/sign_rpm.bbclass
@@ -4,23 +4,27 @@
 # RPM_GPG_PASSPHRASE_FILE
 #           Path to a file containing the passphrase of the signing key.
 # RPM_GPG_NAME
-#           Name of the key to sign with. Alternatively you can define
-#           %_gpg_name macro in your ~/.oerpmmacros file.
-# RPM_GPG_PUBKEY
-#           Path to a file containing the public key (in "armor" format)
-#           corresponding the signing key.
+#           Name of the key to sign with. May be key id or key name.
 # GPG_BIN
 #           Optional variable for specifying the gpg binary/wrapper to use for
 #           signing.
+# GPG_PATH
+#           Optional variable for specifying the gnupg "home" directory:
 #
 inherit sanity
 
 RPM_SIGN_PACKAGES='1'
 
 
-_check_gpg_name () {
-    macrodef=`rpm -E '%_gpg_name'`
-    [ "$macrodef" == "%_gpg_name" ] && return 1 || return 0
+python () {
+    # Check configuration
+    for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE_FILE'):
+        if not d.getVar(var, True):
+            raise_sanity_error("You need to define %s in the config" % var, d)
+
+    # Set the expected location of the public key
+    d.setVar('RPM_GPG_PUBKEY', os.path.join(d.getVar('STAGING_ETCDIR_NATIVE'),
+                                            'RPM-GPG-PUBKEY'))
 }
 
 
@@ -29,18 +33,11 @@
 
     # Find the correct rpm binary
     rpm_bin_path = d.getVar('STAGING_BINDIR_NATIVE', True) + '/rpm'
-    cmd = rpm_bin_path + " --addsign "
-    if gpg_name:
-        cmd += "--define '%%_gpg_name %s' " % gpg_name
-    else:
-        try:
-            bb.build.exec_func('_check_gpg_name', d)
-        except bb.build.FuncFailed:
-            raise_sanity_error("You need to define RPM_GPG_NAME in bitbake "
-                               "config or the %_gpg_name RPM macro defined "
-                               "(e.g. in  ~/.oerpmmacros", d)
+    cmd = rpm_bin_path + " --addsign --define '_gpg_name %s' " % gpg_name
     if d.getVar('GPG_BIN', True):
         cmd += "--define '%%__gpg %s' " % d.getVar('GPG_BIN', True)
+    if d.getVar('GPG_PATH', True):
+        cmd += "--define '_gpg_path %s' " % d.getVar('GPG_PATH', True)
     cmd += ' '.join(files)
 
     # Need to use pexpect for feeding the passphrase
@@ -51,20 +48,19 @@
         proc.expect(pexpect.EOF, timeout=900)
         proc.close()
     except pexpect.TIMEOUT as err:
-        bb.debug('rpmsign timeout: %s' % err)
+        bb.warn('rpmsign timeout: %s' % err)
         proc.terminate()
+    else:
+        if os.WEXITSTATUS(proc.status) or not os.WIFEXITED(proc.status):
+            bb.warn('rpmsign failed: %s' % proc.before.strip())
     return proc.exitstatus
 
 
 python sign_rpm () {
     import glob
 
-    rpm_gpg_pass_file = (d.getVar("RPM_GPG_PASSPHRASE_FILE", True) or "")
-    if rpm_gpg_pass_file:
-        with open(rpm_gpg_pass_file) as fobj:
-            rpm_gpg_passphrase = fobj.readlines()[0].rstrip('\n')
-    else:
-        raise_sanity_error("You need to define RPM_GPG_PASSPHRASE_FILE in the config", d)
+    with open(d.getVar("RPM_GPG_PASSPHRASE_FILE", True)) as fobj:
+        rpm_gpg_passphrase = fobj.readlines()[0].rstrip('\n')
 
     rpm_gpg_name = (d.getVar("RPM_GPG_NAME", True) or "")
 
@@ -73,3 +69,5 @@
     if rpmsign_wrapper(d, rpms, rpm_gpg_passphrase, rpm_gpg_name) != 0:
         raise bb.build.FuncFailed("RPM signing failed")
 }
+
+do_package_index[depends] += "signing-keys:do_export_public_keys"
diff --git a/yocto-poky/meta/classes/sstate.bbclass b/yocto-poky/meta/classes/sstate.bbclass
index b9ad6da..d09e27a 100644
--- a/yocto-poky/meta/classes/sstate.bbclass
+++ b/yocto-poky/meta/classes/sstate.bbclass
@@ -61,16 +61,6 @@
 # Whether to verify the GnUPG signatures when extracting sstate archives
 SSTATE_VERIFY_SIG ?= "0"
 
-# Specify dirs in which the shell function is executed and don't use ${B}
-# as default dirs to avoid possible race about ${B} with other task.
-sstate_create_package[dirs] = "${SSTATE_BUILDDIR}"
-sstate_unpack_package[dirs] = "${SSTATE_INSTDIR}"
-
-# Do not run sstate_hardcode_path() in ${B}:
-# the ${B} maybe removed by cmake_do_configure() while
-# sstate_hardcode_path() running.
-sstate_hardcode_path[dirs] = "${SSTATE_BUILDDIR}"
-
 python () {
     if bb.data.inherits_class('native', d):
         d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH', False))
@@ -164,6 +154,8 @@
     shareddirs = []
     bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}"))
 
+    sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
+
     manifest, d2 = oe.sstatesig.sstate_get_manifest_filename(ss['task'], d)
 
     if os.access(manifest, os.R_OK):
@@ -267,7 +259,8 @@
             oe.path.copyhardlinktree(state[1], state[2])
 
     for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split():
-        bb.build.exec_func(postinst, d)
+        # All hooks should run in the SSTATE_INSTDIR
+        bb.build.exec_func(postinst, d, (sstateinst,))
 
     for lock in locks:
         bb.utils.unlockfile(lock)
@@ -307,7 +300,8 @@
             bb.warn("Cannot verify signature on sstate package %s" % sstatepkg)
 
     for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split():
-        bb.build.exec_func(f, d)
+        # All hooks should run in the SSTATE_INSTDIR
+        bb.build.exec_func(f, d, (sstateinst,))
 
     for state in ss['dirs']:
         prepdir(state[1])
@@ -579,8 +573,9 @@
 
     for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + ['sstate_create_package'] + \
              (d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split():
-        bb.build.exec_func(f, d)
-  
+        # All hooks should run in SSTATE_BUILDDIR.
+        bb.build.exec_func(f, d, (sstatebuild,))
+
     bb.siggen.dump_this_task(sstatepkg + ".siginfo", d)
 
     return
@@ -642,19 +637,22 @@
     shared_state = sstate_state_fromvars(d)
     sstate_clean(shared_state, d)
 }
+sstate_task_prefunc[dirs] = "${WORKDIR}"
 
 python sstate_task_postfunc () {
     shared_state = sstate_state_fromvars(d)
+
     sstate_install(shared_state, d)
     for intercept in shared_state['interceptfuncs']:
-        bb.build.exec_func(intercept, d)
+        bb.build.exec_func(intercept, d, (d.getVar("WORKDIR", True),))
     omask = os.umask(002)
     if omask != 002:
        bb.note("Using umask 002 (not %0o) for sstate packaging" % omask)
     sstate_package(shared_state, d)
     os.umask(omask)
 }
-  
+sstate_task_postfunc[dirs] = "${WORKDIR}"
+
 
 #
 # Shell function to generate a sstate package from a directory
diff --git a/yocto-poky/meta/classes/testimage.bbclass b/yocto-poky/meta/classes/testimage.bbclass
index a1918ba..b4d4a69 100644
--- a/yocto-poky/meta/classes/testimage.bbclass
+++ b/yocto-poky/meta/classes/testimage.bbclass
@@ -80,11 +80,13 @@
 
 testimage_dump_host () {
     top -bn1
+    iostat -x -z -N -d -p ALL 20 2
     ps -ef
     free
     df
     memstat
     dmesg
+    ip -s link
     netstat -an
 }
 
@@ -146,6 +148,10 @@
                     testslist.append("oeqa." + type + "." + testname)
                     found = True
                     break
+                elif os.path.exists(os.path.join(p, 'lib', 'oeqa', type, testname.split(".")[0] + '.py')):
+                    testslist.append("oeqa." + type + "." + testname)
+                    found = True
+                    break
             if not found:
                 bb.fatal('Test %s specified in TEST_SUITES could not be found in lib/oeqa/runtime under BBPATH' % testname)
 
@@ -172,6 +178,7 @@
     import json
     import shutil
     import pkgutil
+    import re
 
     exportpath = d.getVar("TEST_EXPORT_DIR", True)
 
@@ -198,9 +205,18 @@
     savedata["host_dumper"]["parent_dir"] = tc.host_dumper.parent_dir
     savedata["host_dumper"]["cmds"] = tc.host_dumper.cmds
 
-    with open(os.path.join(exportpath, "testdata.json"), "w") as f:
+    json_file = os.path.join(exportpath, "testdata.json")
+    with open(json_file, "w") as f:
             json.dump(savedata, f, skipkeys=True, indent=4, sort_keys=True)
 
+    # Replace absolute path with relative in the file
+    exclude_path = os.path.join(d.getVar("COREBASE", True),'meta','lib','oeqa')
+    f1 = open(json_file,'r').read()
+    f2 = open(json_file,'w')
+    m = f1.replace(exclude_path,'oeqa')
+    f2.write(m)
+    f2.close()
+
     # now start copying files
     # we'll basically copy everything under meta/lib/oeqa, with these exceptions
     #  - oeqa/targetcontrol.py - not needed
@@ -214,6 +230,8 @@
     bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/utils"))
     # copy test modules, this should cover tests in other layers too
     for t in tc.testslist:
+        if re.search("\w+\.\w+\.test_\S+", t):
+            t = '.'.join(t.split('.')[:3])
         mod = pkgutil.get_loader(t)
         shutil.copy2(mod.filename, os.path.join(exportpath, "oeqa/runtime"))
     # copy __init__.py files
@@ -279,14 +297,20 @@
             self.imagefeatures = d.getVar("IMAGE_FEATURES", True).split()
             self.distrofeatures = d.getVar("DISTRO_FEATURES", True).split()
             manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + ".manifest")
+            nomanifest = d.getVar("IMAGE_NO_MANIFEST", True)
+
             self.sigterm = False
             self.origsigtermhandler = signal.getsignal(signal.SIGTERM)
             signal.signal(signal.SIGTERM, self.sigterm_exception)
-            try:
-                with open(manifest) as f:
-                    self.pkgmanifest = f.read()
-            except IOError as e:
-                bb.fatal("No package manifest file found. Did you build the image?\n%s" % e)
+
+            if nomanifest is None or nomanifest != "1":
+                try:
+                    with open(manifest) as f:
+                        self.pkgmanifest = f.read()
+                except IOError as e:
+                    bb.fatal("No package manifest file found. Did you build the image?\n%s" % e)
+            else:
+                self.pkgmanifest = ""
 
         def sigterm_exception(self, signum, stackframe):
             bb.warn("TestImage received SIGTERM, shutting down...")
@@ -305,13 +329,15 @@
         import traceback
         bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
 
-    target.deploy()
 
-    try:
-        target.start()
-        if export:
-            exportTests(d,tc)
-        else:
+    if export:
+        signal.signal(signal.SIGTERM, tc.origsigtermhandler)
+        tc.origsigtermhandler = None
+        exportTests(d,tc)
+    else:
+        target.deploy()
+        try:
+            target.start()
             starttime = time.time()
             result = runTests(tc)
             stoptime = time.time()
@@ -324,9 +350,9 @@
                 bb.plain(msg)
             else:
                 raise bb.build.FuncFailed("%s - FAILED - check the task log and the ssh log" % pn )
-    finally:
-        signal.signal(signal.SIGTERM, tc.origsigtermhandler)
-        target.stop()
+        finally:
+            signal.signal(signal.SIGTERM, tc.origsigtermhandler)
+            target.stop()
 
 testimage_main[vardepsexclude] =+ "BB_ORIGENV"
 
diff --git a/yocto-poky/meta/classes/toolchain-scripts.bbclass b/yocto-poky/meta/classes/toolchain-scripts.bbclass
index d0b2b91..ab4feb0 100644
--- a/yocto-poky/meta/classes/toolchain-scripts.bbclass
+++ b/yocto-poky/meta/classes/toolchain-scripts.bbclass
@@ -32,6 +32,7 @@
 	echo 'export OECORE_TARGET_SYSROOT="$SDKTARGETSYSROOT"' >> $script
 	echo "export OECORE_ACLOCAL_OPTS=\"-I $sdkpathnative/usr/share/aclocal\"" >> $script
 	echo "export PYTHONHOME=$sdkpathnative$prefix" >> $script
+	echo 'unset command_not_found_handle' >> $script
 
 	toolchain_shared_env_script
 }
diff --git a/yocto-poky/meta/classes/uninative.bbclass b/yocto-poky/meta/classes/uninative.bbclass
index 51391db..0cd27db 100644
--- a/yocto-poky/meta/classes/uninative.bbclass
+++ b/yocto-poky/meta/classes/uninative.bbclass
@@ -1,6 +1,6 @@
 NATIVELSBSTRING = "universal"
 
-UNINATIVE_LOADER = "${STAGING_DIR_NATIVE}/lib/ld-linux-x86-64.so.2"
+UNINATIVE_LOADER ?= "${@bb.utils.contains('BUILD_ARCH', 'x86_64', '${STAGING_DIR_NATIVE}/lib/ld-linux-x86-64.so.2', '${STAGING_DIR_NATIVE}/lib/ld-linux.so.2', d)}"
 
 addhandler uninative_eventhandler
 uninative_eventhandler[eventmask] = "bb.event.BuildStarted"
diff --git a/yocto-poky/meta/classes/useradd-staticids.bbclass b/yocto-poky/meta/classes/useradd-staticids.bbclass
index 421a70a..924d6ea 100644
--- a/yocto-poky/meta/classes/useradd-staticids.bbclass
+++ b/yocto-poky/meta/classes/useradd-staticids.bbclass
@@ -2,6 +2,7 @@
 # we need a function to reformat the params based on a static file
 def update_useradd_static_config(d):
     import argparse
+    import itertools
     import re
 
     class myArgumentParser( argparse.ArgumentParser ):
@@ -16,6 +17,11 @@
         def error(self, message):
             raise bb.build.FuncFailed(message)
 
+    def list_extend(iterable, length, obj = None):
+        """Ensure that iterable is the specified length by extending with obj
+        and return it as a list"""
+        return list(itertools.islice(itertools.chain(iterable, itertools.repeat(obj)), length))
+
     # We parse and rewrite the useradd components
     def rewrite_useradd(params):
         # The following comes from --help on useradd from shadow
@@ -84,7 +90,10 @@
                     for line in f:
                         if line.startswith('#'):
                             continue
-                        field = line.rstrip().split(":")
+                        # Make sure there always are at least seven elements in
+                        # the field list. This allows for leaving out trailing
+                        # colons in the passwd file.
+                        field = list_extend(line.rstrip().split(":"), 7)
                         if field[0] == uaargs.LOGIN:
                             if uaargs.uid and field[2] and (uaargs.uid != field[2]):
                                 bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.uid, field[2]))
@@ -220,7 +229,10 @@
                     for line in f:
                         if line.startswith('#'):
                             continue
-                        field = line.rstrip().split(":")
+                        # Make sure there always are at least four elements in
+                        # the field list. This allows for leaving out trailing
+                        # colons in the group file.
+                        field = list_extend(line.rstrip().split(":"), 4)
                         if field[0] == gaargs.GROUP and field[2]:
                             if gaargs.gid and (gaargs.gid != field[2]):
                                 bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), gaargs.GROUP, gaargs.gid, field[2]))
diff --git a/yocto-poky/meta/classes/useradd_base.bbclass b/yocto-poky/meta/classes/useradd_base.bbclass
index 802f3a1..ab3cd35 100644
--- a/yocto-poky/meta/classes/useradd_base.bbclass
+++ b/yocto-poky/meta/classes/useradd_base.bbclass
@@ -104,7 +104,7 @@
 			sleep $count
 		done
 	else
-		bbwarn "${PN}: group $groupname already contains $username, not re-adding it"
+		bbnote "${PN}: group $groupname already contains $username, not re-adding it"
 	fi
 	if test "x$gshadow" = "xno"; then
 		rm -f $rootdir${sysconfdir}/gshadow
@@ -136,7 +136,7 @@
 			sleep $count
 		done
 	else
-		bbwarn "${PN}: group $groupname doesn't exist, not removing it"
+		bbnote "${PN}: group $groupname doesn't exist, not removing it"
 	fi
 }
 
@@ -164,7 +164,7 @@
 		       sleep $count
 	       done
 	else
-		bbwarn "${PN}: user $username doesn't exist, not removing it"
+		bbnote "${PN}: user $username doesn't exist, not removing it"
 	fi
 }
 
diff --git a/yocto-poky/meta/conf/bitbake.conf b/yocto-poky/meta/conf/bitbake.conf
index d8a66f9..06971da 100644
--- a/yocto-poky/meta/conf/bitbake.conf
+++ b/yocto-poky/meta/conf/bitbake.conf
@@ -166,6 +166,7 @@
 ASSUME_PROVIDED = "\
     bzip2-native \
     chrpath-native \
+    file-native \
     git-native \
     grep-native \
     diffstat-native \
@@ -566,7 +567,7 @@
 # Download locations and utilities.
 ##################################################################
 
-APACHE_MIRROR = "http://www.apache.org/dist"
+APACHE_MIRROR = "http://archive.apache.org/dist"
 DEBIAN_MIRROR = "ftp://ftp.debian.org/debian/pool"
 GENTOO_MIRROR = "http://distfiles.gentoo.org/distfiles"
 GNOME_GIT = "git://git.gnome.org"
@@ -806,7 +807,7 @@
     lockfiles type vardepsexclude vardeps vardepvalue vardepvalueexclude \
     file-checksums python func task export unexport noexec nostamp dirs cleandirs \
     sstate-lockfile-shared prefuncs postfuncs export_func deptask rdeptask \
-    recrdeptask nodeprrecs stamp-base stamp-extra-info"
+    recrdeptask nodeprrecs stamp-base stamp-extra-info sstate-outputdirs"
 
 MLPREFIX ??= ""
 MULTILIB_VARIANTS ??= ""
diff --git a/yocto-poky/meta/conf/distro/include/default-distrovars.inc b/yocto-poky/meta/conf/distro/include/default-distrovars.inc
index 29b762b..8366904 100644
--- a/yocto-poky/meta/conf/distro/include/default-distrovars.inc
+++ b/yocto-poky/meta/conf/distro/include/default-distrovars.inc
@@ -47,3 +47,6 @@
 
 # Missing checksums should raise an error
 BB_STRICT_CHECKSUM = "1"
+
+GTK2DISTROFEATURES = "directfb x11"
+GTK3DISTROFEATURES = "x11 wayland"
diff --git a/yocto-poky/meta/conf/distro/include/distro_alias.inc b/yocto-poky/meta/conf/distro/include/distro_alias.inc
index bd3da9c..ca333c8 100644
--- a/yocto-poky/meta/conf/distro/include/distro_alias.inc
+++ b/yocto-poky/meta/conf/distro/include/distro_alias.inc
@@ -14,6 +14,7 @@
 DISTRO_PN_ALIAS_pn-adt-installer = "Intel"
 DISTRO_PN_ALIAS_pn-alsa-state = "OE-Core"
 DISTRO_PN_ALIAS_pn-alsa-utils-alsaconf = "OE-Core"
+DISTRO_PN_ALIAS_pn-alsa-utils-scripts = "OE-Core"
 DISTRO_PN_ALIAS_pn-atk = "Fedora=atk OpenSuSE=atk"
 DISTRO_PN_ALIAS_pn-augeas = "Ubuntu=libaugeas0 Debian=libaugeas0"
 DISTRO_PN_ALIAS_pn-avahi-ui = "Ubuntu=avahi-discover Debian=avahi-discover"
@@ -43,6 +44,7 @@
 DISTRO_PN_ALIAS_pn-clutter-1.8 = "Fedora=clutter OpenSuse=clutter Ubuntu=clutter-1.0 Mandriva=clutter Debian=clutter"
 DISTRO_PN_ALIAS_pn-clutter-gst-1.0 = "Debian=clutter-gst Ubuntu=clutter-gst Fedora=clutter-gst"
 DISTRO_PN_ALIAS_pn-clutter-gst-1.8 = "Fedora=clutter-gst Debian=libclutter-gst"
+DISTRO_PN_ALIAS_pn-clutter-gst-3.0 = "Ubuntu=libclutter-gst Debian=libclutter-gst"
 DISTRO_PN_ALIAS_pn-clutter-gtk-1.0 = "Debian=clutter-gtk Ubuntu=clutter-gtk Fedora=clutter-gtk"
 DISTRO_PN_ALIAS_pn-clutter-gtk-1.8 = "Fedora=clutter-gtk OpenSuSE=clutter-gtk Ubuntu=clutter-gtk-0.10 Mandriva=clutter-gtk Debian=clutter-gtk"
 DISTRO_PN_ALIAS_pn-cogl-1.0 = "Debian=cogl Ubuntu=cogl Fedora=cogl"
@@ -220,6 +222,7 @@
 DISTRO_PN_ALIAS_pn-libmpc = "Fedora=libmpc OpenSuse=libmpc2"
 DISTRO_PN_ALIAS_pn-libnewt = "Debian=libnewt0.52 Fedora=newt"
 DISTRO_PN_ALIAS_pn-libnewt-python = "Ubuntu=python-newt Fedora=newt-python"
+DISTRO_PN_ALIAS_pn-libnl = "Mandriva=libnl Fedora=libnl"
 DISTRO_PN_ALIAS_pn-libnss-mdns = "Meego=nss-mdns OpenSuSE=nss-mdns Ubuntu=nss-mdns Mandriva=nss_mdns Debian=nss-mdns"
 DISTRO_PN_ALIAS_pn-libomxil = "OSPDT upstream=http://omxil.sourceforge.net/"
 DISTRO_PN_ALIAS_pn-libowl = "Debian=owl OpenedHand"
@@ -383,14 +386,18 @@
 DISTRO_PN_ALIAS_pn-pseudo = "Windriver"
 DISTRO_PN_ALIAS_pn-psplash = "OpenedHand"
 DISTRO_PN_ALIAS_pn-ptest-runner = "OE-Core"
+DISTRO_PN_ALIAS_pn-pulseaudio-client-conf-sato = "OE-Core"
 DISTRO_PN_ALIAS_pn-puzzles = "Debian=sgt-puzzles Fedora=puzzles"
 DISTRO_PN_ALIAS_pn-python3 = "Fedora=python3 Debian=python3.2"
 DISTRO_PN_ALIAS_pn-python3-distribute = "Debian=python3-setuptools Fedora=python3-setuptools"
+DISTRO_PN_ALIAS_pn-python3-pip = "OpenSuSE=python3-pip Debian=python3-pip"
+DISTRO_PN_ALIAS_pn-python3-setuptools = "OpenSuSE=python3-setuptools Debian=python3-setuptools"
 DISTRO_PN_ALIAS_pn-python-ZSI = "OE-Core"
 DISTRO_PN_ALIAS_pn-python-argparse = "Fedora=python-argparse OpenSuSE=python-argparse"
 DISTRO_PN_ALIAS_pn-python-dbus = "Ubuntu=python-dbus Debian=python-dbus Mandriva=python-dbus"
 DISTRO_PN_ALIAS_pn-python-distribute = "Opensuse=python-setuptools Fedora=python-setuptools"
 DISTRO_PN_ALIAS_pn-python-git = "Debian=python-git Fedora=GitPython"
+DISTRO_PN_ALIAS_pn-python-imaging = "Mandriva=python-imaging Debian=python-imaging"
 DISTRO_PN_ALIAS_pn-python-mako = "Fedora=python-mako Opensuse=python-Mako"
 DISTRO_PN_ALIAS_pn-python-pycairo = "Meego=pycairo Fedora=pycairo Ubuntu=pycairo Debian=pycairo"
 DISTRO_PN_ALIAS_pn-python-pycurl = "Debian=python-pycurl Ubuntu=python-pycurl"
@@ -436,7 +443,9 @@
 DISTRO_PN_ALIAS_pn-shasum = "OE-Core"
 DISTRO_PN_ALIAS_pn-shutdown-desktop = "OpenedHand"
 DISTRO_PN_ALIAS_pn-signgp = "OE-Core"
+DISTRO_PN_ALIAS_pn-speexdsp = "Ubuntu=libspeexdsp1 Fedora=speexdsp"
 DISTRO_PN_ALIAS_pn-stat = "Debian=coreutils Fedora=coreutils"
+DISTRO_PN_ALIAS_pn-stress = "Debian=stress Fedora=stress"
 DISTRO_PN_ALIAS_pn-swabber = "OE-Core"
 DISTRO_PN_ALIAS_pn-sysklogd = "Debian=sysklogd Mandriva=sysklogd"
 DISTRO_PN_ALIAS_pn-sysprof = "Fedora=sysprof Debian=sysprof"
@@ -460,6 +469,7 @@
 DISTRO_PN_ALIAS_pn-tzcode = "OSPDT"
 DISTRO_PN_ALIAS_pn-u-boot-fw-utils = "Ubuntu=u-boot-tools Debian=u-boot-tools"
 DISTRO_PN_ALIAS_pn-u-boot-mkimage = "Ubuntu=uboot-mkimage Debian=uboot-mkimage"
+DISTRO_PN_ALIAS_pn-udev = "Mandriva=udev Fedora=udev"
 DISTRO_PN_ALIAS_pn-udev-extraconf = "OE-Core"
 DISTRO_PN_ALIAS_pn-unfs3 = "Debian=unfs3 Fedora=unfs3"
 DISTRO_PN_ALIAS_pn-unfs-server = "OE-Core"
diff --git a/yocto-poky/meta/conf/documentation.conf b/yocto-poky/meta/conf/documentation.conf
index 075ab6a..845559a 100644
--- a/yocto-poky/meta/conf/documentation.conf
+++ b/yocto-poky/meta/conf/documentation.conf
@@ -212,6 +212,8 @@
 IMAGE_CLASSES[doc] = "A list of classes that all images should inherit."
 IMAGE_FEATURES[doc] = "The primary list of features to include in an image. Configure this variable in an image recipe."
 IMAGE_FSTYPES[doc] = "Formats of root filesystem images that you want to have created."
+IMAGE_FSTYPES_DEBUGFS[doc] = "Formats of the debug root filesystem images that you want to have created."
+IMAGE_GEN_DEBUGFS[doc] = "When set to '1', generate a companion debug object/source filesystem image."
 IMAGE_INSTALL[doc] = "Specifies the packages to install into an image. Image recipes set IMAGE_INSTALL to specify the packages to install into an image through image.bbclass."
 IMAGE_LINGUAS[doc] = "Specifies the list of locales to install into the image during the root filesystem construction process."
 IMAGE_NAME[doc] = "The name of the output image files minus the extension."
diff --git a/yocto-poky/meta/conf/layer.conf b/yocto-poky/meta/conf/layer.conf
index 9e80018..9773632 100644
--- a/yocto-poky/meta/conf/layer.conf
+++ b/yocto-poky/meta/conf/layer.conf
@@ -41,9 +41,7 @@
 "
 
 SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \
-  gcc-cross-${TARGET_ARCH}->glibc \
-  gcc-cross-${TARGET_ARCH}->musl \
-  gcc-cross-${TARGET_ARCH}->uclibc \
+  gcc-cross-${TARGET_ARCH}->virtual/libc \
   gcc-cross-${TARGET_ARCH}->linux-libc-headers \
   ppp-dialin->ppp \
   resolvconf->bash \
@@ -56,5 +54,6 @@
   font-alias->font-util \
   weston-init->weston \
   weston-init->kbd \
+  oprofile->virtual/kernel \
 "
 
diff --git a/yocto-poky/meta/conf/machine/include/qemu.inc b/yocto-poky/meta/conf/machine/include/qemu.inc
index d5c0b37..16e9469 100644
--- a/yocto-poky/meta/conf/machine/include/qemu.inc
+++ b/yocto-poky/meta/conf/machine/include/qemu.inc
@@ -5,7 +5,7 @@
 PREFERRED_PROVIDER_virtual/libgles2 ?= "mesa"
 
 XSERVER ?= "xserver-xorg \
-            ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast', '', d)} \
+            ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \
             xf86-input-evdev \
             xf86-input-mouse \
             xf86-video-fbdev \
diff --git a/yocto-poky/meta/conf/machine/include/tune-thunderx.inc b/yocto-poky/meta/conf/machine/include/tune-thunderx.inc
new file mode 100644
index 0000000..40de61d
--- /dev/null
+++ b/yocto-poky/meta/conf/machine/include/tune-thunderx.inc
@@ -0,0 +1,19 @@
+require conf/machine/include/arm/arch-armv8.inc
+
+DEFAULTTUNE ?= "thunderx"
+AVAILTUNES += "thunderx thunderx_be"
+
+TUNEVALID[thunderx] = "Enable instructions for Cavium ThunderX"
+
+TUNE_CCARGS .= "${@bb.utils.contains("TUNE_FEATURES", "thunderx", " -mcpu=thunderx ", "",d)}"
+
+ARMPKGARCH_tune-thunderx ?= "thunderx"
+ARMPKGARCH_tune-thunderx_be ?= "thunderx_be"
+
+TUNE_FEATURES_tune-thunderx ?= "${TUNE_FEATURES_tune-aarch64} thunderx"
+TUNE_FEATURES_tune-thunderx_be ?= "${TUNE_FEATURES_tune-thunderx} bigendian"
+BASE_LIB_tune-thunderx = "lib64"
+BASE_LIB_tune-thunderx_be = "lib64"
+
+PACKAGE_EXTRA_ARCHS_tune-thunderx = "aarch64 thunderx"
+PACKAGE_EXTRA_ARCHS_tune-thunderx_be = "aarch64_be thunderx_be"
diff --git a/yocto-poky/meta/conf/machine/qemux86-64.conf b/yocto-poky/meta/conf/machine/qemux86-64.conf
index a4fd43c..489194a 100644
--- a/yocto-poky/meta/conf/machine/qemux86-64.conf
+++ b/yocto-poky/meta/conf/machine/qemux86-64.conf
@@ -16,7 +16,7 @@
 SERIAL_CONSOLES = "115200;ttyS0 115200;ttyS1"
 
 XSERVER = "xserver-xorg \
-           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast', '', d)} \
+           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \
            xf86-input-vmmouse \
            xf86-input-keyboard \
            xf86-input-evdev \
diff --git a/yocto-poky/meta/conf/machine/qemux86.conf b/yocto-poky/meta/conf/machine/qemux86.conf
index 96cea66..3cc8091 100644
--- a/yocto-poky/meta/conf/machine/qemux86.conf
+++ b/yocto-poky/meta/conf/machine/qemux86.conf
@@ -15,7 +15,7 @@
 SERIAL_CONSOLES = "115200;ttyS0 115200;ttyS1"
 
 XSERVER = "xserver-xorg \
-           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast', '', d)} \
+           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \
            xf86-input-vmmouse \
            xf86-input-keyboard \
            xf86-input-evdev \
diff --git a/yocto-poky/meta/conf/multilib.conf b/yocto-poky/meta/conf/multilib.conf
index 89a8e90..50303fb 100644
--- a/yocto-poky/meta/conf/multilib.conf
+++ b/yocto-poky/meta/conf/multilib.conf
@@ -2,7 +2,7 @@
 baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE', True) or 'INVALID'), True) or d.getVar('BASELIB', True)}"
 
 MULTILIB_VARIANTS = "${@extend_variants(d,'MULTILIBS','multilib')}"
-MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS"
+MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS TARGET_VENDOR"
 
 MULTILIBS ??= "multilib:lib32"
 
@@ -24,3 +24,4 @@
 # inside the multilib sysroot.  Fix this by explicitly adding the MACHINE's
 # architecture-independent pkgconfig location to PKG_CONFIG_PATH.
 PKG_CONFIG_PATH .= ":${STAGING_DIR}/${MACHINE}${datadir}/pkgconfig"
+PKG_CONFIG_PATH[vardepsexclude] = "MACHINE"
diff --git a/yocto-poky/meta/files/common-licenses/GFDL-1.1 b/yocto-poky/meta/files/common-licenses/GFDL-1.1
index 4a0fe1c..1d74223 100644
--- a/yocto-poky/meta/files/common-licenses/GFDL-1.1
+++ b/yocto-poky/meta/files/common-licenses/GFDL-1.1
@@ -1,8 +1,7 @@
-		GNU Free Documentation License
-		  Version 1.2, November 2002
+                GNU Free Documentation License
+                   Version 1.1, March 2000
 
-
- Copyright (C) 2000,2001,2002  Free Software Foundation, Inc.
+ Copyright (C) 2000  Free Software Foundation, Inc.
      51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
@@ -11,12 +10,12 @@
 0. PREAMBLE
 
 The purpose of this License is to make a manual, textbook, or other
-functional and useful document "free" in the sense of freedom: to
-assure everyone the effective freedom to copy and redistribute it,
-with or without modifying it, either commercially or noncommercially.
-Secondarily, this License preserves for the author and publisher a way
-to get credit for their work, while not being considered responsible
-for modifications made by others.
+written document "free" in the sense of freedom: to assure everyone
+the effective freedom to copy and redistribute it, with or without
+modifying it, either commercially or noncommercially.  Secondarily,
+this License preserves for the author and publisher a way to get
+credit for their work, while not being considered responsible for
+modifications made by others.
 
 This License is a kind of "copyleft", which means that derivative
 works of the document must themselves be free in the same sense.  It
@@ -34,15 +33,11 @@
 
 1. APPLICABILITY AND DEFINITIONS
 
-This License applies to any manual or other work, in any medium, that
-contains a notice placed by the copyright holder saying it can be
-distributed under the terms of this License.  Such a notice grants a
-world-wide, royalty-free license, unlimited in duration, to use that
-work under the conditions stated herein.  The "Document", below,
-refers to any such manual or work.  Any member of the public is a
-licensee, and is addressed as "you".  You accept the license if you
-copy, modify or distribute the work in a way requiring permission
-under copyright law.
+This License applies to any manual or other work that contains a
+notice placed by the copyright holder saying it can be distributed
+under the terms of this License.  The "Document", below, refers to any
+such manual or work.  Any member of the public is a licensee, and is
+addressed as "you".
 
 A "Modified Version" of the Document means any work containing the
 Document or a portion of it, either copied verbatim, or with
@@ -52,7 +47,7 @@
 the Document that deals exclusively with the relationship of the
 publishers or authors of the Document to the Document's overall subject
 (or to related matters) and contains nothing that could fall directly
-within that overall subject.  (Thus, if the Document is in part a
+within that overall subject.  (For example, if the Document is in part a
 textbook of mathematics, a Secondary Section may not explain any
 mathematics.)  The relationship could be a matter of historical
 connection with the subject or with related matters, or of legal,
@@ -61,40 +56,33 @@
 
 The "Invariant Sections" are certain Secondary Sections whose titles
 are designated, as being those of Invariant Sections, in the notice
-that says that the Document is released under this License.  If a
-section does not fit the above definition of Secondary then it is not
-allowed to be designated as Invariant.  The Document may contain zero
-Invariant Sections.  If the Document does not identify any Invariant
-Sections then there are none.
+that says that the Document is released under this License.
 
 The "Cover Texts" are certain short passages of text that are listed,
 as Front-Cover Texts or Back-Cover Texts, in the notice that says that
-the Document is released under this License.  A Front-Cover Text may
-be at most 5 words, and a Back-Cover Text may be at most 25 words.
+the Document is released under this License.
 
 A "Transparent" copy of the Document means a machine-readable copy,
 represented in a format whose specification is available to the
-general public, that is suitable for revising the document
+general public, whose contents can be viewed and edited directly and
 straightforwardly with generic text editors or (for images composed of
 pixels) generic paint programs or (for drawings) some widely available
 drawing editor, and that is suitable for input to text formatters or
 for automatic translation to a variety of formats suitable for input
 to text formatters.  A copy made in an otherwise Transparent file
-format whose markup, or absence of markup, has been arranged to thwart
-or discourage subsequent modification by readers is not Transparent.
-An image format is not Transparent if used for any substantial amount
-of text.  A copy that is not "Transparent" is called "Opaque".
+format whose markup has been designed to thwart or discourage
+subsequent modification by readers is not Transparent.  A copy that is
+not "Transparent" is called "Opaque".
 
 Examples of suitable formats for Transparent copies include plain
 ASCII without markup, Texinfo input format, LaTeX input format, SGML
 or XML using a publicly available DTD, and standard-conforming simple
-HTML, PostScript or PDF designed for human modification.  Examples of
-transparent image formats include PNG, XCF and JPG.  Opaque formats
-include proprietary formats that can be read and edited only by
-proprietary word processors, SGML or XML for which the DTD and/or
+HTML designed for human modification.  Opaque formats include
+PostScript, PDF, proprietary formats that can be read and edited only
+by proprietary word processors, SGML or XML for which the DTD and/or
 processing tools are not generally available, and the
-machine-generated HTML, PostScript or PDF produced by some word
-processors for output purposes only.
+machine-generated HTML produced by some word processors for output
+purposes only.
 
 The "Title Page" means, for a printed book, the title page itself,
 plus such following pages as are needed to hold, legibly, the material
@@ -103,21 +91,6 @@
 the text near the most prominent appearance of the work's title,
 preceding the beginning of the body of the text.
 
-A section "Entitled XYZ" means a named subunit of the Document whose
-title either is precisely XYZ or contains XYZ in parentheses following
-text that translates XYZ in another language.  (Here XYZ stands for a
-specific section name mentioned below, such as "Acknowledgements",
-"Dedications", "Endorsements", or "History".)  To "Preserve the Title"
-of such a section when you modify the Document means that it remains a
-section "Entitled XYZ" according to this definition.
-
-The Document may include Warranty Disclaimers next to the notice which
-states that this License applies to the Document.  These Warranty
-Disclaimers are considered to be included by reference in this
-License, but only as regards disclaiming warranties: any other
-implication that these Warranty Disclaimers may have is void and has
-no effect on the meaning of this License.
-
 
 2. VERBATIM COPYING
 
@@ -137,10 +110,9 @@
 
 3. COPYING IN QUANTITY
 
-If you publish printed copies (or copies in media that commonly have
-printed covers) of the Document, numbering more than 100, and the
-Document's license notice requires Cover Texts, you must enclose the
-copies in covers that carry, clearly and legibly, all these Cover
+If you publish printed copies of the Document numbering more than 100,
+and the Document's license notice requires Cover Texts, you must enclose
+the copies in covers that carry, clearly and legibly, all these Cover
 Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on
 the back cover.  Both covers must also clearly and legibly identify
 you as the publisher of these copies.  The front cover must present
@@ -158,15 +130,16 @@
 If you publish or distribute Opaque copies of the Document numbering
 more than 100, you must either include a machine-readable Transparent
 copy along with each Opaque copy, or state in or with each Opaque copy
-a computer-network location from which the general network-using
-public has access to download using public-standard network protocols
-a complete Transparent copy of the Document, free of added material.
-If you use the latter option, you must take reasonably prudent steps,
-when you begin distribution of Opaque copies in quantity, to ensure
-that this Transparent copy will remain thus accessible at the stated
-location until at least one year after the last time you distribute an
-Opaque copy (directly or through your agents or retailers) of that
-edition to the public.
+a publicly-accessible computer-network location containing a complete
+Transparent copy of the Document, free of added material, which the
+general network-using public has access to download anonymously at no
+charge using public-standard network protocols.  If you use the latter
+option, you must take reasonably prudent steps, when you begin
+distribution of Opaque copies in quantity, to ensure that this
+Transparent copy will remain thus accessible at the stated location
+until at least one year after the last time you distribute an Opaque
+copy (directly or through your agents or retailers) of that edition to
+the public.
 
 It is requested, but not required, that you contact the authors of the
 Document well before redistributing any large number of copies, to give
@@ -190,8 +163,7 @@
 B. List on the Title Page, as authors, one or more persons or entities
    responsible for authorship of the modifications in the Modified
    Version, together with at least five of the principal authors of the
-   Document (all of its principal authors, if it has fewer than five),
-   unless they release you from this requirement.
+   Document (all of its principal authors, if it has less than five).
 C. State on the Title page the name of the publisher of the
    Modified Version, as the publisher.
 D. Preserve all the copyright notices of the Document.
@@ -203,10 +175,10 @@
 G. Preserve in that license notice the full lists of Invariant Sections
    and required Cover Texts given in the Document's license notice.
 H. Include an unaltered copy of this License.
-I. Preserve the section Entitled "History", Preserve its Title, and add
-   to it an item stating at least the title, year, new authors, and
+I. Preserve the section entitled "History", and its title, and add to
+   it an item stating at least the title, year, new authors, and
    publisher of the Modified Version as given on the Title Page.  If
-   there is no section Entitled "History" in the Document, create one
+   there is no section entitled "History" in the Document, create one
    stating the title, year, authors, and publisher of the Document as
    given on its Title Page, then add an item describing the Modified
    Version as stated in the previous sentence.
@@ -217,18 +189,17 @@
    You may omit a network location for a work that was published at
    least four years before the Document itself, or if the original
    publisher of the version it refers to gives permission.
-K. For any section Entitled "Acknowledgements" or "Dedications",
-   Preserve the Title of the section, and preserve in the section all
-   the substance and tone of each of the contributor acknowledgements
+K. In any section entitled "Acknowledgements" or "Dedications",
+   preserve the section's title, and preserve in the section all the
+   substance and tone of each of the contributor acknowledgements
    and/or dedications given therein.
 L. Preserve all the Invariant Sections of the Document,
    unaltered in their text and in their titles.  Section numbers
    or the equivalent are not considered part of the section titles.
-M. Delete any section Entitled "Endorsements".  Such a section
+M. Delete any section entitled "Endorsements".  Such a section
    may not be included in the Modified Version.
-N. Do not retitle any existing section to be Entitled "Endorsements"
+N. Do not retitle any existing section as "Endorsements"
    or to conflict in title with any Invariant Section.
-O. Preserve any Warranty Disclaimers.
 
 If the Modified Version includes new front-matter sections or
 appendices that qualify as Secondary Sections and contain no material
@@ -237,7 +208,7 @@
 list of Invariant Sections in the Modified Version's license notice.
 These titles must be distinct from any other section titles.
 
-You may add a section Entitled "Endorsements", provided it contains
+You may add a section entitled "Endorsements", provided it contains
 nothing but endorsements of your Modified Version by various
 parties--for example, statements of peer review or that the text has
 been approved by an organization as the authoritative definition of a
@@ -265,7 +236,7 @@
 versions, provided that you include in the combination all of the
 Invariant Sections of all of the original documents, unmodified, and
 list them all as Invariant Sections of your combined work in its
-license notice, and that you preserve all their Warranty Disclaimers.
+license notice.
 
 The combined work need only contain one copy of this License, and
 multiple identical Invariant Sections may be replaced with a single
@@ -276,11 +247,11 @@
 Make the same adjustment to the section titles in the list of
 Invariant Sections in the license notice of the combined work.
 
-In the combination, you must combine any sections Entitled "History"
-in the various original documents, forming one section Entitled
-"History"; likewise combine any sections Entitled "Acknowledgements",
-and any sections Entitled "Dedications".  You must delete all sections
-Entitled "Endorsements".
+In the combination, you must combine any sections entitled "History"
+in the various original documents, forming one section entitled
+"History"; likewise combine any sections entitled "Acknowledgements",
+and any sections entitled "Dedications".  You must delete all sections
+entitled "Endorsements."
 
 
 6. COLLECTIONS OF DOCUMENTS
@@ -301,20 +272,18 @@
 
 A compilation of the Document or its derivatives with other separate
 and independent documents or works, in or on a volume of a storage or
-distribution medium, is called an "aggregate" if the copyright
-resulting from the compilation is not used to limit the legal rights
-of the compilation's users beyond what the individual works permit.
-When the Document is included in an aggregate, this License does not
-apply to the other works in the aggregate which are not themselves
-derivative works of the Document.
+distribution medium, does not as a whole count as a Modified Version
+of the Document, provided no compilation copyright is claimed for the
+compilation.  Such a compilation is called an "aggregate", and this
+License does not apply to the other self-contained works thus compiled
+with the Document, on account of their being thus compiled, if they
+are not themselves derivative works of the Document.
 
 If the Cover Text requirement of section 3 is applicable to these
-copies of the Document, then if the Document is less than one half of
-the entire aggregate, the Document's Cover Texts may be placed on
-covers that bracket the Document within the aggregate, or the
-electronic equivalent of covers if the Document is in electronic form.
-Otherwise they must appear on printed covers that bracket the whole
-aggregate.
+copies of the Document, then if the Document is less than one quarter
+of the entire aggregate, the Document's Cover Texts may be placed on
+covers that surround only the Document within the aggregate.
+Otherwise they must appear on covers around the whole aggregate.
 
 
 8. TRANSLATION
@@ -325,17 +294,10 @@
 permission from their copyright holders, but you may include
 translations of some or all Invariant Sections in addition to the
 original versions of these Invariant Sections.  You may include a
-translation of this License, and all the license notices in the
-Document, and any Warranty Disclaimers, provided that you also include
-the original English version of this License and the original versions
-of those notices and disclaimers.  In case of a disagreement between
-the translation and the original version of this License or a notice
-or disclaimer, the original version will prevail.
-
-If a section in the Document is Entitled "Acknowledgements",
-"Dedications", or "History", the requirement (section 4) to Preserve
-its Title (section 1) will typically require changing the actual
-title.
+translation of this License provided that you also include the
+original English version of this License.  In case of a disagreement
+between the translation and the original English version of this
+License, the original English version will prevail.
 
 
 9. TERMINATION
@@ -373,23 +335,19 @@
 the License in the document and put the following copyright and
 license notices just after the title page:
 
-    Copyright (c)  YEAR  YOUR NAME.
-    Permission is granted to copy, distribute and/or modify this document
-    under the terms of the GNU Free Documentation License, Version 1.2
-    or any later version published by the Free Software Foundation;
-    with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts.
-    A copy of the license is included in the section entitled "GNU
-    Free Documentation License".
+      Copyright (c)  YEAR  YOUR NAME.
+      Permission is granted to copy, distribute and/or modify this document
+      under the terms of the GNU Free Documentation License, Version 1.1
+      or any later version published by the Free Software Foundation;
+      with the Invariant Sections being LIST THEIR TITLES, with the
+      Front-Cover Texts being LIST, and with the Back-Cover Texts being LIST.
+      A copy of the license is included in the section entitled "GNU
+      Free Documentation License".
 
-If you have Invariant Sections, Front-Cover Texts and Back-Cover Texts,
-replace the "with...Texts." line with this:
-
-    with the Invariant Sections being LIST THEIR TITLES, with the
-    Front-Cover Texts being LIST, and with the Back-Cover Texts being LIST.
-
-If you have Invariant Sections without Cover Texts, or some other
-combination of the three, merge those two alternatives to suit the
-situation.
+If you have no Invariant Sections, write "with no Invariant Sections"
+instead of saying which ones are invariant.  If you have no
+Front-Cover Texts, write "no Front-Cover Texts" instead of
+"Front-Cover Texts being LIST"; likewise for Back-Cover Texts.
 
 If your document contains nontrivial examples of program code, we
 recommend releasing these examples in parallel under your choice of
diff --git a/yocto-poky/meta/files/ext-sdk-prepare.sh b/yocto-poky/meta/files/ext-sdk-prepare.sh
new file mode 100644
index 0000000..160c71e
--- /dev/null
+++ b/yocto-poky/meta/files/ext-sdk-prepare.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+# Prepare the build system within the extensible SDK
+
+target_sdk_dir="$1"
+sdk_targets="$2"
+
+# Avoid actually building images during this phase, but still
+# ensure all dependencies are extracted from sstate
+# This is a hack, to be sure, but we really don't need to do this here
+for sdktarget in $sdk_targets ; do
+	bbappend=`recipetool newappend $target_sdk_dir/workspace $sdktarget`
+	printf 'python do_rootfs_forcevariable () {\n    bb.utils.mkdirhier(d.getVar("IMAGE_ROOTFS", True))\n}\n' > $bbappend
+	printf 'python do_bootimg () {\n    pass\n}\n' >> $bbappend
+	printf 'python do_bootdirectdisk () {\n    pass\n}\n' >> $bbappend
+	printf 'python do_vmimg () {\n    pass\n}\n' >> $bbappend
+	printf "Created bbappend %s\n" "$bbappend"
+done
+bitbake $sdk_targets || exit 1
+rm -rf $target_sdk_dir/workspace/appends/*
diff --git a/yocto-poky/meta/files/toolchain-shar-extract.sh b/yocto-poky/meta/files/toolchain-shar-extract.sh
index cd0a547..35d3c75 100644
--- a/yocto-poky/meta/files/toolchain-shar-extract.sh
+++ b/yocto-poky/meta/files/toolchain-shar-extract.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
 
 INST_ARCH=$(uname -m | sed -e "s/i[3-6]86/ix86/" -e "s/x86[-_]64/x86_64/")
 SDK_ARCH=$(echo @SDK_ARCH@ | sed -e "s/i[3-6]86/ix86/" -e "s/x86[-_]64/x86_64/")
@@ -68,8 +68,9 @@
 	esac
 done
 
-echo "@SDK_TITLE@ installer version @SDK_VERSION@"
-echo "==========================================================="
+titlestr="@SDK_TITLE@ installer version @SDK_VERSION@"
+printf "%s\n" "$titlestr"
+printf "%${#titlestr}s\n" | tr " " "="
 
 if [ $verbose = 1 ] ; then
 	set -x
@@ -86,7 +87,7 @@
 	if [ "$answer" = "Y" ]; then
 		target_sdk_dir="$DEFAULT_INSTALL_DIR"
 	else
-		read -e -p "Enter target directory for SDK (default: $DEFAULT_INSTALL_DIR): " target_sdk_dir
+		read -p "Enter target directory for SDK (default: $DEFAULT_INSTALL_DIR): " target_sdk_dir
 		[ "$target_sdk_dir" = "" ] && target_sdk_dir=$DEFAULT_INSTALL_DIR
 	fi
 fi
@@ -100,9 +101,9 @@
 
 if [ "$SDK_EXTENSIBLE" = "1" ]; then
 	# We're going to be running the build system, additional restrictions apply
-	if echo "$target_sdk_dir" | grep -q '[+\ @]'; then
+	if echo "$target_sdk_dir" | grep -q '[+\ @$]'; then
 		echo "The target directory path ($target_sdk_dir) contains illegal" \
-		     "characters such as spaces, @ or +. Abort!"
+		     "characters such as spaces, @, \$ or +. Abort!"
 		exit 1
 	fi
 else
@@ -163,14 +164,25 @@
 payload_offset=$(($(grep -na -m1 "^MARKER:$" $0|cut -d':' -f1) + 1))
 
 printf "Extracting SDK..."
-tail -n +$payload_offset $0| $SUDO_EXEC tar xj -C $target_sdk_dir
+tail -n +$payload_offset $0| $SUDO_EXEC tar xj -C $target_sdk_dir --checkpoint=.2500
 echo "done"
 
 printf "Setting it up..."
 # fix environment paths
+real_env_setup_script=""
 for env_setup_script in `ls $target_sdk_dir/environment-setup-*`; do
+	if grep -q 'OECORE_NATIVE_SYSROOT=' $env_setup_script; then
+		# Handle custom env setup scripts that are only named
+		# environment-setup-* so that they have relocation
+		# applied - what we want beyond here is the main one
+		# rather than the one that simply sorts last
+		real_env_setup_script="$env_setup_script"
+	fi
 	$SUDO_EXEC sed -e "s:@SDKPATH@:$target_sdk_dir:g" -i $env_setup_script
 done
+if [ -n "$real_env_setup_script" ] ; then
+	env_setup_script="$real_env_setup_script"
+fi
 
 @SDK_POST_INSTALL_COMMAND@
 
@@ -182,7 +194,9 @@
 
 echo "SDK has been successfully set up and is ready to be used."
 echo "Each time you wish to use the SDK in a new shell session, you need to source the environment setup script e.g."
-echo " \$ . $target_sdk_dir/environment-setup-@REAL_MULTIMACH_TARGET_SYS@"
+for env_setup_script in `ls $target_sdk_dir/environment-setup-*`; do
+	echo " \$ . $env_setup_script"
+done
 
 exit 0
 
diff --git a/yocto-poky/meta/files/toolchain-shar-relocate.sh b/yocto-poky/meta/files/toolchain-shar-relocate.sh
index dfb8e16..4ef2927 100644
--- a/yocto-poky/meta/files/toolchain-shar-relocate.sh
+++ b/yocto-poky/meta/files/toolchain-shar-relocate.sh
@@ -26,25 +26,21 @@
 	fi
 fi
 
-# replace @SDKPATH@ with the new prefix in all text files: configs/scripts/etc
+# replace @SDKPATH@ with the new prefix in all text files: configs/scripts/etc.
+# replace the host perl with SDK perl.
 for replace in "$target_sdk_dir -maxdepth 1" "$native_sysroot"; do
-	$SUDO_EXEC find $replace -type f -exec file '{}' \; | \
-		grep ":.*\(ASCII\|script\|source\).*text" | \
-		awk -F':' '{printf "\"%s\"\n", $1}' | \
-		grep -v "$target_sdk_dir/environment-setup-*" | \
-		$SUDO_EXEC xargs -n32 sed -i -e "s:$DEFAULT_INSTALL_DIR:$target_sdk_dir:g"
-done
+	$SUDO_EXEC find $replace -type f
+done | xargs -n100 file | grep ":.*\(ASCII\|script\|source\).*text" | \
+    awk -F':' '{printf "\"%s\"\n", $1}' | \
+    grep -v "$target_sdk_dir/environment-setup-*" | \
+    xargs -n100 $SUDO_EXEC sed -i \
+        -e "s:$DEFAULT_INSTALL_DIR:$target_sdk_dir:g" \
+        -e "s:^#! */usr/bin/perl.*:#! /usr/bin/env perl:g" \
+        -e "s: /usr/bin/perl: /usr/bin/env perl:g"
 
 # change all symlinks pointing to @SDKPATH@
 for l in $($SUDO_EXEC find $native_sysroot -type l); do
 	$SUDO_EXEC ln -sfn $(readlink $l|$SUDO_EXEC sed -e "s:$DEFAULT_INSTALL_DIR:$target_sdk_dir:") $l
 done
 
-# find out all perl scripts in $native_sysroot and modify them replacing the
-# host perl with SDK perl.
-for perl_script in $($SUDO_EXEC find $native_sysroot -type f -exec grep -l "^#!.*perl" '{}' \;); do
-	$SUDO_EXEC sed -i -e "s:^#! */usr/bin/perl.*:#! /usr/bin/env perl:g" -e \
-		"s: /usr/bin/perl: /usr/bin/env perl:g" $perl_script
-done
-
 echo done
diff --git a/yocto-poky/meta/lib/oe/copy_buildsystem.py b/yocto-poky/meta/lib/oe/copy_buildsystem.py
index 979578c..c0e7541 100644
--- a/yocto-poky/meta/lib/oe/copy_buildsystem.py
+++ b/yocto-poky/meta/lib/oe/copy_buildsystem.py
@@ -14,8 +14,9 @@
         shutil.copymode(src, dest)
 
 class BuildSystem(object):
-    def __init__(self, d):
+    def __init__(self, context, d):
         self.d = d
+        self.context = context
         self.layerdirs = d.getVar('BBLAYERS', True).split()
 
     def copy_bitbake_and_layers(self, destdir):
@@ -38,7 +39,7 @@
             if os.path.exists(layerconf):
                 with open(layerconf, 'r') as f:
                     if f.readline().startswith("# ### workspace layer auto-generated by devtool ###"):
-                        bb.warn("Skipping local workspace layer %s" % layer)
+                        bb.plain("NOTE: Excluding local workspace layer %s from %s" % (layer, self.context))
                         continue
 
             # If the layer was already under corebase, leave it there
diff --git a/yocto-poky/meta/lib/oe/distro_check.py b/yocto-poky/meta/lib/oe/distro_check.py
index 8ed5b0e..f92cd2e 100644
--- a/yocto-poky/meta/lib/oe/distro_check.py
+++ b/yocto-poky/meta/lib/oe/distro_check.py
@@ -1,7 +1,23 @@
-def get_links_from_url(url):
+from contextlib import contextmanager
+@contextmanager
+def create_socket(url, d):
+    import urllib
+    socket = urllib.urlopen(url, proxies=get_proxies(d))
+    try:
+        yield socket
+    finally:
+        socket.close()
+
+def get_proxies(d):
+    import os
+    proxykeys = ['http', 'https', 'ftp', 'ftps', 'no', 'all']
+    proxyvalues = map(lambda key: d.getVar(key+'_proxy', True), proxykeys)
+    return dict(zip(proxykeys, proxyvalues))
+
+def get_links_from_url(url, d):
     "Return all the href links found on the web location"
 
-    import urllib, sgmllib
+    import sgmllib
     
     class LinksParser(sgmllib.SGMLParser):
         def parse(self, s):
@@ -24,19 +40,18 @@
             "Return the list of hyperlinks."
             return self.hyperlinks
 
-    sock = urllib.urlopen(url)
-    webpage = sock.read()
-    sock.close()
+    with create_socket(url,d) as sock:
+        webpage = sock.read()
 
     linksparser = LinksParser()
     linksparser.parse(webpage)
     return linksparser.get_hyperlinks()
 
-def find_latest_numeric_release(url):
+def find_latest_numeric_release(url, d):
     "Find the latest listed numeric release on the given url"
     max=0
     maxstr=""
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         try:
             release = float(link)
         except:
@@ -70,7 +85,7 @@
     return set.keys()
 
 
-def get_latest_released_meego_source_package_list():
+def get_latest_released_meego_source_package_list(d):
     "Returns list of all the name os packages in the latest meego distro"
 
     package_names = []
@@ -82,11 +97,11 @@
     package_list=clean_package_list(package_names)
     return "1.0", package_list
 
-def get_source_package_list_from_url(url, section):
+def get_source_package_list_from_url(url, section, d):
     "Return a sectioned list of package names from a URL list"
 
     bb.note("Reading %s: %s" % (url, section))
-    links = get_links_from_url(url)
+    links = get_links_from_url(url, d)
     srpms = filter(is_src_rpm, links)
     names_list = map(package_name_from_srpm, srpms)
 
@@ -96,44 +111,44 @@
 
     return new_pkgs
 
-def get_latest_released_fedora_source_package_list():
+def get_latest_released_fedora_source_package_list(d):
     "Returns list of all the name os packages in the latest fedora distro"
-    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/")
+    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d)
 
-    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main")
+    package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d)
 
 #    package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything")
-    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates")
+    package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d)
 
     package_list=clean_package_list(package_names)
         
     return latest, package_list
 
-def get_latest_released_opensuse_source_package_list():
+def get_latest_released_opensuse_source_package_list(d):
     "Returns list of all the name os packages in the latest opensuse distro"
-    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/")
+    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/",d)
 
-    package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main")
-    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates")
+    package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d)
+    package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d)
 
     package_list=clean_package_list(package_names)
     return latest, package_list
 
-def get_latest_released_mandriva_source_package_list():
+def get_latest_released_mandriva_source_package_list(d):
     "Returns list of all the name os packages in the latest mandriva distro"
-    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/")
-    package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main")
+    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d)
+    package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d)
 #    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib")
-    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates")
+    package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d)
 
     package_list=clean_package_list(package_names)
     return latest, package_list
 
-def find_latest_debian_release(url):
+def find_latest_debian_release(url, d):
     "Find the latest listed debian release on the given url"
 
     releases = []
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         if link[:6] == "Debian":
             if ';' not in link:
                 releases.append(link)
@@ -143,16 +158,15 @@
     except:
         return "_NotFound_"
 
-def get_debian_style_source_package_list(url, section):
+def get_debian_style_source_package_list(url, section, d):
     "Return the list of package-names stored in the debian style Sources.gz file"
-    import urllib
-    sock = urllib.urlopen(url)
-    import tempfile
-    tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
-    tmpfilename=tmpfile.name
-    tmpfile.write(sock.read())
-    sock.close()
-    tmpfile.close()
+    with create_socket(url,d) as sock:
+        webpage = sock.read()
+        import tempfile
+        tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
+        tmpfilename=tmpfile.name
+        tmpfile.write(sock.read())
+        tmpfile.close()
     import gzip
     bb.note("Reading %s: %s" % (url, section))
 
@@ -165,41 +179,41 @@
 
     return package_names
 
-def get_latest_released_debian_source_package_list():
+def get_latest_released_debian_source_package_list(d):
     "Returns list of all the name os packages in the latest debian distro"
-    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/")
+    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d)
     url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz" 
-    package_names = get_debian_style_source_package_list(url, "main")
+    package_names = get_debian_style_source_package_list(url, "main", d)
 #    url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz" 
 #    package_names += get_debian_style_source_package_list(url, "contrib")
     url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz" 
-    package_names += get_debian_style_source_package_list(url, "updates")
+    package_names += get_debian_style_source_package_list(url, "updates", d)
     package_list=clean_package_list(package_names)
     return latest, package_list
 
-def find_latest_ubuntu_release(url):
+def find_latest_ubuntu_release(url, d):
     "Find the latest listed ubuntu release on the given url"
     url += "?C=M;O=D" # Descending Sort by Last Modified
-    for link in get_links_from_url(url):
+    for link in get_links_from_url(url, d):
         if link[-8:] == "-updates":
             return link[:-8]
     return "_NotFound_"
 
-def get_latest_released_ubuntu_source_package_list():
+def get_latest_released_ubuntu_source_package_list(d):
     "Returns list of all the name os packages in the latest ubuntu distro"
-    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/")
+    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d)
     url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest
-    package_names = get_debian_style_source_package_list(url, "main")
+    package_names = get_debian_style_source_package_list(url, "main", d)
 #    url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest
 #    package_names += get_debian_style_source_package_list(url, "multiverse")
 #    url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest
 #    package_names += get_debian_style_source_package_list(url, "universe")
     url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest
-    package_names += get_debian_style_source_package_list(url, "updates")
+    package_names += get_debian_style_source_package_list(url, "updates", d)
     package_list=clean_package_list(package_names)
     return latest, package_list
 
-def create_distro_packages_list(distro_check_dir):
+def create_distro_packages_list(distro_check_dir, d):
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
     if not os.path.isdir (pkglst_dir):
         os.makedirs(pkglst_dir)
@@ -220,7 +234,7 @@
     begin = datetime.now()
     for distro in per_distro_functions:
         name = distro[0]
-        release, package_list = distro[1]()
+        release, package_list = distro[1](d)
         bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
         package_list_file = os.path.join(pkglst_dir, name + "-" + release)
         f = open(package_list_file, "w+b")
@@ -231,7 +245,7 @@
     delta = end - begin
     bb.note("package_list generatiosn took this much time: %d seconds" % delta.seconds)
 
-def update_distro_data(distro_check_dir, datetime):
+def update_distro_data(distro_check_dir, datetime, d):
     """
         If distro packages list data is old then rebuild it.
         The operations has to be protected by a lock so that
@@ -258,7 +272,7 @@
         if saved_datetime[0:8] != datetime[0:8]:
             bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
             bb.note("Regenerating distro package lists")
-            create_distro_packages_list(distro_check_dir)
+            create_distro_packages_list(distro_check_dir, d)
             f.seek(0)
             f.write(datetime)
 
diff --git a/yocto-poky/meta/lib/oe/image.py b/yocto-poky/meta/lib/oe/image.py
index f9e9bfd..b9eb3de 100644
--- a/yocto-poky/meta/lib/oe/image.py
+++ b/yocto-poky/meta/lib/oe/image.py
@@ -5,7 +5,7 @@
 
 
 def generate_image(arg):
-    (type, subimages, create_img_cmd) = arg
+    (type, subimages, create_img_cmd, sprefix) = arg
 
     bb.note("Running image creation script for %s: %s ..." %
             (type, create_img_cmd))
@@ -54,14 +54,16 @@
             base_type = self._image_base_type(node)
             deps = (self.d.getVar('IMAGE_TYPEDEP_' + node, True) or "")
             base_deps = (self.d.getVar('IMAGE_TYPEDEP_' + base_type, True) or "")
-            if deps != "" or base_deps != "":
-                graph[node] = deps
 
-                for dep in deps.split() + base_deps.split():
-                    if not dep in graph:
-                        add_node(dep)
-            else:
-                graph[node] = ""
+            graph[node] = ""
+            for dep in deps.split() + base_deps.split():
+                if not dep in graph[node]:
+                    if graph[node] != "":
+                        graph[node] += " "
+                    graph[node] += dep
+
+                if not dep in graph:
+                    add_node(dep)
 
         for fstype in image_fstypes:
             add_node(fstype)
@@ -264,9 +266,9 @@
 
         return (alltypes, filtered_groups, cimages)
 
-    def _write_script(self, type, cmds):
+    def _write_script(self, type, cmds, sprefix=""):
         tempdir = self.d.getVar('T', True)
-        script_name = os.path.join(tempdir, "create_image." + type)
+        script_name = os.path.join(tempdir, sprefix + "create_image." + type)
         rootfs_size = self._get_rootfs_size()
 
         self.d.setVar('img_creation_func', '\n'.join(cmds))
@@ -284,7 +286,7 @@
 
         return script_name
 
-    def _get_imagecmds(self):
+    def _get_imagecmds(self, sprefix=""):
         old_overrides = self.d.getVar('OVERRIDES', 0)
 
         alltypes, fstype_groups, cimages = self._get_image_types()
@@ -320,9 +322,9 @@
                 else:
                     subimages.append(type)
 
-                script_name = self._write_script(type, cmds)
+                script_name = self._write_script(type, cmds, sprefix)
 
-                image_cmds.append((type, subimages, script_name))
+                image_cmds.append((type, subimages, script_name, sprefix))
 
             image_cmd_groups.append(image_cmds)
 
@@ -355,6 +357,27 @@
 
         image_cmd_groups = self._get_imagecmds()
 
+        # Process the debug filesystem...
+        debugfs_d = bb.data.createCopy(self.d)
+        if self.d.getVar('IMAGE_GEN_DEBUGFS', True) == "1":
+            bb.note("Processing debugfs image(s) ...")
+            orig_d = self.d
+            self.d = debugfs_d
+
+            self.d.setVar('IMAGE_ROOTFS', orig_d.getVar('IMAGE_ROOTFS', True) + '-dbg')
+            self.d.setVar('IMAGE_NAME', orig_d.getVar('IMAGE_NAME', True) + '-dbg')
+            self.d.setVar('IMAGE_LINK_NAME', orig_d.getVar('IMAGE_LINK_NAME', True) + '-dbg')
+
+            debugfs_image_fstypes = orig_d.getVar('IMAGE_FSTYPES_DEBUGFS', True)
+            if debugfs_image_fstypes:
+                self.d.setVar('IMAGE_FSTYPES', orig_d.getVar('IMAGE_FSTYPES_DEBUGFS', True))
+
+            self._remove_old_symlinks()
+
+            image_cmd_groups += self._get_imagecmds("debugfs.")
+
+            self.d = orig_d
+
         self._write_wic_env()
 
         for image_cmds in image_cmd_groups:
@@ -369,9 +392,16 @@
                 if result is not None:
                     bb.fatal(result)
 
-            for image_type, subimages, script in image_cmds:
-                bb.note("Creating symlinks for %s image ..." % image_type)
-                self._create_symlinks(subimages)
+            for image_type, subimages, script, sprefix in image_cmds:
+                if sprefix == 'debugfs.':
+                    bb.note("Creating symlinks for %s debugfs image ..." % image_type)
+                    orig_d = self.d
+                    self.d = debugfs_d
+                    self._create_symlinks(subimages)
+                    self.d = orig_d
+                else:
+                    bb.note("Creating symlinks for %s image ..." % image_type)
+                    self._create_symlinks(subimages)
 
         execute_pre_post_process(self.d, post_process_cmds)
 
diff --git a/yocto-poky/meta/lib/oe/package_manager.py b/yocto-poky/meta/lib/oe/package_manager.py
index 292ed44..b9fa6d8 100644
--- a/yocto-poky/meta/lib/oe/package_manager.py
+++ b/yocto-poky/meta/lib/oe/package_manager.py
@@ -133,8 +133,11 @@
             if pkgfeed_gpg_name:
                 repomd_file = os.path.join(arch_dir, 'repodata', 'repomd.xml')
                 gpg_cmd = "%s --detach-sign --armor --batch --no-tty --yes " \
-                          "--passphrase-file '%s' -u '%s' %s" % (gpg_bin,
-                          pkgfeed_gpg_pass, pkgfeed_gpg_name, repomd_file)
+                          "--passphrase-file '%s' -u '%s' " % \
+                          (gpg_bin, pkgfeed_gpg_pass, pkgfeed_gpg_name)
+                if self.d.getVar('GPG_PATH', True):
+                    gpg_cmd += "--homedir %s " % self.d.getVar('GPG_PATH', True)
+                gpg_cmd += repomd_file
                 repo_sign_cmds.append(gpg_cmd)
 
             rpm_dirs_found = True
@@ -200,6 +203,8 @@
         result = oe.utils.multiprocess_exec(index_cmds, create_index)
         if result:
             bb.fatal('%s' % ('\n'.join(result)))
+        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+            raise NotImplementedError('Package feed signing not implemented for ipk')
 
 
 
@@ -275,6 +280,8 @@
         result = oe.utils.multiprocess_exec(index_cmds, create_index)
         if result:
             bb.fatal('%s' % ('\n'.join(result)))
+        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+            raise NotImplementedError('Package feed signing not implemented for dpkg')
 
 
 
@@ -434,24 +441,30 @@
                 (self.opkg_cmd, self.opkg_args)
 
         try:
-            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
+            # bb.note(cmd)
+            tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
+
         except subprocess.CalledProcessError as e:
             bb.fatal("Cannot get the installed packages list. Command '%s' "
                      "returned %d:\n%s" % (cmd, e.returncode, e.output))
 
-        if output and format == "file":
-            tmp_output = ""
-            for line in output.split('\n'):
+        output = list()
+        for line in tmp_output.split('\n'):
+            if len(line.strip()) == 0:
+                continue
+            if format == "file":
                 pkg, pkg_file, pkg_arch = line.split()
                 full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
                 if os.path.exists(full_path):
-                    tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
+                    output.append('%s %s %s' % (pkg, full_path, pkg_arch))
                 else:
-                    tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)
+                    output.append('%s %s %s' % (pkg, pkg_file, pkg_arch))
+            else:
+                output.append(line)
 
-            output = tmp_output
+        output.sort()
 
-        return output
+        return '\n'.join(output)
 
 
 class DpkgPkgsList(PkgsList):
@@ -605,12 +618,12 @@
             cmd.extend(['-x', exclude])
         try:
             bb.note("Installing complementary packages ...")
+            bb.note('Running %s' % cmd)
             complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
         except subprocess.CalledProcessError as e:
             bb.fatal("Could not compute complementary packages list. Command "
                      "'%s' returned %d:\n%s" %
                      (' '.join(cmd), e.returncode, e.output))
-
         self.install(complementary_pkgs.split(), attempt_only=True)
 
     def deploy_dir_lock(self):
@@ -1050,6 +1063,35 @@
     def update(self):
         self._invoke_smart('update rpmsys')
 
+    def get_rdepends_recursively(self, pkgs):
+        # pkgs will be changed during the loop, so use [:] to make a copy.
+        for pkg in pkgs[:]:
+            sub_data = oe.packagedata.read_subpkgdata(pkg, self.d)
+            sub_rdep = sub_data.get("RDEPENDS_" + pkg)
+            if not sub_rdep:
+                continue
+            done = bb.utils.explode_dep_versions2(sub_rdep).keys()
+            next = done
+            # Find all the rdepends on dependency chain
+            while next:
+                new = []
+                for sub_pkg in next:
+                    sub_data = oe.packagedata.read_subpkgdata(sub_pkg, self.d)
+                    sub_pkg_rdep = sub_data.get("RDEPENDS_" + sub_pkg)
+                    if not sub_pkg_rdep:
+                        continue
+                    for p in bb.utils.explode_dep_versions2(sub_pkg_rdep):
+                        # Already handled, skip it.
+                        if p in done or p in pkgs:
+                            continue
+                        # It's a new dep
+                        if oe.packagedata.has_subpkgdata(p, self.d):
+                            done.append(p)
+                            new.append(p)
+                next = new
+            pkgs.extend(done)
+        return pkgs
+
     '''
     Install pkgs with smart, the pkg name is oe format
     '''
@@ -1059,8 +1101,58 @@
             bb.note("There are no packages to install")
             return
         bb.note("Installing the following packages: %s" % ' '.join(pkgs))
-        pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
+        if not attempt_only:
+            # Pull in multilib requires since rpm may not pull in them
+            # correctly, for example,
+            # lib32-packagegroup-core-standalone-sdk-target requires
+            # lib32-libc6, but rpm may pull in libc6 rather than lib32-libc6
+            # since it doesn't know mlprefix (lib32-), bitbake knows it and
+            # can handle it well, find out the RDEPENDS on the chain will
+            # fix the problem. Both do_rootfs and do_populate_sdk have this
+            # issue.
+            # The attempt_only packages don't need this since they are
+            # based on the installed ones.
+            #
+            # Separate pkgs into two lists, one is multilib, the other one
+            # is non-multilib.
+            ml_pkgs = []
+            non_ml_pkgs = pkgs[:]
+            for pkg in pkgs:
+                for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+                    if pkg.startswith(mlib + '-'):
+                        ml_pkgs.append(pkg)
+                        non_ml_pkgs.remove(pkg)
 
+            if len(ml_pkgs) > 0 and len(non_ml_pkgs) > 0:
+                # Found both foo and lib-foo
+                ml_pkgs = self.get_rdepends_recursively(ml_pkgs)
+                non_ml_pkgs = self.get_rdepends_recursively(non_ml_pkgs)
+                # Longer list makes smart slower, so only keep the pkgs
+                # which have the same BPN, and smart can handle others
+                # correctly.
+                pkgs_new = []
+                for pkg in non_ml_pkgs:
+                    for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+                        mlib_pkg = mlib + "-" + pkg
+                        if mlib_pkg in ml_pkgs:
+                            pkgs_new.append(pkg)
+                            pkgs_new.append(mlib_pkg)
+                for pkg in pkgs:
+                    if pkg not in pkgs_new:
+                        pkgs_new.append(pkg)
+                pkgs = pkgs_new
+                new_depends = {}
+                deps = bb.utils.explode_dep_versions2(" ".join(pkgs))
+                for depend in deps:
+                    data = oe.packagedata.read_subpkgdata(depend, self.d)
+                    key = "PKG_%s" % depend
+                    if key in data:
+                        new_depend = data[key]
+                    else:
+                        new_depend = depend
+                    new_depends[new_depend] = deps[depend]
+                pkgs = bb.utils.join_deps(new_depends, commasep=True).split(', ')
+        pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
         if not attempt_only:
             bb.note('to be installed: %s' % ' '.join(pkgs))
             cmd = "%s %s install -y %s" % \
@@ -1379,6 +1471,16 @@
                                         self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
                                         arch))
 
+            if self.opkg_dir != '/var/lib/opkg':
+                # There is no command line option for this anymore, we need to add
+                # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
+                # the default value of "/var/lib" as defined in opkg:
+                # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR      "/var/lib/opkg/info"
+                # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE   "/var/lib/opkg/status"
+                cfg_file.write("option info_dir     %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
+                cfg_file.write("option status_file  %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+
+
     def _create_config(self):
         with open(self.config_file, "w+") as config_file:
             priority = 1
@@ -1394,6 +1496,15 @@
                     config_file.write("src oe-%s file:%s\n" %
                                       (arch, pkgs_dir))
 
+            if self.opkg_dir != '/var/lib/opkg':
+                # There is no command line option for this anymore, we need to add
+                # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
+                # the default value of "/var/lib" as defined in opkg:
+                # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR      "/var/lib/opkg/info"
+                # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE   "/var/lib/opkg/status"
+                config_file.write("option info_dir     %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
+                config_file.write("option status_file  %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+
     def insert_feeds_uris(self):
         if self.feed_uris == "":
             return
@@ -1433,7 +1544,7 @@
         self.deploy_dir_unlock()
 
     def install(self, pkgs, attempt_only=False):
-        if attempt_only and len(pkgs) == 0:
+        if not pkgs:
             return
 
         cmd = "%s %s install %s" % (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
diff --git a/yocto-poky/meta/lib/oe/patch.py b/yocto-poky/meta/lib/oe/patch.py
index 108bf1d..2bf501e 100644
--- a/yocto-poky/meta/lib/oe/patch.py
+++ b/yocto-poky/meta/lib/oe/patch.py
@@ -337,12 +337,15 @@
         return (tmpfile, cmd)
 
     @staticmethod
-    def extractPatches(tree, startcommit, outdir):
+    def extractPatches(tree, startcommit, outdir, paths=None):
         import tempfile
         import shutil
         tempdir = tempfile.mkdtemp(prefix='oepatch')
         try:
             shellcmd = ["git", "format-patch", startcommit, "-o", tempdir]
+            if paths:
+                shellcmd.append('--')
+                shellcmd.extend(paths)
             out = runcmd(["sh", "-c", " ".join(shellcmd)], tree)
             if out:
                 for srcfile in out.split():
@@ -407,6 +410,13 @@
                     runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
                 except CmdError:
                     pass
+                # git am won't always clean up after itself, sadly, so...
+                shellcmd = ["git", "--work-tree=%s" % reporoot, "reset", "--hard", "HEAD"]
+                runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+                # Also need to take care of any stray untracked files
+                shellcmd = ["git", "--work-tree=%s" % reporoot, "clean", "-f"]
+                runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+
                 # Fall back to git apply
                 shellcmd = ["git", "--git-dir=%s" % reporoot, "apply", "-p%s" % patch['strippath']]
                 try:
diff --git a/yocto-poky/meta/lib/oe/recipeutils.py b/yocto-poky/meta/lib/oe/recipeutils.py
index d4fa726..119a688 100644
--- a/yocto-poky/meta/lib/oe/recipeutils.py
+++ b/yocto-poky/meta/lib/oe/recipeutils.py
@@ -31,9 +31,13 @@
     import bb.providers
 
     if pn in cooker.recipecache.pkg_pn:
-        filenames = cooker.recipecache.pkg_pn[pn]
         best = bb.providers.findBestProvider(pn, cooker.data, cooker.recipecache, cooker.recipecache.pkg_pn)
         return best[3]
+    elif pn in cooker.recipecache.providers:
+        filenames = cooker.recipecache.providers[pn]
+        eligible, foundUnique = bb.providers.filterProviders(filenames, pn, cooker.expanded_data, cooker.recipecache)
+        filename = eligible[0]
+        return filename
     else:
         return None
 
@@ -72,6 +76,8 @@
             raise bb.providers.NoProvider('Unable to find any recipe file matching %s' % pn)
     if appends:
         appendfiles = cooker.collection.get_file_appends(recipefile)
+    else:
+        appendfiles = None
     return parse_recipe(recipefile, appendfiles, d)
 
 
@@ -95,6 +101,63 @@
     return varfiles
 
 
+def split_var_value(value, assignment=True):
+    """
+    Split a space-separated variable's value into a list of items,
+    taking into account that some of the items might be made up of
+    expressions containing spaces that should not be split.
+    Parameters:
+        value:
+            The string value to split
+        assignment:
+            True to assume that the value represents an assignment
+            statement, False otherwise. If True, and an assignment
+            statement is passed in the first item in
+            the returned list will be the part of the assignment
+            statement up to and including the opening quote character,
+            and the last item will be the closing quote.
+    """
+    inexpr = 0
+    lastchar = None
+    out = []
+    buf = ''
+    for char in value:
+        if char == '{':
+            if lastchar == '$':
+                inexpr += 1
+        elif char == '}':
+            inexpr -= 1
+        elif assignment and char in '"\'' and inexpr == 0:
+            if buf:
+                out.append(buf)
+            out.append(char)
+            char = ''
+            buf = ''
+        elif char.isspace() and inexpr == 0:
+            char = ''
+            if buf:
+                out.append(buf)
+            buf = ''
+        buf += char
+        lastchar = char
+    if buf:
+        out.append(buf)
+
+    # Join together assignment statement and opening quote
+    outlist = out
+    if assignment:
+        assigfound = False
+        for idx, item in enumerate(out):
+            if '=' in item:
+                assigfound = True
+            if assigfound:
+                if '"' in item or "'" in item:
+                    outlist = [' '.join(out[:idx+1])]
+                    outlist.extend(out[idx+1:])
+                    break
+    return outlist
+
+
 def patch_recipe_file(fn, values, patch=False, relpath=''):
     """Update or insert variable values into a recipe file (assuming you
        have already identified the exact file you want to update.)
@@ -112,7 +175,7 @@
             if name in nowrap_vars:
                 tf.write(rawtext)
             elif name in list_vars:
-                splitvalue = values[name].split()
+                splitvalue = split_var_value(values[name], assignment=False)
                 if len(splitvalue) > 1:
                     linesplit = ' \\\n' + (' ' * (len(name) + 4))
                     tf.write('%s = "%s%s"\n' % (name, linesplit.join(splitvalue), linesplit))
@@ -277,6 +340,22 @@
     return remotes
 
 
+def get_recipe_local_files(d, patches=False):
+    """Get a list of local files in SRC_URI within a recipe."""
+    uris = (d.getVar('SRC_URI', True) or "").split()
+    fetch = bb.fetch2.Fetch(uris, d)
+    ret = {}
+    for uri in uris:
+        if fetch.ud[uri].type == 'file':
+            if (not patches and
+                    bb.utils.exec_flat_python_func('patch_path', uri, fetch, '')):
+                continue
+            # Skip files that are referenced by absolute path
+            if not os.path.isabs(fetch.ud[uri].basepath):
+                ret[fetch.ud[uri].basepath] = fetch.localpath(uri)
+    return ret
+
+
 def get_recipe_patches(d):
     """Get a list of the patches included in SRC_URI within a recipe."""
     patchfiles = []
@@ -518,7 +597,7 @@
                             instfunclines.append(line)
                     return (instfunclines, None, 4, False)
             else:
-                splitval = origvalue.split()
+                splitval = split_var_value(origvalue, assignment=False)
                 changed = False
                 removevar = varname
                 if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]:
@@ -673,11 +752,14 @@
     ru['type'] = 'U'
     ru['datetime'] = ''
 
+    pv = rd.getVar('PV', True)
+
     # XXX: If don't have SRC_URI means that don't have upstream sources so
-    # returns 1.0.
+    # returns the current recipe version, so that upstream version check
+    # declares a match.
     src_uris = rd.getVar('SRC_URI', True)
     if not src_uris:
-        ru['version'] = '1.0'
+        ru['version'] = pv
         ru['type'] = 'M'
         ru['datetime'] = datetime.now()
         return ru
@@ -686,8 +768,6 @@
     src_uri = src_uris.split()[0]
     uri_type, _, _, _, _, _ =  decodeurl(src_uri)
 
-    pv = rd.getVar('PV', True)
-
     manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True)
     if manual_upstream_version:
         # manual tracking of upstream version.
diff --git a/yocto-poky/meta/lib/oe/rootfs.py b/yocto-poky/meta/lib/oe/rootfs.py
index 3b53fce..18df22d 100644
--- a/yocto-poky/meta/lib/oe/rootfs.py
+++ b/yocto-poky/meta/lib/oe/rootfs.py
@@ -66,6 +66,7 @@
                 m = r.search(line)
                 if m:
                     found_error = 1
+                    bb.warn('[log_check] In line: [%s]' % line)
                     bb.warn('[log_check] %s: found an error message in the logfile (keyword \'%s\'):\n[log_check] %s'
 				    % (self.d.getVar('PN', True), m.group(), line))
 
@@ -278,6 +279,7 @@
 
         bb.note("Running intercept scripts:")
         os.environ['D'] = self.image_rootfs
+        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True)
         for script in os.listdir(intercepts_dir):
             script_full = os.path.join(intercepts_dir, script)
 
@@ -595,7 +597,11 @@
 
         pkg_list = []
 
-        pkgs = self._get_pkgs_postinsts(status_file)
+        pkgs = None
+        if not self.d.getVar('PACKAGE_INSTALL', True).strip():
+            bb.note("Building empty image")
+        else:
+            pkgs = self._get_pkgs_postinsts(status_file)
         if pkgs:
             root = "__packagegroup_postinst__"
             pkgs[root] = pkgs.keys()
diff --git a/yocto-poky/meta/lib/oe/sdk.py b/yocto-poky/meta/lib/oe/sdk.py
index 53da0f0..3103f48 100644
--- a/yocto-poky/meta/lib/oe/sdk.py
+++ b/yocto-poky/meta/lib/oe/sdk.py
@@ -5,6 +5,7 @@
 import os
 import shutil
 import glob
+import traceback
 
 
 class Sdk(object):
@@ -25,7 +26,7 @@
         else:
             self.manifest_dir = manifest_dir
 
-        bb.utils.remove(self.sdk_output, True)
+        self.remove(self.sdk_output, True)
 
         self.install_order = Manifest.INSTALL_ORDER
 
@@ -34,29 +35,56 @@
         pass
 
     def populate(self):
-        bb.utils.mkdirhier(self.sdk_output)
+        self.mkdirhier(self.sdk_output)
 
         # call backend dependent implementation
         self._populate()
 
         # Don't ship any libGL in the SDK
-        bb.utils.remove(os.path.join(self.sdk_output, self.sdk_native_path,
-                                     self.d.getVar('libdir_nativesdk', True).strip('/'),
-                                     "libGL*"))
+        self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
+                         self.d.getVar('libdir_nativesdk', True).strip('/'),
+                         "libGL*"))
 
         # Fix or remove broken .la files
-        bb.utils.remove(os.path.join(self.sdk_output, self.sdk_native_path,
-                                     self.d.getVar('libdir_nativesdk', True).strip('/'),
-                                     "*.la"))
+        self.remove(os.path.join(self.sdk_output, self.sdk_native_path,
+                         self.d.getVar('libdir_nativesdk', True).strip('/'),
+                         "*.la"))
 
         # Link the ld.so.cache file into the hosts filesystem
         link_name = os.path.join(self.sdk_output, self.sdk_native_path,
                                  self.sysconfdir, "ld.so.cache")
-        bb.utils.mkdirhier(os.path.dirname(link_name))
+        self.mkdirhier(os.path.dirname(link_name))
         os.symlink("/etc/ld.so.cache", link_name)
 
         execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True))
 
+    def movefile(self, sourcefile, destdir):
+        try:
+            # FIXME: this check of movefile's return code to None should be
+            # fixed within the function to use only exceptions to signal when
+            # something goes wrong
+            if (bb.utils.movefile(sourcefile, destdir) == None):
+                raise OSError("moving %s to %s failed"
+                        %(sourcefile, destdir))
+        #FIXME: using umbrella exc catching because bb.utils method raises it
+        except Exception as e:
+            bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+            bb.error("unable to place %s in final SDK location" % sourcefile)
+
+    def mkdirhier(self, dirpath):
+        try:
+            bb.utils.mkdirhier(dirpath)
+        except OSError as e:
+            bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+            bb.fatal("cannot make dir for SDK: %s" % dirpath)
+
+    def remove(self, path, recurse=False):
+        try:
+            bb.utils.remove(path, recurse)
+        #FIXME: using umbrella exc catching because bb.utils method raises it
+        except Exception as e:
+            bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
+            bb.warn("cannot remove SDK dir: %s" % path)
 
 class RpmSdk(Sdk):
     def __init__(self, d, manifest_dir=None):
@@ -143,15 +171,15 @@
                                             "lib",
                                             "rpm"
                                             )
-        bb.utils.mkdirhier(native_rpm_state_dir)
+        self.mkdirhier(native_rpm_state_dir)
         for f in glob.glob(os.path.join(self.sdk_output,
                                         "var",
                                         "lib",
                                         "rpm",
                                         "*")):
-            bb.utils.movefile(f, native_rpm_state_dir)
+            self.movefile(f, native_rpm_state_dir)
 
-        bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
+        self.remove(os.path.join(self.sdk_output, "var"), True)
 
         # Move host sysconfig data
         native_sysconf_dir = os.path.join(self.sdk_output,
@@ -159,10 +187,10 @@
                                           self.d.getVar('sysconfdir',
                                                         True).strip('/'),
                                           )
-        bb.utils.mkdirhier(native_sysconf_dir)
+        self.mkdirhier(native_sysconf_dir)
         for f in glob.glob(os.path.join(self.sdk_output, "etc", "*")):
-            bb.utils.movefile(f, native_sysconf_dir)
-        bb.utils.remove(os.path.join(self.sdk_output, "etc"), True)
+            self.movefile(f, native_sysconf_dir)
+        self.remove(os.path.join(self.sdk_output, "etc"), True)
 
 
 class OpkgSdk(Sdk):
@@ -219,12 +247,12 @@
         target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
         host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
 
-        bb.utils.mkdirhier(target_sysconfdir)
+        self.mkdirhier(target_sysconfdir)
         shutil.copy(self.target_conf, target_sysconfdir)
         os.chmod(os.path.join(target_sysconfdir,
                               os.path.basename(self.target_conf)), 0644)
 
-        bb.utils.mkdirhier(host_sysconfdir)
+        self.mkdirhier(host_sysconfdir)
         shutil.copy(self.host_conf, host_sysconfdir)
         os.chmod(os.path.join(host_sysconfdir,
                               os.path.basename(self.host_conf)), 0644)
@@ -232,11 +260,11 @@
         native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path,
                                              self.d.getVar('localstatedir_nativesdk', True).strip('/'),
                                              "lib", "opkg")
-        bb.utils.mkdirhier(native_opkg_state_dir)
+        self.mkdirhier(native_opkg_state_dir)
         for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")):
-            bb.utils.movefile(f, native_opkg_state_dir)
+            self.movefile(f, native_opkg_state_dir)
 
-        bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
+        self.remove(os.path.join(self.sdk_output, "var"), True)
 
 
 class DpkgSdk(Sdk):
@@ -264,7 +292,7 @@
     def _copy_apt_dir_to(self, dst_dir):
         staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True)
 
-        bb.utils.remove(dst_dir, True)
+        self.remove(dst_dir, True)
 
         shutil.copytree(os.path.join(staging_etcdir_native, "apt"), dst_dir)
 
@@ -306,11 +334,11 @@
 
         native_dpkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path,
                                              "var", "lib", "dpkg")
-        bb.utils.mkdirhier(native_dpkg_state_dir)
+        self.mkdirhier(native_dpkg_state_dir)
         for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "dpkg", "*")):
-            bb.utils.movefile(f, native_dpkg_state_dir)
+            self.movefile(f, native_dpkg_state_dir)
+        self.remove(os.path.join(self.sdk_output, "var"), True)
 
-        bb.utils.remove(os.path.join(self.sdk_output, "var"), True)
 
 
 def sdk_list_installed_packages(d, target, format=None, rootfs_dir=None):
diff --git a/yocto-poky/meta/lib/oe/sstatesig.py b/yocto-poky/meta/lib/oe/sstatesig.py
index cb46712..6d1be3e 100644
--- a/yocto-poky/meta/lib/oe/sstatesig.py
+++ b/yocto-poky/meta/lib/oe/sstatesig.py
@@ -94,6 +94,26 @@
         self.machine = data.getVar("MACHINE", True)
         self.mismatch_msgs = []
         pass
+
+    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
+        # Translate virtual/xxx entries to PN values
+        newabisafe = []
+        for a in self.abisaferecipes:
+            if a in virtpnmap:
+                newabisafe.append(virtpnmap[a])
+            else:
+                newabisafe.append(a)
+        self.abisaferecipes = newabisafe
+        newsafedeps = []
+        for a in self.saferecipedeps:
+            a1, a2 = a.split("->")
+            if a1 in virtpnmap:
+                a1 = virtpnmap[a1]
+            if a2 in virtpnmap:
+                a2 = virtpnmap[a2]
+            newsafedeps.append(a1 + "->" + a2)
+        self.saferecipedeps = newsafedeps
+
     def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
         return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
 
diff --git a/yocto-poky/meta/lib/oeqa/oetest.py b/yocto-poky/meta/lib/oeqa/oetest.py
index a6f89b6..6f9edec 100644
--- a/yocto-poky/meta/lib/oeqa/oetest.py
+++ b/yocto-poky/meta/lib/oeqa/oetest.py
@@ -11,9 +11,14 @@
 import unittest
 import inspect
 import subprocess
-import bb
-from oeqa.utils.decorators import LogResults, gettag
-from sys import exc_info, exc_clear
+try:
+    import bb
+except ImportError:
+    pass
+import logging
+from oeqa.utils.decorators import LogResults, gettag, getResults
+
+logger = logging.getLogger("BitBake")
 
 def getVar(obj):
     #extend form dict, if a variable didn't exists, need find it in testcase
@@ -89,7 +94,7 @@
                                 suite.dependencies.append(dep_suite)
                             break
                     else:
-                        bb.warn("Test %s was declared as @skipUnlessPassed('%s') but that test is either not defined or not active. Will run the test anyway." %
+                        logger.warning("Test %s was declared as @skipUnlessPassed('%s') but that test is either not defined or not active. Will run the test anyway." %
                                 (test, depends_on))
     # Use brute-force topological sort to determine ordering. Sort by
     # depth (higher depth = must run later), with original ordering to
@@ -106,14 +111,34 @@
     suites.sort(cmp=lambda a,b: cmp((a.depth, a.index), (b.depth, b.index)))
     return testloader.suiteClass(suites)
 
+_buffer = ""
+
+def custom_verbose(msg, *args, **kwargs):
+    global _buffer
+    if msg[-1] != "\n":
+        _buffer += msg
+    else:
+        _buffer += msg
+        try:
+            bb.plain(_buffer.rstrip("\n"), *args, **kwargs)
+        except NameError:
+            logger.info(_buffer.rstrip("\n"), *args, **kwargs)
+        _buffer = ""
+
 def runTests(tc, type="runtime"):
 
     suite = loadTests(tc, type)
-    bb.note("Test modules  %s" % tc.testslist)
+    logger.info("Test modules  %s" % tc.testslist)
     if hasattr(tc, "tagexp") and tc.tagexp:
-        bb.note("Filter test cases by tags: %s" % tc.tagexp)
-    bb.note("Found %s tests" % suite.countTestCases())
+        logger.info("Filter test cases by tags: %s" % tc.tagexp)
+    logger.info("Found %s tests" % suite.countTestCases())
     runner = unittest.TextTestRunner(verbosity=2)
+    try:
+        if bb.msg.loggerDefaultVerbose:
+            runner.stream.write = custom_verbose
+    except NameError:
+        # Not in bb environment?
+        pass
     result = runner.run(suite)
 
     return result
@@ -158,17 +183,24 @@
         pass
 
     def tearDown(self):
-        # If a test fails or there is an exception
-        if not exc_info() == (None, None, None):
-            exc_clear()
-            #Only dump for QemuTarget
-            if (type(self.target).__name__ == "QemuTarget"):
-                self.tc.host_dumper.create_dir(self._testMethodName)
-                self.tc.host_dumper.dump_host()
-                self.target.target_dumper.dump_target(
-                        self.tc.host_dumper.dump_dir)
-                print ("%s dump data stored in %s" % (self._testMethodName,
-                         self.tc.host_dumper.dump_dir))
+        res = getResults()
+        # If a test fails or there is an exception dump
+        # for QemuTarget only
+        if (type(self.target).__name__ == "QemuTarget" and
+                (self.id() in res.getErrorList() or
+                self.id() in  res.getFailList())):
+            self.tc.host_dumper.create_dir(self._testMethodName)
+            self.tc.host_dumper.dump_host()
+            self.target.target_dumper.dump_target(
+                    self.tc.host_dumper.dump_dir)
+            print ("%s dump data stored in %s" % (self._testMethodName,
+                     self.tc.host_dumper.dump_dir))
+
+        self.tearDownLocal()
+
+    # Method to be run after tearDown and implemented by child classes
+    def tearDownLocal(self):
+        pass
 
     #TODO: use package_manager.py to install packages on any type of image
     def install_packages(self, packagelist):
@@ -190,7 +222,7 @@
         return False
 
     def _run(self, cmd):
-        return subprocess.check_output(cmd, shell=True)
+        return subprocess.check_output(". %s; " % self.tc.sdkenv + cmd, shell=True)
 
 def getmodule(pos=2):
     # stack returns a list of tuples containg frame information
diff --git a/yocto-poky/meta/lib/oeqa/runexported.py b/yocto-poky/meta/lib/oeqa/runexported.py
index 96442b1..dba0d7a 100755
--- a/yocto-poky/meta/lib/oeqa/runexported.py
+++ b/yocto-poky/meta/lib/oeqa/runexported.py
@@ -21,7 +21,7 @@
 import sys
 import os
 import time
-from optparse import OptionParser
+import argparse
 
 try:
     import simplejson as json
@@ -49,8 +49,8 @@
     def exportStart(self):
         self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime)
         sshloglink = os.path.join(self.testdir, "ssh_target_log")
-        if os.path.islink(sshloglink):
-            os.unlink(sshloglink)
+        if os.path.exists(sshloglink):
+            os.remove(sshloglink)
         os.symlink(self.sshlog, sshloglink)
         print("SSH log file: %s" %  self.sshlog)
         self.connection = SSHControl(self.ip, logfile=self.sshlog)
@@ -76,43 +76,41 @@
 
 def main():
 
-    usage = "usage: %prog [options] <json file>"
-    parser = OptionParser(usage=usage)
-    parser.add_option("-t", "--target-ip", dest="ip", help="The IP address of the target machine. Use this to \
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-t", "--target-ip", dest="ip", help="The IP address of the target machine. Use this to \
             overwrite the value determined from TEST_TARGET_IP at build time")
-    parser.add_option("-s", "--server-ip", dest="server_ip", help="The IP address of this machine. Use this to \
+    parser.add_argument("-s", "--server-ip", dest="server_ip", help="The IP address of this machine. Use this to \
             overwrite the value determined from TEST_SERVER_IP at build time.")
-    parser.add_option("-d", "--deploy-dir", dest="deploy_dir", help="Full path to the package feeds, that this \
+    parser.add_argument("-d", "--deploy-dir", dest="deploy_dir", help="Full path to the package feeds, that this \
             the contents of what used to be DEPLOY_DIR on the build machine. If not specified it will use the value \
             specified in the json if that directory actually exists or it will error out.")
-    parser.add_option("-l", "--log-dir", dest="log_dir", help="This sets the path for TEST_LOG_DIR. If not specified \
+    parser.add_argument("-l", "--log-dir", dest="log_dir", help="This sets the path for TEST_LOG_DIR. If not specified \
             the current dir is used. This is used for usually creating a ssh log file and a scp test file.")
+    parser.add_argument("json", help="The json file exported by the build system", default="testdata.json", nargs='?')
 
-    (options, args) = parser.parse_args()
-    if len(args) != 1:
-        parser.error("Incorrect number of arguments. The one and only argument should be a json file exported by the build system")
+    args = parser.parse_args()
 
-    with open(args[0], "r") as f:
+    with open(args.json, "r") as f:
         loaded = json.load(f)
 
-    if options.ip:
-        loaded["target"]["ip"] = options.ip
-    if options.server_ip:
-        loaded["target"]["server_ip"] = options.server_ip
+    if args.ip:
+        loaded["target"]["ip"] = args.ip
+    if args.server_ip:
+        loaded["target"]["server_ip"] = args.server_ip
 
     d = MyDataDict()
     for key in loaded["d"].keys():
         d[key] = loaded["d"][key]
 
-    if options.log_dir:
-        d["TEST_LOG_DIR"] = options.log_dir
+    if args.log_dir:
+        d["TEST_LOG_DIR"] = args.log_dir
     else:
         d["TEST_LOG_DIR"] = os.path.abspath(os.path.dirname(__file__))
-    if options.deploy_dir:
-        d["DEPLOY_DIR"] = options.deploy_dir
+    if args.deploy_dir:
+        d["DEPLOY_DIR"] = args.deploy_dir
     else:
         if not os.path.isdir(d["DEPLOY_DIR"]):
-            raise Exception("The path to DEPLOY_DIR does not exists: %s" % d["DEPLOY_DIR"])
+            print("WARNING: The path to DEPLOY_DIR does not exist: %s" % d["DEPLOY_DIR"])
 
 
     target = FakeTarget(d)
diff --git a/yocto-poky/meta/lib/oeqa/runtime/_ptest.py b/yocto-poky/meta/lib/oeqa/runtime/_ptest.py
index 81c9c43..0621028 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/_ptest.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/_ptest.py
@@ -98,7 +98,7 @@
 
         return complementary_pkgs.split()
 
-    def setUp(self):
+    def setUpLocal(self):
         self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True))
 
     @skipUnlessPassed('test_ssh')
diff --git a/yocto-poky/meta/lib/oeqa/runtime/connman.py b/yocto-poky/meta/lib/oeqa/runtime/connman.py
index ee69e5d..bd9dba3 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/connman.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/connman.py
@@ -29,26 +29,3 @@
         if status != 0:
             print self.service_status("connman")
             self.fail("No connmand process running")
-
-    @testcase(223)
-    def test_only_one_connmand_in_background(self):
-        """
-        Summary:     Only one connmand in background
-        Expected:    There will be only one connmand instance in background.
-        Product:     BSPs
-        Author:      Alexandru Georgescu <alexandru.c.georgescu@intel.com>
-        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
-        """
-
-        # Make sure that 'connmand' is running in background
-        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand')
-        self.assertEqual(0, status, 'Failed to find "connmand" process running in background.')
-
-        # Start a new instance of 'connmand'
-        (status, output) = self.target.run('connmand')
-        self.assertEqual(0, status, 'Failed to start a new "connmand" process.')
-
-        # Make sure that only one 'connmand' is running in background
-        (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand | wc -l')
-        self.assertEqual(0, status, 'Failed to find "connmand" process running in background.')
-        self.assertEqual(1, int(output), 'Found {} connmand processes running, expected 1.'.format(output))
diff --git a/yocto-poky/meta/lib/oeqa/runtime/date.py b/yocto-poky/meta/lib/oeqa/runtime/date.py
index 3a8fe84..447987e 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/date.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/date.py
@@ -4,11 +4,11 @@
 
 class DateTest(oeRuntimeTest):
 
-    def setUp(self):
+    def setUpLocal(self):
         if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
             self.target.run('systemctl stop systemd-timesyncd')
 
-    def tearDown(self):
+    def tearDownLocal(self):
         if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd":
             self.target.run('systemctl start systemd-timesyncd')
 
diff --git a/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile b/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile
new file mode 100644
index 0000000..fb05f82
--- /dev/null
+++ b/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile
@@ -0,0 +1,5 @@
+test: test.o
+	$(CC) -o test test.o -lm
+test.o: test.c
+	$(CC) -c test.c
+
diff --git a/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py b/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py
index 2e81720..38ca184 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py
@@ -10,7 +10,7 @@
 
 class KernelModuleTest(oeRuntimeTest):
 
-    def setUp(self):
+    def setUpLocal(self):
         self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod.c"), "/tmp/hellomod.c")
         self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod_makefile"), "/tmp/Makefile")
 
@@ -30,5 +30,5 @@
             (status, output) = self.target.run(cmd, 900)
             self.assertEqual(status, 0, msg="\n".join([cmd, output]))
 
-    def tearDown(self):
+    def tearDownLocal(self):
         self.target.run('rm -f /tmp/Makefile /tmp/hellomod.c')
diff --git a/yocto-poky/meta/lib/oeqa/runtime/parselogs.py b/yocto-poky/meta/lib/oeqa/runtime/parselogs.py
index e20947b..fc2bc38 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/parselogs.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/parselogs.py
@@ -36,6 +36,8 @@
     'VGA arbiter: cannot open kernel arbiter, no multi-card support',
     'Failed to find URL:http://ipv4.connman.net/online/status.html',
     'Online check failed for',
+    'netlink init failed',
+    'Fast TSC calibration',
     ]
 
 x86_common = [
@@ -46,7 +48,6 @@
 ] + common_errors
 
 qemux86_common = [
-    'Fast TSC calibration', 
     'wrong ELF class',
     "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
     "can't claim BAR ",
@@ -89,7 +90,7 @@
         '(EE) open /dev/fb0: No such file or directory',
         '(EE) AIGLX: reverting to software rendering',
         ] + x86_common,
-    'core2_32' : [
+    'intel-core2-32' : [
         'ACPI: No _BQC method, cannot determine initial brightness',
         '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
         '(EE) Failed to load module "psb"',
@@ -98,6 +99,7 @@
         '(EE) Failed to load module psbdrv',
         '(EE) open /dev/fb0: No such file or directory',
         '(EE) AIGLX: reverting to software rendering',
+        "controller can't do DEVSLP, turning off",
         ] + x86_common,
     'intel-corei7-64' : [
         "controller can't do DEVSLP, turning off",
@@ -108,13 +110,9 @@
     'edgerouter' : [
         'Fatal server error:',
         ] + common_errors,
-    'minnow' : [
-        'netlink init failed',
-        ] + common_errors,
     'jasperforest' : [
         'Activated service \'org.bluez\' failed:',
         'Unable to find NFC netlink family',
-        'netlink init failed',
         ] + common_errors,
 }
 
@@ -233,8 +231,7 @@
 
     #get the output of dmesg and write it in a file. This file is added to log_locations.
     def write_dmesg(self):
-        (status, dmesg) = self.target.run("dmesg")
-        (status, dmesg2) = self.target.run("echo \""+str(dmesg)+"\" > /tmp/dmesg_output.log")
+        (status, dmesg) = self.target.run("dmesg > /tmp/dmesg_output.log")
 
     @testcase(1059)
     @skipUnlessPassed('test_ssh')
diff --git a/yocto-poky/meta/lib/oeqa/runtime/scanelf.py b/yocto-poky/meta/lib/oeqa/runtime/scanelf.py
index 43a024a..67e02ff 100644
--- a/yocto-poky/meta/lib/oeqa/runtime/scanelf.py
+++ b/yocto-poky/meta/lib/oeqa/runtime/scanelf.py
@@ -8,7 +8,7 @@
 
 class ScanelfTest(oeRuntimeTest):
 
-    def setUp(self):
+    def setUpLocal(self):
         self.scancmd = 'scanelf --quiet --recursive --mount --ldpath --path'
 
     @testcase(966)
diff --git a/yocto-poky/meta/lib/oeqa/sdk/gcc.py b/yocto-poky/meta/lib/oeqa/sdk/gcc.py
index 67994b9..8395b9b 100644
--- a/yocto-poky/meta/lib/oeqa/sdk/gcc.py
+++ b/yocto-poky/meta/lib/oeqa/sdk/gcc.py
@@ -14,7 +14,7 @@
 
     @classmethod
     def setUpClass(self):
-        for f in ['test.c', 'test.cpp', 'testmakefile']:
+        for f in ['test.c', 'test.cpp', 'testsdkmakefile']:
             shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f)
 
     def test_gcc_compile(self):
@@ -27,10 +27,10 @@
         self._run('$CXX %s/test.cpp -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir))
 
     def test_make(self):
-        self._run('cd %s; make -f testmakefile' % self.tc.sdktestdir)
+        self._run('cd %s; make -f testsdkmakefile' % self.tc.sdktestdir)
 
     @classmethod
     def tearDownClass(self):
-        files = [self.tc.sdktestdir + f for f in ['test.c', 'test.cpp', 'test.o', 'test', 'testmakefile']]
+        files = [self.tc.sdktestdir + f for f in ['test.c', 'test.cpp', 'test.o', 'test', 'testsdkmakefile']]
         for f in files:
             bb.utils.remove(f)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/archiver.py b/yocto-poky/meta/lib/oeqa/selftest/archiver.py
new file mode 100644
index 0000000..f2030c4
--- /dev/null
+++ b/yocto-poky/meta/lib/oeqa/selftest/archiver.py
@@ -0,0 +1,50 @@
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+import glob
+import os
+import shutil
+
+
+class Archiver(oeSelfTest):
+
+    @testcase(1345)
+    def test_archiver_allows_to_filter_on_recipe_name(self):
+        """
+        Summary:     The archiver should offer the possibility to filter on the recipe. (#6929)
+        Expected:    1. Included recipe (busybox) should be included
+                     2. Excluded recipe (zlib) should be excluded
+        Product:     oe-core
+        Author:      Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
+        """
+
+        include_recipe = 'busybox'
+        exclude_recipe = 'zlib'
+
+        features = 'INHERIT += "archiver"\n'
+        features += 'ARCHIVER_MODE[src] = "original"\n'
+        features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % include_recipe
+        features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % exclude_recipe
+
+        # Update local.conf
+        self.write_config(features)
+
+        tmp_dir = get_bb_var('TMPDIR')
+        deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
+        target_sys = get_bb_var('TARGET_SYS')
+        src_path = os.path.join(deploy_dir_src, target_sys)
+
+        # Delete tmp directory
+        shutil.rmtree(tmp_dir)
+
+        # Build core-image-minimal
+        bitbake('core-image-minimal')
+
+        # Check that include_recipe was included
+        is_included = len(glob.glob(src_path + '/%s*' % include_recipe))
+        self.assertEqual(1, is_included, 'Recipe %s was not included.' % include_recipe)
+
+        # Check that exclude_recipe was excluded
+        is_excluded = len(glob.glob(src_path + '/%s*' % exclude_recipe))
+        self.assertEqual(0, is_excluded, 'Recipe %s was not excluded.' % exclude_recipe)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/base.py b/yocto-poky/meta/lib/oeqa/selftest/base.py
index b2faa66..9bddc23 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/base.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/base.py
@@ -31,7 +31,7 @@
         self.testinc_bblayers_path = os.path.join(self.builddir, "conf/bblayers.inc")
         self.testlayer_path = oeSelfTest.testlayer_path
         self._extra_tear_down_commands = []
-        self._track_for_cleanup = []
+        self._track_for_cleanup = [self.testinc_path]
         super(oeSelfTest, self).__init__(methodName)
 
     def setUp(self):
diff --git a/yocto-poky/meta/lib/oeqa/selftest/bbtests.py b/yocto-poky/meta/lib/oeqa/selftest/bbtests.py
index 3d6860f..94ca79c 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/bbtests.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/bbtests.py
@@ -1,8 +1,5 @@
-import unittest
 import os
-import logging
 import re
-import shutil
 
 import oeqa.utils.ftools as ftools
 from oeqa.selftest.base import oeSelfTest
@@ -68,15 +65,43 @@
         bitbake('-cclean man')
         self.assertTrue("ERROR: Function failed: patch_do_patch" in result.output, msg = "Though no man-1.5h1-make.patch file exists, bitbake didn't output any err. message. bitbake output: %s" % result.output)
 
+    @testcase(1354)
+    def test_force_task_1(self):
+        # test 1 from bug 5875
+        test_recipe = 'zlib'
+        test_data = "Microsoft Made No Profit From Anyone's Zunes Yo"
+        image_dir = get_bb_var('D', test_recipe)
+        pkgsplit_dir = get_bb_var('PKGDEST', test_recipe)
+        man_dir = get_bb_var('mandir', test_recipe)
+
+        bitbake('-c cleansstate %s' % test_recipe)
+        bitbake(test_recipe)
+        self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+
+        man_file = os.path.join(image_dir + man_dir, 'man3/zlib.3')
+        ftools.append_file(man_file, test_data)
+        bitbake('-c package -f %s' % test_recipe)
+
+        man_split_file = os.path.join(pkgsplit_dir, 'zlib-doc' + man_dir, 'man3/zlib.3')
+        man_split_content = ftools.read_file(man_split_file)
+        self.assertIn(test_data, man_split_content, 'The man file has not changed in packages-split.')
+
+        ret = bitbake(test_recipe)
+        self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm did not re-executed.')
+
     @testcase(163)
-    def test_force_task(self):
-        bitbake('m4-native')
-        self.add_command_to_tearDown('bitbake -c clean m4-native')
-        result = bitbake('-C compile m4-native')
-        look_for_tasks = ['do_compile', 'do_install', 'do_populate_sysroot']
+    def test_force_task_2(self):
+        # test 2 from bug 5875
+        test_recipe = 'zlib'
+
+        bitbake('-c cleansstate %s' % test_recipe)
+        bitbake(test_recipe)
+        self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
+
+        result = bitbake('-C compile %s' % test_recipe)
+        look_for_tasks = ['do_compile:', 'do_install:', 'do_populate_sysroot:', 'do_package:']
         for task in look_for_tasks:
-            find_task = re.search("m4-native.*%s" % task, result.output)
-            self.assertTrue(find_task, msg = "Couldn't find %s task. bitbake output %s" % (task, result.output))
+            self.assertIn(task, result.output, msg="Couldn't find %s task.")
 
     @testcase(167)
     def test_bitbake_g(self):
@@ -101,6 +126,8 @@
         self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
 SSTATE_DIR = \"${TOPDIR}/download-selftest\"
 """)
+        self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
+
         bitbake('-ccleanall man')
         result = bitbake('-c fetch man', ignore_status=True)
         bitbake('-ccleanall man')
@@ -116,20 +143,20 @@
         self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
 SSTATE_DIR = \"${TOPDIR}/download-selftest\"
 """)
+        self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
+
         data = 'SRC_URI_append = ";downloadfilename=test-aspell.tar.gz"'
         self.write_recipeinc('aspell', data)
         bitbake('-ccleanall aspell')
         result = bitbake('-c fetch aspell', ignore_status=True)
         self.delete_recipeinc('aspell')
-        self.addCleanup(bitbake, '-ccleanall aspell')
         self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
         self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % str(get_bb_var("DL_DIR")))
         self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % str(get_bb_var("DL_DIR")))
 
     @testcase(1028)
     def test_environment(self):
-        self.append_config("TEST_ENV=\"localconf\"")
-        self.addCleanup(self.remove_config, "TEST_ENV=\"localconf\"")
+        self.write_config("TEST_ENV=\"localconf\"")
         result = runCmd('bitbake -e | grep TEST_ENV=')
         self.assertTrue('localconf' in result.output, msg = "bitbake didn't report any value for TEST_ENV variable. To test, run 'bitbake -e | grep TEST_ENV='")
 
@@ -156,8 +183,7 @@
         ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
         result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
         self.assertTrue('prefile' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration. ")
-        self.append_config("TEST_PREFILE=\"localconf\"")
-        self.addCleanup(self.remove_config, "TEST_PREFILE=\"localconf\"")
+        self.write_config("TEST_PREFILE=\"localconf\"")
         result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
         self.assertTrue('localconf' in result.output, "Preconfigure file \"prefile.conf\"was not taken into consideration.")
 
@@ -166,8 +192,7 @@
         postconf = os.path.join(self.builddir, 'conf/postfile.conf')
         self.track_for_cleanup(postconf)
         ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
-        self.append_config("TEST_POSTFILE=\"localconf\"")
-        self.addCleanup(self.remove_config, "TEST_POSTFILE=\"localconf\"")
+        self.write_config("TEST_POSTFILE=\"localconf\"")
         result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
         self.assertTrue('postfile' in result.output, "Postconfigure file \"postfile.conf\"was not taken into consideration.")
 
@@ -181,6 +206,7 @@
         self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
 SSTATE_DIR = \"${TOPDIR}/download-selftest\"
 """)
+        self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
         self.write_recipeinc('man',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
         runCmd('bitbake -c cleanall man xcursor-transparent-theme')
         result = runCmd('bitbake man xcursor-transparent-theme -k', ignore_status=True)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py b/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py
index 483803b..acf481f 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py
@@ -1,9 +1,6 @@
-import unittest
 import os
-import logging
 import re
 import glob as g
-import pexpect as p
 
 from oeqa.selftest.base import oeSelfTest
 from oeqa.selftest.buildhistory import BuildhistoryBase
@@ -42,7 +39,7 @@
         for image_file in deploydir_files:
             if imagename in image_file and os.path.islink(os.path.join(deploydir, image_file)):
                 track_original_files.append(os.path.realpath(os.path.join(deploydir, image_file)))
-        self.append_config("RM_OLD_IMAGE = \"1\"")
+        self.write_config("RM_OLD_IMAGE = \"1\"")
         bitbake("-C rootfs core-image-minimal")
         deploydir_files = os.listdir(deploydir)
         remaining_not_expected = [path for path in track_original_files if os.path.basename(path) in deploydir_files]
@@ -100,7 +97,7 @@
 
     @testcase(278)
     def test_sanity_userspace_dependency(self):
-        self.append_config('WARN_QA_append = " unsafe-references-in-binaries unsafe-references-in-scripts"')
+        self.write_config('WARN_QA_append = " unsafe-references-in-binaries unsafe-references-in-scripts"')
         bitbake("-ccleansstate gzip nfs-utils")
         res = bitbake("gzip nfs-utils")
         self.assertTrue("WARNING: QA Issue: gzip" in res.output, "WARNING: QA Issue: gzip message is not present in bitbake's output: %s" % res.output)
@@ -128,7 +125,7 @@
         This method is used to test the build of directfb image for arm arch.
         In essence we build a coreimagedirectfb and test the exitcode of bitbake that in case of success is 0.
         """
-        self.add_command_to_tearDown('cleanupworkdir')
+        self.add_command_to_tearDown('cleanup-workdir')
         self.write_config("DISTRO_FEATURES_remove = \"x11\"\nDISTRO_FEATURES_append = \" directfb\"\nMACHINE ??= \"qemuarm\"")
         res = bitbake("core-image-directfb", ignore_status=True)
         self.assertEqual(res.status, 0, "\ncoreimagedirectfb failed to build. Please check logs for further details.\nbitbake output %s" % res.output)
@@ -139,7 +136,7 @@
         """
         Test for archiving the work directory and exporting the source files.
         """
-        self.add_command_to_tearDown('cleanupworkdir')
+        self.add_command_to_tearDown('cleanup-workdir')
         self.write_config("INHERIT = \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"")
         res = bitbake("xcursor-transparent-theme", ignore_status=True)
         self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/devtool.py b/yocto-poky/meta/lib/oeqa/selftest/devtool.py
index 6e731d6..dcdef5a 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/devtool.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/devtool.py
@@ -84,11 +84,44 @@
 
 class DevtoolTests(DevtoolBase):
 
+    def setUp(self):
+        """Test case setup function"""
+        super(DevtoolTests, self).setUp()
+        self.workspacedir = os.path.join(self.builddir, 'workspace')
+        self.assertTrue(not os.path.exists(self.workspacedir),
+                        'This test cannot be run with a workspace directory '
+                        'under the build directory')
+
+    def _check_src_repo(self, repo_dir):
+        """Check srctree git repository"""
+        self.assertTrue(os.path.isdir(os.path.join(repo_dir, '.git')),
+                        'git repository for external source tree not found')
+        result = runCmd('git status --porcelain', cwd=repo_dir)
+        self.assertEqual(result.output.strip(), "",
+                         'Created git repo is not clean')
+        result = runCmd('git symbolic-ref HEAD', cwd=repo_dir)
+        self.assertEqual(result.output.strip(), "refs/heads/devtool",
+                         'Wrong branch in git repo')
+
+    def _check_repo_status(self, repo_dir, expected_status):
+        """Check the worktree status of a repository"""
+        result = runCmd('git status . --porcelain',
+                        cwd=repo_dir)
+        for line in result.output.splitlines():
+            for ind, (f_status, fn_re) in enumerate(expected_status):
+                if re.match(fn_re, line[3:]):
+                    if f_status != line[:2]:
+                        self.fail('Unexpected status in line: %s' % line)
+                    expected_status.pop(ind)
+                    break
+            else:
+                self.fail('Unexpected modified file in line: %s' % line)
+        if expected_status:
+            self.fail('Missing file changes: %s' % expected_status)
+
     @testcase(1158)
     def test_create_workspace(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         result = runCmd('bitbake-layers show-layers')
         self.assertTrue('/workspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
         # Try creating a workspace layer with a specific path
@@ -99,19 +132,16 @@
         result = runCmd('bitbake-layers show-layers')
         self.assertIn(tempdir, result.output)
         # Try creating a workspace layer with the default path
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool create-workspace')
-        self.assertTrue(os.path.isfile(os.path.join(workspacedir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
+        self.assertTrue(os.path.isfile(os.path.join(self.workspacedir, 'conf', 'layer.conf')), msg = "No workspace created. devtool output: %s " % result.output)
         result = runCmd('bitbake-layers show-layers')
         self.assertNotIn(tempdir, result.output)
-        self.assertIn(workspacedir, result.output)
+        self.assertIn(self.workspacedir, result.output)
 
     @testcase(1159)
     def test_devtool_add(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Fetch source
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
@@ -121,11 +151,11 @@
         srcdir = os.path.join(tempdir, 'pv-1.5.3')
         self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure')), 'Unable to find configure script in source directory')
         # Test devtool add
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake -c cleansstate pv')
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool add pv %s' % srcdir)
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
         # Test devtool status
         result = runCmd('devtool status')
         self.assertIn('pv', result.output)
@@ -144,9 +174,6 @@
 
     @testcase(1162)
     def test_devtool_add_library(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # We don't have the ability to pick up this dependency automatically yet...
         bitbake('libusb1')
         # Fetch source
@@ -158,10 +185,10 @@
         srcdir = os.path.join(tempdir, 'libftdi1-1.1')
         self.assertTrue(os.path.isfile(os.path.join(srcdir, 'CMakeLists.txt')), 'Unable to find CMakeLists.txt in source directory')
         # Test devtool add (and use -V so we test that too)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool add libftdi %s -V 1.1' % srcdir)
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
         # Test devtool status
         result = runCmd('devtool status')
         self.assertIn('libftdi', result.output)
@@ -185,9 +212,6 @@
 
     @testcase(1160)
     def test_devtool_add_fetch(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Fetch source
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
@@ -196,11 +220,11 @@
         testrecipe = 'python-markupsafe'
         srcdir = os.path.join(tempdir, testrecipe)
         # Test devtool add
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. %s' % result.output)
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. %s' % result.output)
         self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
         # Test devtool status
         result = runCmd('devtool status')
@@ -232,9 +256,6 @@
 
     @testcase(1161)
     def test_devtool_add_fetch_git(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Fetch source
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
@@ -243,11 +264,11 @@
         testrecipe = 'libmatchbox2'
         srcdir = os.path.join(tempdir, testrecipe)
         # Test devtool add
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created: %s' % result.output)
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created: %s' % result.output)
         self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure.ac in source directory')
         # Test devtool status
         result = runCmd('devtool status')
@@ -284,32 +305,25 @@
 
     @testcase(1164)
     def test_devtool_modify(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Clean up anything in the workdir/sysroot/sstate cache
         bitbake('mdadm -c cleansstate')
         # Try modifying a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         self.add_command_to_tearDown('bitbake -c clean mdadm')
         result = runCmd('devtool modify mdadm -x %s' % tempdir)
         self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), 'Extracted source could not be found')
-        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
-        matches = glob.glob(os.path.join(workspacedir, 'appends', 'mdadm_*.bbappend'))
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'mdadm_*.bbappend'))
         self.assertTrue(matches, 'bbappend not created %s' % result.output)
         # Test devtool status
         result = runCmd('devtool status')
         self.assertIn('mdadm', result.output)
         self.assertIn(tempdir, result.output)
         # Check git repo
-        result = runCmd('git status --porcelain', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempdir)
         # Try building
         bitbake('mdadm')
         # Try making (minor) modifications to the source
@@ -336,13 +350,10 @@
 
     @testcase(1166)
     def test_devtool_modify_invalid(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Try modifying some recipes
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
 
         testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split()
@@ -367,14 +378,14 @@
             self.assertNotEqual(result.status, 0, 'devtool modify on %s should have failed. devtool output: %s' %  (testrecipe, result.output))
             self.assertIn('ERROR: ', result.output, 'devtool modify on %s should have given an ERROR' % testrecipe)
 
+    @testcase(1365)
     def test_devtool_modify_native(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Try modifying some recipes
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
 
         bbclassextended = False
@@ -400,8 +411,6 @@
     @testcase(1165)
     def test_devtool_modify_git(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'mkelfimage'
         src_uri = get_bb_var('SRC_URI', testrecipe)
         self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
@@ -410,32 +419,26 @@
         # Try modifying a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
         self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), 'Extracted source could not be found')
-        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. devtool output: %s' % result.output)
-        matches = glob.glob(os.path.join(workspacedir, 'appends', 'mkelfimage_*.bbappend'))
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created. devtool output: %s' % result.output)
+        matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'mkelfimage_*.bbappend'))
         self.assertTrue(matches, 'bbappend not created')
         # Test devtool status
         result = runCmd('devtool status')
         self.assertIn(testrecipe, result.output)
         self.assertIn(tempdir, result.output)
         # Check git repo
-        result = runCmd('git status --porcelain', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempdir)
         # Try building
         bitbake(testrecipe)
 
     @testcase(1167)
     def test_devtool_modify_localfiles(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'lighttpd'
         src_uri = (get_bb_var('SRC_URI', testrecipe) or '').split()
         foundlocal = False
@@ -449,13 +452,13 @@
         # Try modifying a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
         self.assertTrue(os.path.exists(os.path.join(tempdir, 'configure.ac')), 'Extracted source could not be found')
-        self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
-        matches = glob.glob(os.path.join(workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
         self.assertTrue(matches, 'bbappend not created')
         # Test devtool status
         result = runCmd('devtool status')
@@ -464,30 +467,46 @@
         # Try building
         bitbake(testrecipe)
 
+    @testcase(1378)
+    def test_devtool_modify_virtual(self):
+        # Try modifying a virtual recipe
+        virtrecipe = 'virtual/libx11'
+        realrecipe = 'libx11'
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(self.workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool modify %s -x %s' % (virtrecipe, tempdir))
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found')
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+        matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % realrecipe))
+        self.assertTrue(matches, 'bbappend not created %s' % result.output)
+        # Test devtool status
+        result = runCmd('devtool status')
+        self.assertNotIn(virtrecipe, result.output)
+        self.assertIn(realrecipe, result.output)
+        # Check git repo
+        self._check_src_repo(tempdir)
+        # This is probably sufficient
+
+
     @testcase(1169)
     def test_devtool_update_recipe(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'minicom'
         recipefile = get_bb_var('FILE', testrecipe)
         src_uri = get_bb_var('SRC_URI', testrecipe)
         self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # First, modify a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         # (don't bother with cleaning the recipe on teardown, we won't be building it)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
         # Check git repo
-        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
-        result = runCmd('git status --porcelain', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempdir)
         # Add a couple of commits
         # FIXME: this only tests adding, need to also test update and remove
         result = runCmd('echo "Additional line" >> README', cwd=tempdir)
@@ -497,25 +516,14 @@
         result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
         self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
         result = runCmd('devtool update-recipe %s' % testrecipe)
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertNotEqual(result.output.strip(), "", '%s recipe should be modified' % testrecipe)
-        status = result.output.splitlines()
-        self.assertEqual(len(status), 3, 'Less/more files modified than expected. Entire status:\n%s' % result.output)
-        for line in status:
-            if line.endswith('0001-Change-the-README.patch'):
-                self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
-            elif line.endswith('0002-Add-a-new-file.patch'):
-                self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
-            elif re.search('%s_[^_]*.bb$' % testrecipe, line):
-                self.assertEqual(line[:3], ' M ', 'Unexpected status in line: %s' % line)
-            else:
-                raise AssertionError('Unexpected modified file in status: %s' % line)
+        expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+                           ('??', '.*/0001-Change-the-README.patch$'),
+                           ('??', '.*/0002-Add-a-new-file.patch$')]
+        self._check_repo_status(os.path.dirname(recipefile), expected_status)
 
     @testcase(1172)
     def test_devtool_update_recipe_git(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'mtd-utils'
         recipefile = get_bb_var('FILE', testrecipe)
         src_uri = get_bb_var('SRC_URI', testrecipe)
@@ -525,21 +533,16 @@
             if entry.startswith('file://') and entry.endswith('.patch'):
                 patches.append(entry[7:].split(';')[0])
         self.assertGreater(len(patches), 0, 'The %s recipe does not appear to contain any patches, so this test will not be effective' % testrecipe)
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # First, modify a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         # (don't bother with cleaning the recipe on teardown, we won't be building it)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
         # Check git repo
-        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
-        result = runCmd('git status --porcelain', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempdir)
         # Add a couple of commits
         # FIXME: this only tests adding, need to also test update and remove
         result = runCmd('echo "# Additional line" >> Makefile', cwd=tempdir)
@@ -549,19 +552,10 @@
         result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
         self.add_command_to_tearDown('cd %s; rm -rf %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
         result = runCmd('devtool update-recipe -m srcrev %s' % testrecipe)
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertNotEqual(result.output.strip(), "", '%s recipe should be modified' % testrecipe)
-        status = result.output.splitlines()
-        for line in status:
-            for patch in patches:
-                if line.endswith(patch):
-                    self.assertEqual(line[:3], ' D ', 'Unexpected status in line: %s' % line)
-                    break
-            else:
-                if re.search('%s_[^_]*.bb$' % testrecipe, line):
-                    self.assertEqual(line[:3], ' M ', 'Unexpected status in line: %s' % line)
-                else:
-                    raise AssertionError('Unexpected modified file in status: %s' % line)
+        expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile))] + \
+                          [(' D', '.*/%s$' % patch) for patch in patches]
+        self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
         result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
         addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"']
         srcurilines = src_uri.split()
@@ -588,50 +582,33 @@
         # Now try with auto mode
         runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
         result = runCmd('devtool update-recipe %s' % testrecipe)
-        result = runCmd('git rev-parse --show-toplevel')
+        result = runCmd('git rev-parse --show-toplevel', cwd=os.path.dirname(recipefile))
         topleveldir = result.output.strip()
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        status = result.output.splitlines()
         relpatchpath = os.path.join(os.path.relpath(os.path.dirname(recipefile), topleveldir), testrecipe)
-        expectedstatus = [('M', os.path.relpath(recipefile, topleveldir)),
-                          ('??', '%s/0001-Change-the-Makefile.patch' % relpatchpath),
-                          ('??', '%s/0002-Add-a-new-file.patch' % relpatchpath)]
-        for line in status:
-            statusline = line.split(None, 1)
-            for fstatus, fn in expectedstatus:
-                if fn == statusline[1]:
-                    if fstatus != statusline[0]:
-                        self.fail('Unexpected status in line: %s' % line)
-                    break
-            else:
-                self.fail('Unexpected modified file in line: %s' % line)
+        expected_status = [(' M', os.path.relpath(recipefile, topleveldir)),
+                           ('??', '%s/0001-Change-the-Makefile.patch' % relpatchpath),
+                           ('??', '%s/0002-Add-a-new-file.patch' % relpatchpath)]
+        self._check_repo_status(os.path.dirname(recipefile), expected_status)
 
     @testcase(1170)
     def test_devtool_update_recipe_append(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'mdadm'
         recipefile = get_bb_var('FILE', testrecipe)
         src_uri = get_bb_var('SRC_URI', testrecipe)
         self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe)
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # First, modify a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         tempsrcdir = os.path.join(tempdir, 'source')
         templayerdir = os.path.join(tempdir, 'layer')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         # (don't bother with cleaning the recipe on teardown, we won't be building it)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
         # Check git repo
-        self.assertTrue(os.path.isdir(os.path.join(tempsrcdir, '.git')), 'git repository for external source tree not found')
-        result = runCmd('git status --porcelain', cwd=tempsrcdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempsrcdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempsrcdir)
         # Add a commit
         result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir)
         result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
@@ -642,8 +619,7 @@
         result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
         self.assertNotIn('WARNING:', result.output)
         # Check recipe is still clean
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # Check bbappend was created
         splitpath = os.path.dirname(recipefile).split(os.sep)
         appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
@@ -685,8 +661,6 @@
     @testcase(1171)
     def test_devtool_update_recipe_append_git(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         testrecipe = 'mtd-utils'
         recipefile = get_bb_var('FILE', testrecipe)
         src_uri = get_bb_var('SRC_URI', testrecipe)
@@ -695,23 +669,18 @@
             if entry.startswith('git://'):
                 git_uri = entry
                 break
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # First, modify a recipe
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         tempsrcdir = os.path.join(tempdir, 'source')
         templayerdir = os.path.join(tempdir, 'layer')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         # (don't bother with cleaning the recipe on teardown, we won't be building it)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempsrcdir))
         # Check git repo
-        self.assertTrue(os.path.isdir(os.path.join(tempsrcdir, '.git')), 'git repository for external source tree not found')
-        result = runCmd('git status --porcelain', cwd=tempsrcdir)
-        self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
-        result = runCmd('git symbolic-ref HEAD', cwd=tempsrcdir)
-        self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+        self._check_src_repo(tempsrcdir)
         # Add a commit
         result = runCmd('echo "# Additional line" >> Makefile', cwd=tempsrcdir)
         result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
@@ -731,8 +700,7 @@
         result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
         self.assertNotIn('WARNING:', result.output)
         # Check recipe is still clean
-        result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
-        self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+        self._check_repo_status(os.path.dirname(recipefile), [])
         # Check bbappend was created
         splitpath = os.path.dirname(recipefile).split(os.sep)
         appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
@@ -779,28 +747,104 @@
             self.assertEqual(expectedlines, f.readlines())
         # Deleting isn't expected to work under these circumstances
 
+    @testcase(1370)
+    def test_devtool_update_recipe_local_files(self):
+        """Check that local source files are copied over instead of patched"""
+        testrecipe = 'makedevs'
+        recipefile = get_bb_var('FILE', testrecipe)
+        # Setup srctree for modifying the recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(self.workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        # (don't bother with cleaning the recipe on teardown, we won't be
+        # building it)
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        # Check git repo
+        self._check_src_repo(tempdir)
+        # Edit / commit local source
+        runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir)
+        runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir)
+        runCmd('echo "Bar" > new-file', cwd=tempdir)
+        runCmd('git add new-file', cwd=tempdir)
+        runCmd('git commit -m "Add new file"', cwd=tempdir)
+        self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
+                                     os.path.dirname(recipefile))
+        runCmd('devtool update-recipe %s' % testrecipe)
+        expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+                           (' M', '.*/makedevs/makedevs.c$'),
+                           ('??', '.*/makedevs/new-local$'),
+                           ('??', '.*/makedevs/0001-Add-new-file.patch$')]
+        self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
+    @testcase(1371)
+    def test_devtool_update_recipe_local_files_2(self):
+        """Check local source files support when oe-local-files is in Git"""
+        testrecipe = 'lzo'
+        recipefile = get_bb_var('FILE', testrecipe)
+        # Setup srctree for modifying the recipe
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(self.workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+        # Check git repo
+        self._check_src_repo(tempdir)
+        # Add oe-local-files to Git
+        runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
+        runCmd('git add oe-local-files', cwd=tempdir)
+        runCmd('git commit -m "Add local sources"', cwd=tempdir)
+        # Edit / commit local sources
+        runCmd('echo "# Foobar" >> oe-local-files/acinclude.m4', cwd=tempdir)
+        runCmd('git commit -am "Edit existing file"', cwd=tempdir)
+        runCmd('git rm oe-local-files/run-ptest', cwd=tempdir)
+        runCmd('git commit -m"Remove file"', cwd=tempdir)
+        runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir)
+        runCmd('git add oe-local-files/new-local', cwd=tempdir)
+        runCmd('git commit -m "Add new local file"', cwd=tempdir)
+        runCmd('echo "Gar" > new-file', cwd=tempdir)
+        runCmd('git add new-file', cwd=tempdir)
+        runCmd('git commit -m "Add new file"', cwd=tempdir)
+        self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
+                                     os.path.dirname(recipefile))
+        # Checkout unmodified file to working copy -> devtool should still pick
+        # the modified version from HEAD
+        runCmd('git checkout HEAD^ -- oe-local-files/acinclude.m4', cwd=tempdir)
+        runCmd('devtool update-recipe %s' % testrecipe)
+        expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
+                           (' M', '.*/acinclude.m4$'),
+                           (' D', '.*/run-ptest$'),
+                           ('??', '.*/new-local$'),
+                           ('??', '.*/0001-Add-new-file.patch$')]
+        self._check_repo_status(os.path.dirname(recipefile), expected_status)
+
     @testcase(1163)
     def test_devtool_extract(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         # Try devtool extract
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         result = runCmd('devtool extract remake %s' % tempdir)
         self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found')
-        self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+        self._check_src_repo(tempdir)
+
+    @testcase(1379)
+    def test_devtool_extract_virtual(self):
+        tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+        # Try devtool extract
+        self.track_for_cleanup(tempdir)
+        self.track_for_cleanup(self.workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+        result = runCmd('devtool extract virtual/libx11 %s' % tempdir)
+        self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found')
+        self._check_src_repo(tempdir)
 
     @testcase(1168)
     def test_devtool_reset_all(self):
-        # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         testrecipe1 = 'mdadm'
         testrecipe2 = 'cronie'
@@ -823,6 +867,7 @@
         matches2 = glob.glob(stampprefix2 + '*')
         self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
 
+    @testcase(1272)
     def test_devtool_deploy_target(self):
         # NOTE: Whilst this test would seemingly be better placed as a runtime test,
         # unfortunately the runtime tests run under bitbake and you can't run
@@ -846,8 +891,7 @@
                 break
         else:
             self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Definitions
         testrecipe = 'mdadm'
         testfile = '/sbin/mdadm'
@@ -863,7 +907,7 @@
         # Try devtool modify
         tempdir = tempfile.mkdtemp(prefix='devtoolqa')
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
         result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
@@ -908,18 +952,19 @@
             result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand), ignore_status=True)
             self.assertNotEqual(result, 0, 'undeploy-target did not remove command as it should have')
 
+    @testcase(1366)
     def test_devtool_build_image(self):
         """Test devtool build-image plugin"""
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         image = 'core-image-minimal'
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
         self.add_command_to_tearDown('bitbake -c clean %s' % image)
         bitbake('%s -c clean' % image)
         # Add target and native recipes to workspace
-        for recipe in ('mdadm', 'parted-native'):
+        recipes = ['mdadm', 'parted-native']
+        for recipe in recipes:
             tempdir = tempfile.mkdtemp(prefix='devtoolqa')
             self.track_for_cleanup(tempdir)
             self.add_command_to_tearDown('bitbake -c clean %s' % recipe)
@@ -927,17 +972,24 @@
         # Try to build image
         result = runCmd('devtool build-image %s' % image)
         self.assertNotEqual(result, 0, 'devtool build-image failed')
-        # Check if image.bbappend has required content
-        bbappend = os.path.join(workspacedir, 'appends', image+'.bbappend')
-        self.assertTrue(os.path.isfile(bbappend), 'bbappend not created %s' % result.output)
-        # NOTE: native recipe parted-native should not be in IMAGE_INSTALL_append
-        self.assertTrue('IMAGE_INSTALL_append = " mdadm"\n' in open(bbappend).readlines(),
-                        'IMAGE_INSTALL_append = " mdadm" not found in %s' % bbappend)
+        # Check if image contains expected packages
+        deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
+        image_link_name = get_bb_var('IMAGE_LINK_NAME', image)
+        reqpkgs = [item for item in recipes if not item.endswith('-native')]
+        with open(os.path.join(deploy_dir_image, image_link_name + '.manifest'), 'r') as f:
+            for line in f:
+                splitval = line.split()
+                if splitval:
+                    pkg = splitval[0]
+                    if pkg in reqpkgs:
+                        reqpkgs.remove(pkg)
+        if reqpkgs:
+            self.fail('The following packages were not present in the image as expected: %s' % ', '.join(reqpkgs))
 
+    @testcase(1367)
     def test_devtool_upgrade(self):
         # Check preconditions
-        workspacedir = os.path.join(self.builddir, 'workspace')
-        self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+        self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
         # Check parameters
         result = runCmd('devtool upgrade -h')
         for param in 'recipename srctree --version -V --branch -b --keep-temp --no-patch'.split():
@@ -955,9 +1007,9 @@
         # Check if srctree at least is populated
         self.assertTrue(len(os.listdir(tempdir)) > 0, 'scrtree (%s) should be populated with new (%s) source code' % (tempdir, version))
         # Check new recipe folder is present
-        self.assertTrue(os.path.exists(os.path.join(workspacedir,'recipes',recipe)), 'Recipe folder should exist')
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir,'recipes',recipe)), 'Recipe folder should exist')
         # Check new recipe file is present
-        self.assertTrue(os.path.exists(os.path.join(workspacedir,'recipes',recipe,"%s_%s.bb" % (recipe,version))), 'Recipe folder should exist')
+        self.assertTrue(os.path.exists(os.path.join(self.workspacedir,'recipes',recipe,"%s_%s.bb" % (recipe,version))), 'Recipe folder should exist')
         # Check devtool status and make sure recipe is present
         result = runCmd('devtool status')
         self.assertIn(recipe, result.output)
@@ -967,5 +1019,18 @@
         result = runCmd('devtool status')
         self.assertNotIn(recipe, result.output)
         self.track_for_cleanup(tempdir)
-        self.track_for_cleanup(workspacedir)
+        self.track_for_cleanup(self.workspacedir)
         self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+    @testcase(1352)
+    def test_devtool_layer_plugins(self):
+        """Test that devtool can use plugins from other layers.
+
+        This test executes the selftest-reverse command from meta-selftest."""
+
+        self.track_for_cleanup(self.workspacedir)
+        self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+
+        s = "Microsoft Made No Profit From Anyone's Zunes Yo"
+        result = runCmd("devtool --quiet selftest-reverse \"%s\"" % s)
+        self.assertEqual(result.output, s[::-1])
diff --git a/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py b/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py
index fcffc42..4efb0d9 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py
@@ -25,9 +25,7 @@
         features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password"\n'
         features += 'INHERIT += "extrausers"\n'
         features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
-
-        # Append 'features' to local.conf
-        self.append_config(features)
+        self.write_config(features)
 
         # Build a core-image-minimal
         bitbake('core-image-minimal')
@@ -53,9 +51,7 @@
         features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password"\n'
         features += 'INHERIT += "extrausers"\n'
         features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
-
-        # Append 'features' to local.conf
-        self.append_config(features)
+        self.write_config(features)
 
         # Build a core-image-minimal
         bitbake('core-image-minimal')
@@ -87,9 +83,7 @@
         features += 'IMAGE_INSTALL_append = " openssh"\n'
         features += 'EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password package-management"\n'
         features += 'RPMROOTFSDEPENDS_remove = "rpmresolve-native:do_populate_sysroot"'
-
-        # Append 'features' to local.conf
-        self.append_config(features)
+        self.write_config(features)
 
         # Build a core-image-minimal
         bitbake('core-image-minimal')
@@ -159,9 +153,7 @@
 
         features = 'DISTRO_FEATURES_append = " wayland"\n'
         features += 'CORE_IMAGE_EXTRA_INSTALL += "wayland weston"'
-
-        # Append 'features' to local.conf
-        self.append_config(features)
+        self.write_config(features)
 
         # Build a core-image-weston
         bitbake('core-image-weston')
diff --git a/yocto-poky/meta/lib/oeqa/selftest/layerappend.py b/yocto-poky/meta/lib/oeqa/selftest/layerappend.py
index a82a6c8..4de5034 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/layerappend.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/layerappend.py
@@ -46,10 +46,11 @@
 
 SRC_URI_append += "file://appendtest.txt"
 """
-    layerappend = "BBLAYERS += \"COREBASE/meta-layertest0 COREBASE/meta-layertest1 COREBASE/meta-layertest2\""
+    layerappend = ''
 
     def tearDownLocal(self):
-        ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend.replace("COREBASE", self.builddir + "/.."))
+        if self.layerappend:
+            ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
 
     @testcase(1196)
     def test_layer_appends(self):
@@ -79,7 +80,9 @@
                 with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
                     f.write("Layer 2 test")
             self.track_for_cleanup(layer)
-        ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend.replace("COREBASE", self.builddir + "/.."))
+
+        self.layerappend = "BBLAYERS += \"{0}/meta-layertest0 {0}/meta-layertest1 {0}/meta-layertest2\"".format(corebase)
+        ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
         bitbake("layerappendtest")
         data = ftools.read_file(stagingdir + "/appendtest.txt")
         self.assertEqual(data, "Layer 2 test")
diff --git a/yocto-poky/meta/lib/oeqa/selftest/manifest.py b/yocto-poky/meta/lib/oeqa/selftest/manifest.py
new file mode 100644
index 0000000..44d0404
--- /dev/null
+++ b/yocto-poky/meta/lib/oeqa/selftest/manifest.py
@@ -0,0 +1,165 @@
+import unittest
+import os
+
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import get_bb_var, bitbake
+from oeqa.utils.decorators import testcase
+
+class ManifestEntry:
+    '''A manifest item of a collection able to list missing packages'''
+    def __init__(self, entry):
+        self.file = entry
+        self.missing = []
+
+class VerifyManifest(oeSelfTest):
+    '''Tests for the manifest files and contents of an image'''
+
+    @classmethod
+    def check_manifest_entries(self, manifest, path):
+        manifest_errors = []
+        try:
+            with open(manifest, "r") as mfile:
+                for line in mfile:
+                    manifest_entry = os.path.join(path, line.split()[0])
+                    self.log.debug("{}: looking for {}"\
+                                    .format(self.classname, manifest_entry))
+                    if not os.path.isfile(manifest_entry):
+                        manifest_errors.append(manifest_entry)
+                        self.log.debug("{}: {} not found"\
+                                    .format(self.classname, manifest_entry))
+        except OSError as e:
+            self.log.debug("{}: checking of {} failed"\
+                    .format(self.classname, manifest))
+            raise e
+
+        return manifest_errors
+
+    #this will possibly move from here
+    @classmethod
+    def get_dir_from_bb_var(self, bb_var, target = None):
+        target == self.buildtarget if target == None else target
+        directory = get_bb_var(bb_var, target);
+        if not directory or not os.path.isdir(directory):
+            self.log.debug("{}: {} points to {} when target = {}"\
+                    .format(self.classname, bb_var, directory, target))
+            raise OSError
+        return directory
+
+    @classmethod
+    def setUpClass(self):
+
+        self.buildtarget = 'core-image-minimal'
+        self.classname = 'VerifyManifest'
+
+        self.log.info("{}: doing bitbake {} as a prerequisite of the test"\
+                .format(self.classname, self.buildtarget))
+        if bitbake(self.buildtarget).status:
+            self.log.debug("{} Failed to setup {}"\
+                    .format(self.classname, self.buildtarget))
+            unittest.SkipTest("{}: Cannot setup testing scenario"\
+                    .format(self.classname))
+
+    @testcase(1380)
+    def test_SDK_manifest_entries(self):
+        '''Verifying the SDK manifest entries exist, this may take a build'''
+
+        # the setup should bitbake core-image-minimal and here it is required
+        # to do an additional setup for the sdk
+        sdktask = '-c populate_sdk'
+        bbargs = sdktask + ' ' + self.buildtarget
+        self.log.debug("{}: doing bitbake {} as a prerequisite of the test"\
+                .format(self.classname, bbargs))
+        if bitbake(bbargs).status:
+            self.log.debug("{} Failed to bitbake {}"\
+                    .format(self.classname, bbargs))
+            unittest.SkipTest("{}: Cannot setup testing scenario"\
+                    .format(self.classname))
+
+
+        pkgdata_dir = reverse_dir = {}
+        mfilename = mpath = m_entry = {}
+        # get manifest location based on target to query about
+        d_target= dict(target = self.buildtarget,
+                         host = 'nativesdk-packagegroup-sdk-host')
+        try:
+            mdir = self.get_dir_from_bb_var('SDK_DEPLOY', self.buildtarget)
+            for k in d_target.keys():
+                mfilename[k] = "{}-toolchain-{}.{}.manifest".format(
+                        get_bb_var("SDK_NAME", self.buildtarget),
+                        get_bb_var("SDK_VERSION", self.buildtarget),
+                        k)
+                mpath[k] = os.path.join(mdir, mfilename[k])
+                if not os.path.isfile(mpath[k]):
+                    self.log.debug("{}: {} does not exist".format(
+                        self.classname, mpath[k]))
+                    raise IOError
+                m_entry[k] = ManifestEntry(mpath[k])
+
+                pkgdata_dir[k] = self.get_dir_from_bb_var('PKGDATA_DIR',
+                        d_target[k])
+                reverse_dir[k] = os.path.join(pkgdata_dir[k],
+                        'runtime-reverse')
+                if not os.path.exists(reverse_dir[k]):
+                    self.log.debug("{}: {} does not exist".format(
+                        self.classname, reverse_dir[k]))
+                    raise IOError
+        except OSError:
+            raise unittest.SkipTest("{}: Error in obtaining manifest dirs"\
+                .format(self.classname))
+        except IOError:
+            msg = "{}: Error cannot find manifests in the specified dir:\n{}"\
+                    .format(self.classname, mdir)
+            self.fail(msg)
+
+        for k in d_target.keys():
+            self.log.debug("{}: Check manifest {}".format(
+                self.classname, m_entry[k].file))
+
+            m_entry[k].missing = self.check_manifest_entries(\
+                                               m_entry[k].file,reverse_dir[k])
+            if m_entry[k].missing:
+                msg = '{}: {} Error has the following missing entries'\
+                        .format(self.classname, m_entry[k].file)
+                logmsg = msg+':\n'+'\n'.join(m_entry[k].missing)
+                self.log.debug(logmsg)
+                self.log.info(msg)
+                self.fail(logmsg)
+
+    @testcase(1381)
+    def test_image_manifest_entries(self):
+        '''Verifying the image manifest entries exist'''
+
+        # get manifest location based on target to query about
+        try:
+            mdir = self.get_dir_from_bb_var('DEPLOY_DIR_IMAGE',
+                                                self.buildtarget)
+            mfilename = get_bb_var("IMAGE_LINK_NAME", self.buildtarget)\
+                    + ".manifest"
+            mpath = os.path.join(mdir, mfilename)
+            if not os.path.isfile(mpath): raise IOError
+            m_entry = ManifestEntry(mpath)
+
+            pkgdata_dir = {}
+            pkgdata_dir = self.get_dir_from_bb_var('PKGDATA_DIR',
+                                                self.buildtarget)
+            revdir = os.path.join(pkgdata_dir, 'runtime-reverse')
+            if not os.path.exists(revdir): raise IOError
+        except OSError:
+            raise unittest.SkipTest("{}: Error in obtaining manifest dirs"\
+                .format(self.classname))
+        except IOError:
+            msg = "{}: Error cannot find manifests in dir:\n{}"\
+                    .format(self.classname, mdir)
+            self.fail(msg)
+
+        self.log.debug("{}: Check manifest {}"\
+                            .format(self.classname, m_entry.file))
+        m_entry.missing = self.check_manifest_entries(\
+                                                    m_entry.file, revdir)
+        if m_entry.missing:
+            msg = '{}: {} Error has the following missing entries'\
+                    .format(self.classname, m_entry.file)
+            logmsg = msg+':\n'+'\n'.join(m_entry.missing)
+            self.log.debug(logmsg)
+            self.log.info(msg)
+            self.fail(logmsg)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/recipetool.py b/yocto-poky/meta/lib/oeqa/selftest/recipetool.py
index c34ad68..b1f1d2a 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/recipetool.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/recipetool.py
@@ -492,9 +492,12 @@
 
 
 class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
+
+    @testcase(1273)
     def test_recipetool_appendsrcfile_basic(self):
         self._test_appendsrcfile('base-files', 'a-file')
 
+    @testcase(1274)
     def test_recipetool_appendsrcfile_basic_wildcard(self):
         testrecipe = 'base-files'
         self._test_appendsrcfile(testrecipe, 'a-file', options='-w')
@@ -502,12 +505,15 @@
         bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir)
         self.assertEqual(os.path.basename(bbappendfile), '%s_%%.bbappend' % testrecipe)
 
+    @testcase(1281)
     def test_recipetool_appendsrcfile_subdir_basic(self):
         self._test_appendsrcfile('base-files', 'a-file', 'tmp')
 
+    @testcase(1282)
     def test_recipetool_appendsrcfile_subdir_basic_dirdest(self):
         self._test_appendsrcfile('base-files', destdir='tmp')
 
+    @testcase(1280)
     def test_recipetool_appendsrcfile_srcdir_basic(self):
         testrecipe = 'bash'
         srcdir = get_bb_var('S', testrecipe)
@@ -515,12 +521,14 @@
         subdir = os.path.relpath(srcdir, workdir)
         self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir)
 
+    @testcase(1275)
     def test_recipetool_appendsrcfile_existing_in_src_uri(self):
         testrecipe = 'base-files'
         filepath = self._get_first_file_uri(testrecipe)
         self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
         self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
 
+    @testcase(1276)
     def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self):
         testrecipe = 'base-files'
         subdir = 'tmp'
@@ -530,6 +538,7 @@
         output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False)
         self.assertTrue(any('with different parameters' in l for l in output))
 
+    @testcase(1277)
     def test_recipetool_appendsrcfile_replace_file_srcdir(self):
         testrecipe = 'bash'
         filepath = 'Makefile.in'
@@ -541,6 +550,7 @@
         bitbake('%s:do_unpack' % testrecipe)
         self.assertEqual(open(self.testfile, 'r').read(), open(os.path.join(srcdir, filepath), 'r').read())
 
+    @testcase(1278)
     def test_recipetool_appendsrcfiles_basic(self, destdir=None):
         newfiles = [self.testfile]
         for i in range(1, 5):
@@ -550,5 +560,6 @@
             newfiles.append(testfile)
         self._test_appendsrcfiles('gcc', newfiles, destdir=destdir, options='-W')
 
+    @testcase(1279)
     def test_recipetool_appendsrcfiles_basic_subdir(self):
         self.test_recipetool_appendsrcfiles_basic(destdir='testdir')
diff --git a/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py b/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py
index c4efc47..3c23062 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py
@@ -34,7 +34,7 @@
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
 
-    @testcase(975)
+    @testcase(1374)
     def test_sstate_creation_distro_specific_fail(self):
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
@@ -43,7 +43,7 @@
     def test_sstate_creation_distro_nonspecific_pass(self):
         self.run_test_sstate_creation(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
 
-    @testcase(976)
+    @testcase(1375)
     def test_sstate_creation_distro_nonspecific_fail(self):
         self.run_test_sstate_creation(['glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
 
@@ -70,11 +70,11 @@
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_cleansstate_task(['binutils-cross-' + targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
 
-    @testcase(977)
+    @testcase(1376)
     def test_cleansstate_task_distro_nonspecific(self):
         self.run_test_cleansstate_task(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
 
-    @testcase(977)
+    @testcase(1377)
     def test_cleansstate_task_distro_specific(self):
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_cleansstate_task(['binutils-cross-'+ targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
@@ -111,12 +111,12 @@
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch, 'binutils-native'], temp_sstate_location=True)
 
-    @testcase(175)
+    @testcase(1372)
     def test_rebuild_distro_specific_sstate_cross_target(self):
         targetarch = get_bb_var('TUNE_ARCH')
         self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch], temp_sstate_location=True)
 
-    @testcase(175)
+    @testcase(1373)
     def test_rebuild_distro_specific_sstate_native_target(self):
         self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
 
@@ -211,6 +211,8 @@
         they're built on a 32 or 64 bit system. Rather than requiring two different 
         build machines and running a builds, override the variables calling uname()
         manually and check using bitbake -S.
+        
+        Also check that SDKMACHINE changing doesn't change any of these stamps.
         """
 
         topdir = get_bb_var('TOPDIR')
@@ -219,6 +221,7 @@
 TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
 BUILD_ARCH = \"x86_64\"
 BUILD_OS = \"linux\"
+SDKMACHINE = \"x86_64\"
 """)
         self.track_for_cleanup(topdir + "/tmp-sstatesamehash")
         bitbake("core-image-sato -S none")
@@ -226,6 +229,7 @@
 TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
 BUILD_ARCH = \"i686\"
 BUILD_OS = \"linux\"
+SDKMACHINE = \"i686\"
 """)
         self.track_for_cleanup(topdir + "/tmp-sstatesamehash2")
         bitbake("core-image-sato -S none")
@@ -233,11 +237,16 @@
         def get_files(d):
             f = []
             for root, dirs, files in os.walk(d):
+                if "core-image-sato" in root:
+                        # SDKMACHINE changing will change do_rootfs/do_testimage/do_build stamps of core-image-sato itself
+                        # which is safe to ignore
+                        continue
                 f.extend(os.path.join(root, name) for name in files)
             return f
         files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/")
         files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/")
         files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash").replace("i686-linux", "x86_64-linux").replace("i686" + targetvendor + "-linux", "x86_64" + targetvendor + "-linux", ) for x in files2]
+        self.maxDiff = None
         self.assertItemsEqual(files1, files2)
 
 
@@ -271,11 +280,13 @@
         files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/")
         files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/")
         files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+        self.maxDiff = None
         self.assertItemsEqual(files1, files2)
 
+    @testcase(1368)
     def test_sstate_allarch_samesigs(self):
         """
-        The sstate checksums off allarch packages should be independent of whichever 
+        The sstate checksums of allarch packages should be independent of whichever 
         MACHINE is set. Check this using bitbake -S.
         Also, rather than duplicate the test, check nativesdk stamps are the same between
         the two MACHINE values.
@@ -319,4 +330,50 @@
         files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
         self.maxDiff = None
         self.assertItemsEqual(files1, files2)
-        
+
+    @testcase(1369)
+    def test_sstate_sametune_samesigs(self):
+        """
+        The sstate checksums of two identical machines (using the same tune) should be the 
+        same, apart from changes within the machine specific stamps directory. We use the
+        qemux86copy machine to test this. Also include multilibs in the test.
+        """
+
+        topdir = get_bb_var('TOPDIR')
+        targetos = get_bb_var('TARGET_OS')
+        targetvendor = get_bb_var('TARGET_VENDOR')
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
+MACHINE = \"qemux86\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash")
+        bitbake("world meta-toolchain -S none")
+        self.write_config("""
+TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
+MACHINE = \"qemux86copy\"
+require conf/multilib.conf
+MULTILIBS = "multilib:lib32"
+DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
+""")
+        self.track_for_cleanup(topdir + "/tmp-sstatesamehash2")
+        bitbake("world meta-toolchain -S none")
+
+        def get_files(d):
+            f = []
+            for root, dirs, files in os.walk(d):
+                for name in files:
+                    if "meta-environment" in root or "cross-canadian" in root:
+                        continue
+                    if "qemux86copy-" in root or "qemux86-" in root:
+                        continue
+                    if "do_build" not in name and "do_populate_sdk" not in name:
+                        f.append(os.path.join(root, name))
+            return f
+        files1 = get_files(topdir + "/tmp-sstatesamehash/stamps")
+        files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps")
+        files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
+        self.maxDiff = None
+        self.assertItemsEqual(files1, files2)
diff --git a/yocto-poky/meta/lib/oeqa/selftest/wic.py b/yocto-poky/meta/lib/oeqa/selftest/wic.py
index 3dc54a4..ea78e22 100644
--- a/yocto-poky/meta/lib/oeqa/selftest/wic.py
+++ b/yocto-poky/meta/lib/oeqa/selftest/wic.py
@@ -31,50 +31,54 @@
 
 from oeqa.selftest.base import oeSelfTest
 from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+
 
 class Wic(oeSelfTest):
     """Wic test class."""
 
     resultdir = "/var/tmp/wic/build/"
+    image_is_ready = False
 
-    @classmethod
-    def setUpClass(cls):
-        """Build wic runtime dependencies."""
-        bitbake('syslinux syslinux-native parted-native gptfdisk-native '
-                'dosfstools-native mtools-native')
-        Wic.image_is_ready = False
-
-    def setUp(self):
+    def setUpLocal(self):
         """This code is executed before each test method."""
+        self.write_config('IMAGE_FSTYPES += " hddimg"\nMACHINE_FEATURES_append = " efi"\n')
+
+        # Do this here instead of in setUpClass as the base setUp does some
+        # clean up which can result in the native tools built earlier in
+        # setUpClass being unavailable.
         if not Wic.image_is_ready:
-            # build core-image-minimal with required features
-            features = 'IMAGE_FSTYPES += " hddimg"\nMACHINE_FEATURES_append = " efi"\n'
-            self.append_config(features)
+            bitbake('syslinux syslinux-native parted-native gptfdisk-native '
+                    'dosfstools-native mtools-native')
             bitbake('core-image-minimal')
-            # set this class variable to avoid buiding image many times
             Wic.image_is_ready = True
 
         rmtree(self.resultdir, ignore_errors=True)
 
-    def test01_help(self):
+    @testcase(1208)
+    def test_help(self):
         """Test wic --help"""
         self.assertEqual(0, runCmd('wic --help').status)
 
-    def test02_createhelp(self):
+    @testcase(1209)
+    def test_createhelp(self):
         """Test wic create --help"""
         self.assertEqual(0, runCmd('wic create --help').status)
 
-    def test03_listhelp(self):
+    @testcase(1210)
+    def test_listhelp(self):
         """Test wic list --help"""
         self.assertEqual(0, runCmd('wic list --help').status)
 
-    def test04_build_image_name(self):
+    @testcase(1211)
+    def test_build_image_name(self):
         """Test wic create directdisk --image-name core-image-minimal"""
         self.assertEqual(0, runCmd("wic create directdisk "
                                    "--image-name core-image-minimal").status)
         self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
 
-    def test05_build_artifacts(self):
+    @testcase(1212)
+    def test_build_artifacts(self):
         """Test wic create directdisk providing all artifacts."""
         vars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \
                         for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE',
@@ -87,34 +91,41 @@
         self.assertEqual(0, status)
         self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
 
-    def test06_gpt_image(self):
+    @testcase(1157)
+    def test_gpt_image(self):
         """Test creation of core-image-minimal with gpt table and UUID boot"""
         self.assertEqual(0, runCmd("wic create directdisk-gpt "
                                    "--image-name core-image-minimal").status)
         self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct")))
 
-    def test07_unsupported_subcommand(self):
+    @testcase(1213)
+    def test_unsupported_subcommand(self):
         """Test unsupported subcommand"""
         self.assertEqual(1, runCmd('wic unsupported',
                          ignore_status=True).status)
 
-    def test08_no_command(self):
+    @testcase(1214)
+    def test_no_command(self):
         """Test wic without command"""
         self.assertEqual(1, runCmd('wic', ignore_status=True).status)
 
-    def test09_help_kickstart(self):
+    @testcase(1215)
+    def test_help_overview(self):
         """Test wic help overview"""
         self.assertEqual(0, runCmd('wic help overview').status)
 
-    def test10_help_plugins(self):
+    @testcase(1216)
+    def test_help_plugins(self):
         """Test wic help plugins"""
         self.assertEqual(0, runCmd('wic help plugins').status)
 
-    def test11_help_kickstart(self):
+    @testcase(1217)
+    def test_help_kickstart(self):
         """Test wic help kickstart"""
         self.assertEqual(0, runCmd('wic help kickstart').status)
 
-    def test12_compress_gzip(self):
+    @testcase(1264)
+    def test_compress_gzip(self):
         """Test compressing an image with gzip"""
         self.assertEqual(0, runCmd("wic create directdisk "
                                    "--image-name core-image-minimal "
@@ -122,7 +133,8 @@
         self.assertEqual(1, len(glob(self.resultdir + \
                                          "directdisk-*.direct.gz")))
 
-    def test13_compress_gzip(self):
+    @testcase(1265)
+    def test_compress_bzip2(self):
         """Test compressing an image with bzip2"""
         self.assertEqual(0, runCmd("wic create directdisk "
                                    "--image-name core-image-minimal "
@@ -130,7 +142,8 @@
         self.assertEqual(1, len(glob(self.resultdir + \
                                          "directdisk-*.direct.bz2")))
 
-    def test14_compress_gzip(self):
+    @testcase(1266)
+    def test_compress_xz(self):
         """Test compressing an image with xz"""
         self.assertEqual(0, runCmd("wic create directdisk "
                                    "--image-name core-image-minimal "
@@ -138,13 +151,15 @@
         self.assertEqual(1, len(glob(self.resultdir + \
                                          "directdisk-*.direct.xz")))
 
-    def test15_wrong_compressor(self):
+    @testcase(1267)
+    def test_wrong_compressor(self):
         """Test how wic breaks if wrong compressor is provided"""
         self.assertEqual(2, runCmd("wic create directdisk "
                                    "--image-name core-image-minimal "
                                    "-c wrong", ignore_status=True).status)
 
-    def test16_rootfs_indirect_recipes(self):
+    @testcase(1268)
+    def test_rootfs_indirect_recipes(self):
         """Test usage of rootfs plugin with rootfs recipes"""
         wks = "directdisk-multi-rootfs"
         self.assertEqual(0, runCmd("wic create %s "
@@ -154,7 +169,8 @@
                                    % wks).status)
         self.assertEqual(1, len(glob(self.resultdir + "%s*.direct" % wks)))
 
-    def test17_rootfs_artifacts(self):
+    @testcase(1269)
+    def test_rootfs_artifacts(self):
         """Test usage of rootfs plugin with rootfs paths"""
         vars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \
                         for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE',
@@ -171,14 +187,16 @@
         self.assertEqual(1, len(glob(self.resultdir + \
                                      "%(wks)s-*.direct" % vars)))
 
-    def test18_iso_image(self):
-        """Test creation of hybrid iso imagewith legacy and EFI boot"""
+    @testcase(1346)
+    def test_iso_image(self):
+        """Test creation of hybrid iso image with legacy and EFI boot"""
         self.assertEqual(0, runCmd("wic create mkhybridiso "
                                    "--image-name core-image-minimal").status)
         self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct")))
         self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso")))
 
-    def test19_image_env(self):
+    @testcase(1347)
+    def test_image_env(self):
         """Test generation of <image>.env files."""
         image = 'core-image-minimal'
         stdir = get_bb_var('STAGING_DIR_TARGET', image)
@@ -200,7 +218,8 @@
                 self.assertTrue(var in content, "%s is not in .env file" % var)
                 self.assertTrue(content[var])
 
-    def test20_wic_image_type(self):
+    @testcase(1351)
+    def test_wic_image_type(self):
         """Test building wic images by bitbake"""
         self.assertEqual(0, bitbake('wic-image-minimal').status)
 
@@ -214,21 +233,24 @@
             self.assertTrue(os.path.islink(path))
             self.assertTrue(os.path.isfile(os.path.realpath(path)))
 
-    def test21_qemux86_directdisk(self):
+    @testcase(1348)
+    def test_qemux86_directdisk(self):
         """Test creation of qemux-86-directdisk image"""
         image = "qemux86-directdisk"
         self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
                                    % image).status)
         self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image)))
 
-    def test22_mkgummidisk(self):
+    @testcase(1349)
+    def test_mkgummidisk(self):
         """Test creation of mkgummidisk image"""
         image = "mkgummidisk"
         self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
                                    % image).status)
         self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image)))
 
-    def test23_mkefidisk(self):
+    @testcase(1350)
+    def test_mkefidisk(self):
         """Test creation of mkefidisk image"""
         image = "mkefidisk"
         self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \
diff --git a/yocto-poky/meta/lib/oeqa/utils/decorators.py b/yocto-poky/meta/lib/oeqa/utils/decorators.py
index b6adcb1..0d79223 100644
--- a/yocto-poky/meta/lib/oeqa/utils/decorators.py
+++ b/yocto-poky/meta/lib/oeqa/utils/decorators.py
@@ -33,6 +33,10 @@
                     ret.append(s.replace("setUpModule (", "").replace(")",""))
                 else:
                     ret.append(s)
+                # Append also the test without the full path
+                testname = s.split('.')[-1]
+                if testname:
+                    ret.append(testname)
             return ret
         self.faillist = handleList(upperf.f_locals['result'].failures)
         self.errorlist = handleList(upperf.f_locals['result'].errors)
@@ -53,11 +57,11 @@
         self.testcase = testcase
 
     def __call__(self,f):
-        def wrapped_f(*args):
+        def wrapped_f(*args, **kwargs):
             res = getResults()
             if self.testcase in (res.getFailList() or res.getErrorList()):
                 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
-            return f(*args)
+            return f(*args, **kwargs)
         wrapped_f.__name__ = f.__name__
         return wrapped_f
 
@@ -67,11 +71,11 @@
         self.testcase = testcase
 
     def __call__(self,f):
-        def wrapped_f(*args):
+        def wrapped_f(*args, **kwargs):
             res = getResults()
             if self.testcase in res.getSkipList():
                 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
-            return f(*args)
+            return f(*args, **kwargs)
         wrapped_f.__name__ = f.__name__
         return wrapped_f
 
@@ -81,13 +85,13 @@
         self.testcase = testcase
 
     def __call__(self,f):
-        def wrapped_f(*args):
+        def wrapped_f(*args, **kwargs):
             res = getResults()
             if self.testcase in res.getSkipList() or \
                     self.testcase in res.getFailList() or \
                     self.testcase in res.getErrorList():
                 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
-            return f(*args)
+            return f(*args, **kwargs)
         wrapped_f.__name__ = f.__name__
         wrapped_f._depends_on = self.testcase
         return wrapped_f
@@ -98,8 +102,8 @@
         self.test_case = test_case
 
     def __call__(self, func):
-        def wrapped_f(*args):
-            return func(*args)
+        def wrapped_f(*args, **kwargs):
+            return func(*args, **kwargs)
         wrapped_f.test_case = self.test_case
         wrapped_f.__name__ = func.__name__
         return wrapped_f
@@ -111,6 +115,12 @@
 def LogResults(original_class):
     orig_method = original_class.run
 
+    from time import strftime, gmtime
+    caller = os.path.basename(sys.argv[0])
+    timestamp = strftime('%Y%m%d%H%M%S',gmtime())
+    logfile = os.path.join(os.getcwd(),'results-'+caller+'.'+timestamp+'.log')
+    linkfile = os.path.join(os.getcwd(),'results-'+caller+'.log')
+
     #rewrite the run method of unittest.TestCase to add testcase logging
     def run(self, result, *args, **kws):
         orig_method(self, result, *args, **kws)
@@ -127,14 +137,13 @@
         #create custom logging level for filtering.
         custom_log_level = 100
         logging.addLevelName(custom_log_level, 'RESULTS')
-        caller = os.path.basename(sys.argv[0])
 
         def results(self, message, *args, **kws):
             if self.isEnabledFor(custom_log_level):
                 self.log(custom_log_level, message, *args, **kws)
         logging.Logger.results = results
 
-        logging.basicConfig(filename=os.path.join(os.getcwd(),'results-'+caller+'.log'),
+        logging.basicConfig(filename=logfile,
                             filemode='w',
                             format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                             datefmt='%H:%M:%S',
@@ -162,7 +171,13 @@
         if passed:
             local_log.results("Testcase "+str(test_case)+": PASSED")
 
+        # Create symlink to the current log
+        if os.path.exists(linkfile):
+            os.remove(linkfile)
+        os.symlink(logfile, linkfile)
+
     original_class.run = run
+
     return original_class
 
 class TimeOut(BaseException):
diff --git a/yocto-poky/meta/lib/oeqa/utils/dump.py b/yocto-poky/meta/lib/oeqa/utils/dump.py
index 4ae871c..63a591d 100644
--- a/yocto-poky/meta/lib/oeqa/utils/dump.py
+++ b/yocto-poky/meta/lib/oeqa/utils/dump.py
@@ -16,9 +16,20 @@
 
     def __init__(self, cmds, parent_dir):
         self.cmds = []
-        self.parent_dir = parent_dir
+        # Some testing doesn't inherit testimage, so it is needed
+        # to set some defaults.
+        self.parent_dir = parent_dir or "/tmp/oe-saved-tests"
+        dft_cmds = """  top -bn1
+                        iostat -x -z -N -d -p ALL 20 2
+                        ps -ef
+                        free
+                        df
+                        memstat
+                        dmesg
+                        ip -s link
+                        netstat -an"""
         if not cmds:
-            return
+            cmds = dft_cmds
         for cmd in cmds.split('\n'):
             cmd = cmd.lstrip()
             if not cmd or cmd[0] == '#':
diff --git a/yocto-poky/meta/lib/oeqa/utils/ftools.py b/yocto-poky/meta/lib/oeqa/utils/ftools.py
index 64ebe3d..1bd9a30 100644
--- a/yocto-poky/meta/lib/oeqa/utils/ftools.py
+++ b/yocto-poky/meta/lib/oeqa/utils/ftools.py
@@ -1,12 +1,19 @@
 import os
 import re
+import errno
 
 def write_file(path, data):
+    # In case data is None, return immediately
+    if data is None:
+        return
     wdata = data.rstrip() + "\n"
     with open(path, "w") as f:
         f.write(wdata)
 
 def append_file(path, data):
+    # In case data is None, return immediately
+    if data is None:
+        return
     wdata = data.rstrip() + "\n"
     with open(path, "a") as f:
             f.write(wdata)
@@ -18,7 +25,18 @@
     return data
 
 def remove_from_file(path, data):
-    lines = read_file(path).splitlines()
+    # In case data is None, return immediately
+    if data is None:
+        return
+    try:
+        rdata = read_file(path)
+    except IOError as e:
+        # if file does not exist, just quit, otherwise raise an exception
+        if e.errno == errno.ENOENT:
+            return
+        else:
+            raise
+    lines = rdata.splitlines()
     rmdata = data.strip().splitlines()
     for l in rmdata:
         for c in range(0, lines.count(l)):
diff --git a/yocto-poky/meta/lib/oeqa/utils/qemurunner.py b/yocto-poky/meta/lib/oeqa/utils/qemurunner.py
index d32c9db..bdc6e0a 100644
--- a/yocto-poky/meta/lib/oeqa/utils/qemurunner.py
+++ b/yocto-poky/meta/lib/oeqa/utils/qemurunner.py
@@ -13,12 +13,20 @@
 import socket
 import select
 import errno
+import string
 import threading
+import codecs
 from oeqa.utils.dump import HostDumper
 
 import logging
 logger = logging.getLogger("BitBake.QemuRunner")
 
+# Get Unicode non printable control chars
+control_range = range(0,32)+range(127,160)
+control_chars = [unichr(x) for x in control_range
+                if unichr(x) not in string.printable]
+re_control_char = re.compile('[%s]' % re.escape("".join(control_chars)))
+
 class QemuRunner:
 
     def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds):
@@ -61,7 +69,10 @@
 
     def log(self, msg):
         if self.logfile:
-            with open(self.logfile, "a") as f:
+            # It is needed to sanitize the data received from qemu
+            # because it is possible to have control characters
+            msg = re_control_char.sub('', unicode(msg, 'utf-8'))
+            with codecs.open(self.logfile, "a", encoding="utf-8") as f:
                 f.write("%s" % msg)
 
     def getOutput(self, o):
@@ -170,6 +181,9 @@
             cmdline = ''
             with open('/proc/%s/cmdline' % self.qemupid) as p:
                 cmdline = p.read()
+                # It is needed to sanitize the data received
+                # because it is possible to have control characters
+                cmdline = re_control_char.sub('', cmdline)
             try:
                 ips = re.findall("((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
                 if not ips or len(ips) != 3:
@@ -186,7 +200,6 @@
             logger.info("Target IP: %s" % self.ip)
             logger.info("Server IP: %s" % self.server_ip)
 
-            logger.info("Starting logging thread")
             self.thread = LoggingThread(self.log, threadsock, logger)
             self.thread.start()
             if not self.thread.connection_established.wait(self.boottime):
@@ -197,6 +210,7 @@
                 self.stop_thread()
                 return False
 
+            logger.info("Output from runqemu:\n%s", self.getOutput(output))
             logger.info("Waiting at most %d seconds for login banner" % self.boottime)
             endtime = time.time() + self.boottime
             socklist = [self.server_socket]
@@ -259,8 +273,9 @@
 
     def stop(self):
         self.stop_thread()
-        if self.runqemu:
+        if hasattr(self, "origchldhandler"):
             signal.signal(signal.SIGCHLD, self.origchldhandler)
+        if self.runqemu:
             os.kill(self.monitorpid, signal.SIGKILL)
             logger.info("Sending SIGTERM to runqemu")
             try:
@@ -280,7 +295,6 @@
             self.server_socket = None
         self.qemupid = None
         self.ip = None
-        signal.signal(signal.SIGCHLD, self.origchldhandler)
 
     def stop_thread(self):
         if self.thread and self.thread.is_alive():
@@ -440,9 +454,9 @@
 
     def eventloop(self):
         poll = select.poll()
-        eventmask = self.errorevents | self.readevents
+        event_read_mask = self.errorevents | self.readevents
         poll.register(self.serversock.fileno())
-        poll.register(self.readpipe, eventmask)
+        poll.register(self.readpipe, event_read_mask)
 
         breakout = False
         self.running = True
@@ -466,7 +480,7 @@
                     self.readsock, _ = self.serversock.accept()
                     self.readsock.setblocking(0)
                     poll.unregister(self.serversock.fileno())
-                    poll.register(self.readsock.fileno())
+                    poll.register(self.readsock.fileno(), event_read_mask)
 
                     self.logger.info("Setting connection established event")
                     self.connection_established.set()
diff --git a/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/gcc46-compatibility.patch b/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/gcc46-compatibility.patch
new file mode 100644
index 0000000..0ce6d7b
--- /dev/null
+++ b/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/gcc46-compatibility.patch
@@ -0,0 +1,21 @@
+don't break with old compilers and -DGNU_EFI_USE_MS_ABI
+It's entirely legitimate to request GNU_EFI_USE_MS_ABI even if the current
+compiler doesn't support it, and gnu-efi should transparently fall back to
+using legacy techniques to set the calling convention.  We don't get type
+checking, but at least it will still compile.
+
+Author: Steve Langasek <steve.langasek@ubuntu.com>
+Upstream-Status: Pending
+Index: gnu-efi-3.0.3/inc/x86_64/efibind.h
+===================================================================
+--- gnu-efi-3.0.3.orig/inc/x86_64/efibind.h
++++ gnu-efi-3.0.3/inc/x86_64/efibind.h
+@@ -25,8 +25,6 @@ Revision History
+ #if defined(GNU_EFI_USE_MS_ABI)
+     #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7))
+         #define HAVE_USE_MS_ABI 1
+-    #else
+-        #error Compiler is too old for GNU_EFI_USE_MS_ABI
+     #endif
+ #endif
+ 
diff --git a/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.3.bb b/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.3.bb
index 1a1ba40..eca3459 100644
--- a/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.3.bb
+++ b/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.3.bb
@@ -18,6 +18,7 @@
            file://parallel-make-archives.patch \
            file://lib-Makefile-fix-parallel-issue.patch \
            file://gnu-efi-Make-setjmp.S-portable-to-ARM.patch \
+           file://gcc46-compatibility.patch \
           "
 
 SRC_URI[md5sum] = "15a4bcbc18a9a5e8110ed955970622e6"
diff --git a/yocto-poky/meta/recipes-bsp/grub/files/CVE-2015-8370.patch b/yocto-poky/meta/recipes-bsp/grub/files/CVE-2015-8370.patch
new file mode 100644
index 0000000..78f514e
--- /dev/null
+++ b/yocto-poky/meta/recipes-bsp/grub/files/CVE-2015-8370.patch
@@ -0,0 +1,59 @@
+From 451d80e52d851432e109771bb8febafca7a5f1f2 Mon Sep 17 00:00:00 2001
+From: Hector Marco-Gisbert <hecmargi@upv.es>
+Date: Wed, 16 Dec 2015 07:57:18 +0300
+Subject: [PATCH] Fix security issue when reading username and password
+
+This patch fixes two integer underflows at:
+  * grub-core/lib/crypto.c
+  * grub-core/normal/auth.c
+
+CVE-2015-8370
+
+Signed-off-by: Hector Marco-Gisbert <hecmargi@upv.es>
+Signed-off-by: Ismael Ripoll-Ripoll <iripoll@disca.upv.es>
+Also-By: Andrey Borzenkov <arvidjaar@gmail.com>
+
+Upstream-Status: Backport
+
+http://git.savannah.gnu.org/cgit/grub.git/commit/?id=451d80e52d851432e109771bb8febafca7a5f1f2
+
+CVE: CVE-2015-8370
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ grub-core/lib/crypto.c  | 3 ++-
+ grub-core/normal/auth.c | 7 +++++--
+ 2 files changed, 7 insertions(+), 3 deletions(-)
+
+Index: git/grub-core/lib/crypto.c
+===================================================================
+--- git.orig/grub-core/lib/crypto.c
++++ git/grub-core/lib/crypto.c
+@@ -458,7 +458,8 @@ grub_password_get (char buf[], unsigned
+ 
+       if (key == '\b')
+ 	{
+-	  cur_len--;
++	  if (cur_len)
++	    cur_len--;
+ 	  continue;
+ 	}
+ 
+Index: git/grub-core/normal/auth.c
+===================================================================
+--- git.orig/grub-core/normal/auth.c
++++ git/grub-core/normal/auth.c
+@@ -174,8 +174,11 @@ grub_username_get (char buf[], unsigned
+ 
+       if (key == '\b')
+ 	{
+-	  cur_len--;
+-	  grub_printf ("\b");
++	  if (cur_len)
++	    {
++	      cur_len--;
++	      grub_printf ("\b");
++	    }
+ 	  continue;
+ 	}
+ 
diff --git a/yocto-poky/meta/recipes-bsp/grub/grub2.inc b/yocto-poky/meta/recipes-bsp/grub/grub2.inc
index 312771b..fe2407c 100644
--- a/yocto-poky/meta/recipes-bsp/grub/grub2.inc
+++ b/yocto-poky/meta/recipes-bsp/grub/grub2.inc
@@ -27,6 +27,7 @@
            file://0001-Unset-need_charset_alias-when-building-for-musl.patch \
            file://0001-parse_dhcp_vendor-Add-missing-const-qualifiers.patch \
            file://grub2-fix-initrd-size-bug.patch \
+           file://CVE-2015-8370.patch \
             "
 
 DEPENDS = "flex-native bison-native xz"
diff --git a/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch b/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch
new file mode 100644
index 0000000..fa50bc4
--- /dev/null
+++ b/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch
@@ -0,0 +1,74 @@
+From 55957faf1272c8f5f304909faeebf647a78e3701 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 9 Sep 2015 07:19:45 +0000
+Subject: [PATCH] console: Fix C syntax errors for function declaration
+
+To address this, the semicolons after the function parameters should be
+replaced by commas, and the last one should be omitted
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ src/efi/console.c | 26 +++++++++++++-------------
+ 1 file changed, 13 insertions(+), 13 deletions(-)
+
+diff --git a/src/efi/console.c b/src/efi/console.c
+index 6206c80..66aa88f 100644
+--- a/src/efi/console.c
++++ b/src/efi/console.c
+@@ -27,8 +27,8 @@
+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL;
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_INPUT_RESET_EX)(
+-        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This;
+-        BOOLEAN ExtendedVerification;
++        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This,
++        BOOLEAN ExtendedVerification
+ );
+ 
+ typedef UINT8 EFI_KEY_TOGGLE_STATE;
+@@ -44,29 +44,29 @@ typedef struct {
+ } EFI_KEY_DATA;
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_INPUT_READ_KEY_EX)(
+-        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This;
+-        EFI_KEY_DATA *KeyData;
++        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This,
++        EFI_KEY_DATA *KeyData
+ );
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_SET_STATE)(
+-        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This;
+-        EFI_KEY_TOGGLE_STATE *KeyToggleState;
++        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This,
++        EFI_KEY_TOGGLE_STATE *KeyToggleState
+ );
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_KEY_NOTIFY_FUNCTION)(
+-        EFI_KEY_DATA *KeyData;
++        EFI_KEY_DATA *KeyData
+ );
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_REGISTER_KEYSTROKE_NOTIFY)(
+-        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This;
+-        EFI_KEY_DATA KeyData;
+-        EFI_KEY_NOTIFY_FUNCTION KeyNotificationFunction;
+-        VOID **NotifyHandle;
++        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This,
++        EFI_KEY_DATA KeyData,
++        EFI_KEY_NOTIFY_FUNCTION KeyNotificationFunction,
++        VOID **NotifyHandle
+ );
+ 
+ typedef EFI_STATUS (EFIAPI *EFI_UNREGISTER_KEYSTROKE_NOTIFY)(
+-        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This;
+-        VOID *NotificationHandle;
++        struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This,
++        VOID *NotificationHandle
+ );
+ 
+ typedef struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL {
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb b/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb
index 91c3db9..376ab54 100644
--- a/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb
+++ b/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb
@@ -13,6 +13,7 @@
 SRCREV = "2bcd919c681c952eb867ef1bdb458f1bc49c2d55"
 SRC_URI = "git://anongit.freedesktop.org/gummiboot \
            file://fix-objcopy.patch \
+           file://0001-console-Fix-C-syntax-errors-for-function-declaration.patch \
           "
 
 # Note: Add COMPATIBLE_HOST here is only because it depends on gnu-efi
@@ -28,6 +29,8 @@
 
 EXTRA_OEMAKE += "gummibootlibdir=${libdir}/gummiboot"
 
+TUNE_CCARGS_remove = "-mfpmath=sse"
+
 do_deploy () {
         install ${B}/gummiboot*.efi ${DEPLOYDIR}
 }
diff --git a/yocto-poky/meta/recipes-bsp/hostap/hostap-utils-0.4.7/0001-Define-_u32-__s32-__u16-__s16-__u8-in-terms-of-c99-t.patch b/yocto-poky/meta/recipes-bsp/hostap/hostap-utils-0.4.7/0001-Define-_u32-__s32-__u16-__s16-__u8-in-terms-of-c99-t.patch
new file mode 100644
index 0000000..b44dca3
--- /dev/null
+++ b/yocto-poky/meta/recipes-bsp/hostap/hostap-utils-0.4.7/0001-Define-_u32-__s32-__u16-__s16-__u8-in-terms-of-c99-t.patch
@@ -0,0 +1,36 @@
+From 742fb110d9841a04b3ced256b0bf80ff304dcaff Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 31 Aug 2015 05:45:08 +0000
+Subject: [PATCH] Define _u32/__s32/__u16/__s16/__u8 in terms of c99 types
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ wireless_copy.h | 10 +++++-----
+ 1 file changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/wireless_copy.h b/wireless_copy.h
+index 8208258..1171a35 100644
+--- a/wireless_copy.h
++++ b/wireless_copy.h
+@@ -86,11 +86,11 @@
+ #else
+ #include <sys/types.h>
+ #include <net/if.h>
+-typedef __uint32_t __u32;
+-typedef __int32_t __s32;
+-typedef __uint16_t __u16;
+-typedef __int16_t __s16;
+-typedef __uint8_t __u8;
++typedef u_int32_t __u32;
++typedef int32_t __s32;
++typedef u_int16_t __u16;
++typedef int16_t __s16;
++typedef u_int8_t __u8;
+ #ifndef __user
+ #define __user
+ #endif /* __user */
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-bsp/hostap/hostap-utils.inc b/yocto-poky/meta/recipes-bsp/hostap/hostap-utils.inc
index 89d977a..140321d 100644
--- a/yocto-poky/meta/recipes-bsp/hostap/hostap-utils.inc
+++ b/yocto-poky/meta/recipes-bsp/hostap/hostap-utils.inc
@@ -10,7 +10,9 @@
 PR = "r4"
 
 SRC_URI = "http://hostap.epitest.fi/releases/hostap-utils-${PV}.tar.gz \
-	   file://hostap-fw-load.patch"
+           file://hostap-fw-load.patch \
+           file://0001-Define-_u32-__s32-__u16-__s16-__u8-in-terms-of-c99-t.patch \
+"
 S = "${WORKDIR}/hostap-utils-${PV}"
 
 BINARIES = "hostap_crypt_conf hostap_diag hostap_fw_load hostap_io_debug \
diff --git a/yocto-poky/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb b/yocto-poky/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
index eea4d70..0d42b90 100644
--- a/yocto-poky/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
+++ b/yocto-poky/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
@@ -6,7 +6,8 @@
 
 require avahi.inc
 
-inherit python-dir pythonnative
+inherit python-dir pythonnative distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 PACKAGECONFIG ??= "python"
 PACKAGECONFIG[python] = "--enable-python,--disable-python,python-native python"
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind/0001-lib-dns-gen.c-fix-too-long-error.patch b/yocto-poky/meta/recipes-connectivity/bind/bind/0001-lib-dns-gen.c-fix-too-long-error.patch
new file mode 100644
index 0000000..1ed858c
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind/0001-lib-dns-gen.c-fix-too-long-error.patch
@@ -0,0 +1,34 @@
+From 5bc3167a8b714ec0c4a3f1c7f3b9411296ec0a23 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Wed, 16 Sep 2015 20:23:47 -0700
+Subject: [PATCH] lib/dns/gen.c: fix too long error
+
+The 512 is a little short when build in deep dir, and cause "too long"
+error, use PATH_MAX if defined.
+
+Upstream-Status: Pending
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ lib/dns/gen.c |    4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/lib/dns/gen.c b/lib/dns/gen.c
+index 51a0435..3d7214f 100644
+--- a/lib/dns/gen.c
++++ b/lib/dns/gen.c
+@@ -148,7 +148,11 @@ static const char copyright[] =
+ #define TYPECLASSBUF (TYPECLASSLEN + 1)
+ #define TYPECLASSFMT "%" STR(TYPECLASSLEN) "[-0-9a-z]_%d"
+ #define ATTRIBUTESIZE 256
++#ifdef PATH_MAX
++#define DIRNAMESIZE PATH_MAX
++#else
+ #define DIRNAMESIZE 512
++#endif
+ 
+ static struct cc {
+ 	struct cc *next;
+-- 
+1.7.9.5
+
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8000.patch b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8000.patch
new file mode 100644
index 0000000..e1c8052
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8000.patch
@@ -0,0 +1,278 @@
+From 8259daad7242ab2af8731681177ef7e948a15ece Mon Sep 17 00:00:00 2001
+From: Mark Andrews <marka@isc.org>
+Date: Mon, 16 Nov 2015 13:12:20 +1100
+Subject: [PATCH] 4260.   [security]      Insufficient testing when parsing a
+ message allowed                         records with an incorrect class to be
+ be accepted,                         triggering a REQUIRE failure when those
+ records                         were subsequently cached. (CVE-2015-8000) [RT
+ #4098]
+
+(cherry picked from commit c8821d124c532e0a65752b378f924d4259499fd3)
+(cherry picked from commit 3a4c24c4a52d4a2d21d2decbde3d4e514e27d51c)
+
+
+Upstream-Status: Backport
+
+https://source.isc.org/cgi-bin/gitweb.cgi?p=bind9.git;a=commit;h=8259daad7242ab2af8731681177ef7e948a15ece
+
+CVE: CVE-2015-8000
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ CHANGES                       |  5 +++++
+ bin/tests/system/start.pl     |  5 ++++-
+ doc/arm/notes.xml             |  9 +++++++++
+ lib/dns/include/dns/message.h | 13 +++++++++++--
+ lib/dns/message.c             | 45 ++++++++++++++++++++++++++++++++++++++-----
+ lib/dns/resolver.c            |  9 +++++++++
+ lib/dns/xfrin.c               |  2 ++
+ 7 files changed, 80 insertions(+), 8 deletions(-)
+
+Index: bind-9.10.2-P4/bin/tests/system/start.pl
+===================================================================
+--- bind-9.10.2-P4.orig/bin/tests/system/start.pl
++++ bind-9.10.2-P4/bin/tests/system/start.pl
+@@ -68,6 +68,7 @@ my $NAMED = $ENV{'NAMED'};
+ my $LWRESD = $ENV{'LWRESD'};
+ my $DIG = $ENV{'DIG'};
+ my $PERL = $ENV{'PERL'};
++my $PYTHON = $ENV{'PYTHON'};
+ 
+ # Start the server(s)
+ 
+@@ -213,7 +214,9 @@ sub start_server {
+ 		$pid_file = "lwresd.pid";
+ 	} elsif ($server =~ /^ans/) {
+ 		$cleanup_files = "{ans.run}";
+-                if (-e "$testdir/$server/ans.pl") {
++                if (-e "$testdir/$server/ans.py") {
++                        $command = "$PYTHON ans.py 10.53.0.$' 5300";
++                } elsif (-e "$testdir/$server/ans.pl") {
+                         $command = "$PERL ans.pl";
+                 } else {
+                         $command = "$PERL $topdir/ans.pl 10.53.0.$'";
+Index: bind-9.10.2-P4/doc/arm/notes.xml
+===================================================================
+--- bind-9.10.2-P4.orig/doc/arm/notes.xml
++++ bind-9.10.2-P4/doc/arm/notes.xml
+@@ -62,6 +62,15 @@
+     <itemizedlist>
+       <listitem>
+ 	<para>
++	  Insufficient testing when parsing a message allowed
++	  records with an incorrect class to be be accepted,
++	  triggering a REQUIRE failure when those records
++	  were subsequently cached.  This flaw is disclosed
++	  in CVE-2015-8000. [RT #4098]
++	</para>
++      </listitem>
++      <listitem>
++	<para>
+ 	  An incorrect boundary check in the OPENPGPKEY rdatatype
+ 	  could trigger an assertion failure. This flaw is disclosed
+ 	  in CVE-2015-5986. [RT #40286]
+Index: bind-9.10.2-P4/lib/dns/include/dns/message.h
+===================================================================
+--- bind-9.10.2-P4.orig/lib/dns/include/dns/message.h
++++ bind-9.10.2-P4/lib/dns/include/dns/message.h
+@@ -15,8 +15,6 @@
+  * PERFORMANCE OF THIS SOFTWARE.
+  */
+ 
+-/* $Id$ */
+-
+ #ifndef DNS_MESSAGE_H
+ #define DNS_MESSAGE_H 1
+ 
+@@ -221,6 +219,8 @@ struct dns_message {
+ 	unsigned int			free_saved : 1;
+ 	unsigned int			sitok : 1;
+ 	unsigned int			sitbad : 1;
++	unsigned int			tkey : 1;
++	unsigned int			rdclass_set : 1;
+ 
+ 	unsigned int			opt_reserved;
+ 	unsigned int			sig_reserved;
+@@ -1400,6 +1400,15 @@ dns_message_buildopt(dns_message_t *msg,
+  * \li	 other.
+  */
+ 
++void
++dns_message_setclass(dns_message_t *msg, dns_rdataclass_t rdclass);
++/*%<
++ * Set the expected class of records in the response.
++ *
++ * Requires:
++ * \li   msg be a valid message with parsing intent.
++ */
++
+ ISC_LANG_ENDDECLS
+ 
+ #endif /* DNS_MESSAGE_H */
+Index: bind-9.10.2-P4/lib/dns/message.c
+===================================================================
+--- bind-9.10.2-P4.orig/lib/dns/message.c
++++ bind-9.10.2-P4/lib/dns/message.c
+@@ -439,6 +439,8 @@ msginit(dns_message_t *m) {
+ 	m->free_saved = 0;
+ 	m->sitok = 0;
+ 	m->sitbad = 0;
++	m->tkey = 0;
++	m->rdclass_set = 0;
+ 	m->querytsig = NULL;
+ }
+ 
+@@ -1091,13 +1093,19 @@ getquestions(isc_buffer_t *source, dns_m
+ 		 * If this class is different than the one we already read,
+ 		 * this is an error.
+ 		 */
+-		if (msg->state == DNS_SECTION_ANY) {
+-			msg->state = DNS_SECTION_QUESTION;
++		if (msg->rdclass_set == 0) {
+ 			msg->rdclass = rdclass;
++			msg->rdclass_set = 1;
+ 		} else if (msg->rdclass != rdclass)
+ 			DO_FORMERR;
+ 
+ 		/*
++		 * Is this a TKEY query?
++		 */
++		if (rdtype == dns_rdatatype_tkey)
++			msg->tkey = 1;
++
++		/*
+ 		 * Can't ask the same question twice.
+ 		 */
+ 		result = dns_message_find(name, rdclass, rdtype, 0, NULL);
+@@ -1241,12 +1249,12 @@ getsection(isc_buffer_t *source, dns_mes
+ 		 * If there was no question section, we may not yet have
+ 		 * established a class.  Do so now.
+ 		 */
+-		if (msg->state == DNS_SECTION_ANY &&
++		if (msg->rdclass_set == 0 &&
+ 		    rdtype != dns_rdatatype_opt &&	/* class is UDP SIZE */
+ 		    rdtype != dns_rdatatype_tsig &&	/* class is ANY */
+ 		    rdtype != dns_rdatatype_tkey) {	/* class is undefined */
+ 			msg->rdclass = rdclass;
+-			msg->state = DNS_SECTION_QUESTION;
++			msg->rdclass_set = 1;
+ 		}
+ 
+ 		/*
+@@ -1256,7 +1264,7 @@ getsection(isc_buffer_t *source, dns_mes
+ 		if (msg->opcode != dns_opcode_update
+ 		    && rdtype != dns_rdatatype_tsig
+ 		    && rdtype != dns_rdatatype_opt
+-		    && rdtype != dns_rdatatype_dnskey /* in a TKEY query */
++		    && rdtype != dns_rdatatype_key /* in a TKEY query */
+ 		    && rdtype != dns_rdatatype_sig /* SIG(0) */
+ 		    && rdtype != dns_rdatatype_tkey /* Win2000 TKEY */
+ 		    && msg->rdclass != dns_rdataclass_any
+@@ -1264,6 +1272,16 @@ getsection(isc_buffer_t *source, dns_mes
+ 			DO_FORMERR;
+ 
+ 		/*
++		 * If this is not a TKEY query/response then the KEY
++		 * record's class needs to match.
++		 */
++		if (msg->opcode != dns_opcode_update && !msg->tkey &&
++		    rdtype == dns_rdatatype_key &&
++		    msg->rdclass != dns_rdataclass_any &&
++		    msg->rdclass != rdclass)
++			DO_FORMERR;
++
++		/*
+ 		 * Special type handling for TSIG, OPT, and TKEY.
+ 		 */
+ 		if (rdtype == dns_rdatatype_tsig) {
+@@ -1377,6 +1395,10 @@ getsection(isc_buffer_t *source, dns_mes
+ 				skip_name_search = ISC_TRUE;
+ 				skip_type_search = ISC_TRUE;
+ 				issigzero = ISC_TRUE;
++			} else {
++				if (msg->rdclass != dns_rdataclass_any &&
++				    msg->rdclass != rdclass)
++					DO_FORMERR;
+ 			}
+ 		} else
+ 			covers = 0;
+@@ -1625,6 +1647,7 @@ dns_message_parse(dns_message_t *msg, is
+ 	msg->counts[DNS_SECTION_ADDITIONAL] = isc_buffer_getuint16(source);
+ 
+ 	msg->header_ok = 1;
++	msg->state = DNS_SECTION_QUESTION;
+ 
+ 	/*
+ 	 * -1 means no EDNS.
+@@ -3706,3 +3729,15 @@ dns_message_buildopt(dns_message_t *mess
+ 		dns_message_puttemprdatalist(message, &rdatalist);
+ 	return (result);
+ }
++
++void
++dns_message_setclass(dns_message_t *msg, dns_rdataclass_t rdclass) {
++
++	REQUIRE(DNS_MESSAGE_VALID(msg));
++	REQUIRE(msg->from_to_wire == DNS_MESSAGE_INTENTPARSE);
++	REQUIRE(msg->state == DNS_SECTION_ANY);
++	REQUIRE(msg->rdclass_set == 0);
++
++	msg->rdclass = rdclass;
++	msg->rdclass_set = 1;
++}
+Index: bind-9.10.2-P4/lib/dns/resolver.c
+===================================================================
+--- bind-9.10.2-P4.orig/lib/dns/resolver.c
++++ bind-9.10.2-P4/lib/dns/resolver.c
+@@ -7309,6 +7309,8 @@ resquery_response(isc_task_t *task, isc_
+ 			goto done;
+ 	}
+ 
++	dns_message_setclass(message, fctx->res->rdclass);
++
+ 	if ((options & DNS_FETCHOPT_TCP) == 0) {
+ 		if ((options & DNS_FETCHOPT_NOEDNS0) == 0)
+ 			dns_adb_setudpsize(fctx->adb, query->addrinfo,
+@@ -7391,6 +7393,13 @@ resquery_response(isc_task_t *task, isc_
+ 				 &dns_master_style_comment,
+ 				 ISC_LOG_DEBUG(10),
+ 				 fctx->res->mctx);
++
++	if (message->rdclass != fctx->res->rdclass) {
++		resend = ISC_TRUE;
++		FCTXTRACE("bad class");
++		goto done;
++	}
++
+ 	/*
+ 	 * Process receive opt record.
+ 	 */
+Index: bind-9.10.2-P4/lib/dns/xfrin.c
+===================================================================
+--- bind-9.10.2-P4.orig/lib/dns/xfrin.c
++++ bind-9.10.2-P4/lib/dns/xfrin.c
+@@ -1225,6 +1225,8 @@ xfrin_recv_done(isc_task_t *task, isc_ev
+ 	msg->tsigctx = xfr->tsigctx;
+ 	xfr->tsigctx = NULL;
+ 
++	dns_message_setclass(msg, xfr->rdclass);
++
+ 	if (xfr->nmsg > 0)
+ 		msg->tcp_continuation = 1;
+ 
+Index: bind-9.10.2-P4/CHANGES
+===================================================================
+--- bind-9.10.2-P4.orig/CHANGES
++++ bind-9.10.2-P4/CHANGES
+@@ -1,4 +1,9 @@
+-	--- 9.10.2-P4 released ---
++4260.  [security]      Insufficient testing when parsing a message allowed
++                       records with an incorrect class to be be accepted,
++                       triggering a REQUIRE failure when those records
++                       were subsequently cached. (CVE-2015-8000) [RT #4098]
++
++    --- 9.10.2-P4 released ---
+ 
+ 4170.	[security]	An incorrect boundary check in the OPENPGPKEY
+ 			rdatatype could trigger an assertion failure.
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8461.patch b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8461.patch
new file mode 100644
index 0000000..88e9c83
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8461.patch
@@ -0,0 +1,44 @@
+From adbf81335b67be0cebdcf9f1f4fcb38ef4814f4d Mon Sep 17 00:00:00 2001
+From: Mark Andrews <marka@isc.org>
+Date: Thu, 25 Jun 2015 18:36:27 +1000
+Subject: [PATCH] 4146.   [bug]           Address reference leak that could
+ prevent a clean                         shutdown. [RT #37125]
+
+Upstream-Status: Backport
+
+https://source.isc.org/cgi-bin/gitweb.cgi?p=bind9.git;a=commit;h=adbf81335b67be0cebdcf9f1f4fcb38ef4814f4d
+
+CVE: CVE-2015-8461
+Signed-off-by:  Armin Kuster <akuster@mvista.com>
+---
+ CHANGES            | 3 +++
+ lib/dns/resolver.c | 5 +++++
+ 2 files changed, 8 insertions(+)
+
+Index: bind-9.10.2-P4/CHANGES
+===================================================================
+--- bind-9.10.2-P4.orig/CHANGES
++++ bind-9.10.2-P4/CHANGES
+@@ -1,3 +1,6 @@
++4146.  [bug]           Address reference leak that could prevent a clean
++                       shutdown. [RT #37125]
++
+ 4260.  [security]      Insufficient testing when parsing a message allowed
+                        records with an incorrect class to be be accepted,
+                        triggering a REQUIRE failure when those records
+Index: bind-9.10.2-P4/lib/dns/resolver.c
+===================================================================
+--- bind-9.10.2-P4.orig/lib/dns/resolver.c
++++ bind-9.10.2-P4/lib/dns/resolver.c
+@@ -1649,6 +1649,11 @@ fctx_query(fetchctx_t *fctx, dns_adbaddr
+ 	if (query->dispatch != NULL)
+ 		dns_dispatch_detach(&query->dispatch);
+ 
++	LOCK(&res->buckets[fctx->bucketnum].lock);
++	INSIST(fctx->references > 1);
++	fctx->references--;
++	UNLOCK(&res->buckets[fctx->bucketnum].lock);
++
+  cleanup_query:
+ 	if (query->connects == 0) {
+ 		query->magic = 0;
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8704.patch b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8704.patch
new file mode 100644
index 0000000..d5bf740
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8704.patch
@@ -0,0 +1,28 @@
+a buffer size check can cause denial of service under certain circumstances 
+
+[security]
+The following flaw in BIND was reported by ISC:
+
+A buffer size check used to guard against overflow could cause named to exit with an INSIST failure In apl_42.c.
+
+A server could exit due to an INSIST failure in apl_42.c when performing certain string formatting operations.
+
+Upstream-Status: Backport
+CVE: CVE-2015-8704
+
+[The patch is taken from BIND 9.10.3:
+https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2015-8704]
+
+Signed-off-by: Derek Straka <derek@asterius.io>
+diff --git a/lib/dns/rdata/in_1/apl_42.c b/lib/dns/rdata/in_1/apl_42.c
+index bedd38e..28eb7f2 100644
+--- a/lib/dns/rdata/in_1/apl_42.c
++++ b/lib/dns/rdata/in_1/apl_42.c
+@@ -116,7 +116,7 @@ totext_in_apl(ARGS_TOTEXT) {
+	isc_uint8_t len;
+	isc_boolean_t neg;
+	unsigned char buf[16];
+-	char txt[sizeof(" !64000")];
++	char txt[sizeof(" !64000:")];
+	const char *sep = "";
+	int n;
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8705.patch b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8705.patch
new file mode 100644
index 0000000..c4a052d
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind/CVE-2015-8705.patch
@@ -0,0 +1,44 @@
+a crash or assertion failure can during format processing 
+
+[security]
+The following flaw in BIND was reported by ISC:
+
+In versions of BIND 9.10, errors can occur when OPT pseudo-RR data or ECS options are formatted to text. In 9.10.3 through 9.10.3-P2, the issue may result in a REQUIRE assertion failure in buffer.c.
+
+This issue can affect both authoritative and recursive servers if they are performing debug logging. (It may also crash related tools which use the same code, such as dig or delv.)
+
+A server could exit due to an INSIST failure in apl_42.c when performing certain string formatting operations.
+
+Upstream-Status: Backport
+CVE: CVE-2015-8705
+
+[The patch is taken from BIND 9.10.3:
+https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2015-8705]
+
+Signed-off-by: Derek Straka <derek@asterius.io>
+diff --git a/lib/dns/message.c b/lib/dns/message.c
+index ea7b93a..810c58e 100644
+--- a/lib/dns/message.c
++++ b/lib/dns/message.c
+@@ -3310,9 +3310,19 @@
+ 			} else if (optcode == DNS_OPT_SIT) {
+ 				ADD_STRING(target, "; SIT");
+ 			} else if (optcode == DNS_OPT_CLIENT_SUBNET) {
++				isc_buffer_t ecsbuf;
+ 				ADD_STRING(target, "; CLIENT-SUBNET: ");
+-				render_ecs(&optbuf, target);
+-				ADD_STRING(target, "\n");
++				isc_buffer_init(&ecsbuf,
++							isc_buffer_current(&optbuf),
++							optlen);
++				isc_buffer_add(&ecsbuf, optlen);
++				result = render_ecs(&ecsbuf, target);
++				if (result == ISC_R_NOSPACE)
++					return (result);
++				if (result == ISC_R_SUCCESS) {
++					isc_buffer_forward(&optbuf, optlen);
++                                        ADD_STRING(target, "\n");
++                }
+ 				continue;
+ 			} else if (optcode == DNS_OPT_EXPIRE) {
+ 				if (optlen == 4) {
diff --git a/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.2-P4.bb b/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.2-P4.bb
index efae289..19f87d7 100644
--- a/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.2-P4.bb
+++ b/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.2-P4.bb
@@ -20,6 +20,11 @@
            file://0001-build-use-pkg-config-to-find-libxml2.patch \
            file://bind-ensure-searching-for-json-headers-searches-sysr.patch \
            file://0001-gen.c-extend-DIRNAMESIZE-from-256-to-512.patch \
+           file://0001-lib-dns-gen.c-fix-too-long-error.patch \
+           file://CVE-2015-8704.patch \
+           file://CVE-2015-8705.patch \
+           file://CVE-2015-8000.patch \
+           file://CVE-2015-8461.patch \
            "
 
 SRC_URI[md5sum] = "8b1f5064837756c938eadc1537dec5c7"
diff --git a/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc b/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc
index 039c443..df42c88 100644
--- a/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc
+++ b/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc
@@ -18,7 +18,6 @@
 
 SRC_URI = "\
     ${KERNELORG_MIRROR}/linux/bluetooth/bluez-${PV}.tar.xz \
-    file://bluetooth.conf \
 "
 S = "${WORKDIR}/bluez-${PV}"
 
@@ -53,8 +52,8 @@
 	if [ -f ${S}/profiles/input/input.conf ]; then
 	    install -m 0644 ${S}/profiles/input/input.conf ${D}/${sysconfdir}/bluetooth/
 	fi
-	# at_console doesn't really work with the current state of OE, so punch some more holes so people can actually use BT
-	install -m 0644 ${WORKDIR}/bluetooth.conf ${D}/${sysconfdir}/dbus-1/system.d/
+
+	install -m 0644 ${S}/src/bluetooth.conf ${D}/${sysconfdir}/dbus-1/system.d/
 
 	# Install desired tools that upstream leaves in build area
         for f in ${NOINST_TOOLS} ; do
diff --git a/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf b/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf
deleted file mode 100644
index 26845bb..0000000
--- a/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf
+++ /dev/null
@@ -1,17 +0,0 @@
-<!-- This configuration file specifies the required security policies
-     for Bluetooth core daemon to work. -->
-
-<!DOCTYPE busconfig PUBLIC "-//freedesktop//DTD D-BUS Bus Configuration 1.0//EN"
- "http://www.freedesktop.org/standards/dbus/1.0/busconfig.dtd">
-<busconfig>
-
-  <!-- ../system.conf have denied everything, so we just punch some holes -->
-
-  <policy context="default">
-    <allow own="org.bluez"/>
-    <allow send_destination="org.bluez"/>
-    <allow send_interface="org.bluez.Agent1"/>
-    <allow send_type="method_call"/>
-  </policy>
-
-</busconfig>
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman-conf.bb b/yocto-poky/meta/recipes-connectivity/connman/connman-conf.bb
index bd4c28d..9254ed7 100644
--- a/yocto-poky/meta/recipes-connectivity/connman/connman-conf.bb
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman-conf.bb
@@ -13,14 +13,14 @@
 
 PACKAGE_ARCH = "${MACHINE_ARCH}"
 
-FILES_${PN} = "${localstatedir}/* ${libdir}/*"
+FILES_${PN} = "${localstatedir}/* ${datadir}/*"
 
 do_install() {
     #Configure Wired network interface in case of qemu* machines
     if test -e ${WORKDIR}/wired.config && test -e ${WORKDIR}/wired-setup; then
         install -d ${D}${localstatedir}/lib/connman
         install -m 0644 ${WORKDIR}/wired.config ${D}${localstatedir}/lib/connman
-        install -d ${D}${libdir}/connman
-        install -m 0755 ${WORKDIR}/wired-setup ${D}${libdir}/connman
+        install -d ${D}${datadir}/connman
+        install -m 0755 ${WORKDIR}/wired-setup ${D}${datadir}/connman
     fi
 }
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman-gnome_0.7.bb b/yocto-poky/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
index f5575d2..7b875f0 100644
--- a/yocto-poky/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
@@ -6,7 +6,7 @@
                     file://properties/main.c;beginline=1;endline=20;md5=50c77c81871308b033ab7a1504626afb \
                     file://common/connman-dbus.c;beginline=1;endline=20;md5=de6b485c0e717a0236402d220187717a"
 
-DEPENDS = "gtk+ dbus-glib intltool-native"
+DEPENDS = "gtk+ dbus-glib intltool-native gettext-native"
 
 # 0.7 tag
 SRCREV = "cf3c325b23dae843c5499a113591cfbc98acb143"
@@ -19,7 +19,8 @@
 
 S = "${WORKDIR}/git"
 
-inherit autotools-brokensep gtk-icon-cache pkgconfig
+inherit autotools-brokensep gtk-icon-cache pkgconfig distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 RDEPENDS_${PN} = "connman"
 
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman.inc b/yocto-poky/meta/recipes-connectivity/connman/connman.inc
index 17dc4b9..afdb3f2 100644
--- a/yocto-poky/meta/recipes-connectivity/connman/connman.inc
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman.inc
@@ -30,6 +30,7 @@
     --disable-polkit \
     --enable-client \
 "
+CFLAGS += "-D_GNU_SOURCE"
 
 PACKAGECONFIG ??= "wispr \
                    ${@bb.utils.contains('DISTRO_FEATURES', 'systemd','systemd', '', d)} \
@@ -67,15 +68,9 @@
 
 SYSTEMD_SERVICE_${PN} = "connman.service"
 SYSTEMD_SERVICE_${PN}-vpn = "connman-vpn.service"
-SYSTEMD_WIRED_SETUP = "ExecStartPre=-${libdir}/connman/wired-setup"
+SYSTEMD_WIRED_SETUP = "ExecStartPre=-${datadir}/connman/wired-setup"
 
-# This allows *everyone* to access ConnMan over DBus, without any access
-# control.  Really the at_console flag should work, which would mean that
-# both this and the xuser patch can be dropped.
 do_compile_append() {
-	sed -i -e s:deny:allow:g ${S}/src/connman-dbus.conf
-	sed -i -e s:deny:allow:g ${S}/vpn/vpn-dbus.conf
-
 	sed -i "s#ExecStart=#${SYSTEMD_WIRED_SETUP}\nExecStart=#" ${B}/src/connman.service
 }
 
@@ -83,7 +78,7 @@
 	if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','true','false',d)}; then
 		install -d ${D}${sysconfdir}/init.d
 		install -m 0755 ${WORKDIR}/connman ${D}${sysconfdir}/init.d/connman
-		sed -i s%@LIBDIR@%${libdir}% ${D}${sysconfdir}/init.d/connman
+		sed -i s%@DATADIR@%${datadir}% ${D}${sysconfdir}/init.d/connman
 	fi
 
 	install -d ${D}${bindir}
@@ -112,7 +107,6 @@
 
 RDEPENDS_${PN} = "\
 	dbus \
-	${@base_conditional('ROOTLESS_X', '1', 'xuser-account', '', d)} \
 	"
 
 PACKAGES_DYNAMIC += "^${PN}-plugin-.*"
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Detect-backtrace-API-availability-before-using-it.patch b/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Detect-backtrace-API-availability-before-using-it.patch
new file mode 100644
index 0000000..5dc6fd6
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Detect-backtrace-API-availability-before-using-it.patch
@@ -0,0 +1,55 @@
+From 00d4447395725abaa651e12ed40095081e04011e Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 13 Sep 2015 13:22:01 -0700
+Subject: [PATCH 1/3] Detect backtrace() API availability before using it
+
+C libraries besides glibc do not have backtrace() implemented
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ configure.ac | 2 ++
+ src/log.c    | 5 ++---
+ 2 files changed, 4 insertions(+), 3 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 69c0eeb..90099f2 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -171,6 +171,8 @@ fi
+ AM_CONDITIONAL(PPTP, test "${enable_pptp}" != "no")
+ AM_CONDITIONAL(PPTP_BUILTIN, test "${enable_pptp}" = "builtin")
+ 
++AC_CHECK_HEADERS([execinfo.h])
++
+ AC_CHECK_HEADERS(resolv.h, dummy=yes,
+ 	AC_MSG_ERROR(resolver header files are required))
+ AC_CHECK_LIB(resolv, ns_initparse, dummy=yes, [
+diff --git a/src/log.c b/src/log.c
+index a693bd0..5b40c1f 100644
+--- a/src/log.c
++++ b/src/log.c
+@@ -30,7 +30,6 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <syslog.h>
+-#include <execinfo.h>
+ #include <dlfcn.h>
+ 
+ #include "connman.h"
+@@ -215,9 +214,9 @@ static void print_backtrace(unsigned int offset)
+ static void signal_handler(int signo)
+ {
+ 	connman_error("Aborting (signal %d) [%s]", signo, program_exec);
+-
++#ifdef HAVE_EXECINFO_H
+ 	print_backtrace(2);
+-
++#endif /* HAVE_EXECINFO_H */
+ 	exit(EXIT_FAILURE);
+ }
+ 
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch b/yocto-poky/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch
new file mode 100644
index 0000000..0593427
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman/0002-resolve-musl-does-not-implement-res_ninit.patch
@@ -0,0 +1,77 @@
+From 10b0d16d04b811b1ccd1f9b0cfe757bce8d876a1 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 6 Apr 2015 23:02:21 -0700
+Subject: [PATCH 2/3] resolve: musl does not implement res_ninit
+
+ported from
+http://git.alpinelinux.org/cgit/aports/plain/testing/connman/libresolv.patch
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ gweb/gresolv.c | 33 ++++++++++++---------------------
+ 1 file changed, 12 insertions(+), 21 deletions(-)
+
+diff --git a/gweb/gresolv.c b/gweb/gresolv.c
+index 5cf7a9a..3ad8e70 100644
+--- a/gweb/gresolv.c
++++ b/gweb/gresolv.c
+@@ -875,8 +875,6 @@ GResolv *g_resolv_new(int index)
+ 	resolv->index = index;
+ 	resolv->nameserver_list = NULL;
+ 
+-	res_ninit(&resolv->res);
+-
+ 	return resolv;
+ }
+ 
+@@ -916,8 +914,6 @@ void g_resolv_unref(GResolv *resolv)
+ 
+ 	flush_nameservers(resolv);
+ 
+-	res_nclose(&resolv->res);
+-
+ 	g_free(resolv);
+ }
+ 
+@@ -1020,24 +1016,19 @@ guint g_resolv_lookup_hostname(GResolv *resolv, const char *hostname,
+ 	debug(resolv, "hostname %s", hostname);
+ 
+ 	if (!resolv->nameserver_list) {
+-		int i;
+-
+-		for (i = 0; i < resolv->res.nscount; i++) {
+-			char buf[100];
+-			int family = resolv->res.nsaddr_list[i].sin_family;
+-			void *sa_addr = &resolv->res.nsaddr_list[i].sin_addr;
+-
+-			if (family != AF_INET &&
+-					resolv->res._u._ext.nsaddrs[i]) {
+-				family = AF_INET6;
+-				sa_addr = &resolv->res._u._ext.nsaddrs[i]->sin6_addr;
++		FILE *f = fopen("/etc/resolv.conf", "r");
++		if (f) {
++			char line[256], *s;
++			int i;
++			while (fgets(line, sizeof(line), f)) {
++				if (strncmp(line, "nameserver", 10) || !isspace(line[10]))
++					continue;
++				for (s = &line[11]; isspace(s[0]); s++);
++				for (i = 0; s[i] && !isspace(s[i]); i++);
++				s[i] = 0;
++				g_resolv_add_nameserver(resolv, s, 53, 0);
+ 			}
+-
+-			if (family != AF_INET && family != AF_INET6)
+-				continue;
+-
+-			if (inet_ntop(family, sa_addr, buf, sizeof(buf)))
+-				g_resolv_add_nameserver(resolv, buf, 53, 0);
++			fclose(f);
+ 		}
+ 
+ 		if (!resolv->nameserver_list)
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman/0003-Fix-header-inclusions-for-musl.patch b/yocto-poky/meta/recipes-connectivity/connman/connman/0003-Fix-header-inclusions-for-musl.patch
new file mode 100644
index 0000000..6327aa2
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman/0003-Fix-header-inclusions-for-musl.patch
@@ -0,0 +1,85 @@
+From 67645a01a2f3f52625d8dd77f2811a9e213e1b7d Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 13 Sep 2015 13:28:20 -0700
+Subject: [PATCH] Fix header inclusions for musl
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ gweb/gresolv.c        | 1 +
+ plugins/wifi.c        | 3 +--
+ src/tethering.c       | 2 --
+ tools/dhcp-test.c     | 1 -
+ tools/dnsproxy-test.c | 1 +
+ 5 files changed, 3 insertions(+), 5 deletions(-)
+
+diff --git a/gweb/gresolv.c b/gweb/gresolv.c
+index 3ad8e70..61d6fe8 100644
+--- a/gweb/gresolv.c
++++ b/gweb/gresolv.c
+@@ -28,6 +28,7 @@
+ #include <stdarg.h>
+ #include <string.h>
+ #include <stdlib.h>
++#include <stdio.h>
+ #include <resolv.h>
+ #include <sys/types.h>
+ #include <sys/socket.h>
+diff --git a/plugins/wifi.c b/plugins/wifi.c
+index dfe849f..99cff3f 100644
+--- a/plugins/wifi.c
++++ b/plugins/wifi.c
+@@ -30,9 +30,8 @@
+ #include <string.h>
+ #include <sys/ioctl.h>
+ #include <sys/socket.h>
+-#include <linux/if_arp.h>
+-#include <linux/wireless.h>
+ #include <net/ethernet.h>
++#include <linux/wireless.h>
+ 
+ #ifndef IFF_LOWER_UP
+ #define IFF_LOWER_UP	0x10000
+diff --git a/src/tethering.c b/src/tethering.c
+index ceeec74..c44cb36 100644
+--- a/src/tethering.c
++++ b/src/tethering.c
+@@ -31,10 +31,8 @@
+ #include <stdio.h>
+ #include <sys/ioctl.h>
+ #include <net/if.h>
+-#include <linux/sockios.h>
+ #include <string.h>
+ #include <fcntl.h>
+-#include <linux/if_tun.h>
+ #include <netinet/in.h>
+ #include <linux/if_bridge.h>
+ 
+diff --git a/tools/dhcp-test.c b/tools/dhcp-test.c
+index c34e10a..eae66fc 100644
+--- a/tools/dhcp-test.c
++++ b/tools/dhcp-test.c
+@@ -33,7 +33,6 @@
+ #include <arpa/inet.h>
+ #include <net/route.h>
+ #include <net/ethernet.h>
+-#include <linux/if_arp.h>
+ 
+ #include <gdhcp/gdhcp.h>
+ 
+diff --git a/tools/dnsproxy-test.c b/tools/dnsproxy-test.c
+index 551cae9..226ba86 100644
+--- a/tools/dnsproxy-test.c
++++ b/tools/dnsproxy-test.c
+@@ -27,6 +27,7 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <unistd.h>
++#include <stdio.h>
+ #include <arpa/inet.h>
+ #include <netinet/in.h>
+ #include <sys/types.h>
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman/add_xuser_dbus_permission.patch b/yocto-poky/meta/recipes-connectivity/connman/connman/add_xuser_dbus_permission.patch
deleted file mode 100644
index 707b3ca..0000000
--- a/yocto-poky/meta/recipes-connectivity/connman/connman/add_xuser_dbus_permission.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-Because Poky doesn't support at_console we need to special-case the session
-user.
-
-Upstream-Status: Inappropriate [configuration]
-
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/src/connman-dbus.conf b/src/connman-dbus.conf
-index 98a773e..466809c 100644
---- a/src/connman-dbus.conf
-+++ b/src/connman-dbus.conf
-@@ -8,6 +8,9 @@
-         <allow send_interface="net.connman.Counter"/>
-         <allow send_interface="net.connman.Notification"/>
-     </policy>
-+    <policy user="xuser">
-+        <allow send_destination="net.connman"/>
-+    </policy>
-     <policy at_console="true">
-         <allow send_destination="net.connman"/>
-     </policy>
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman/connman b/yocto-poky/meta/recipes-connectivity/connman/connman/connman
index bf7a94a..c64fa0d 100644
--- a/yocto-poky/meta/recipes-connectivity/connman/connman/connman
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman/connman
@@ -49,8 +49,8 @@
 		fi
 	    fi
 	fi
-	if [ -f @LIBDIR@/connman/wired-setup ] ; then
-		. @LIBDIR@/connman/wired-setup
+	if [ -f @DATADIR@/connman/wired-setup ] ; then
+		. @DATADIR@/connman/wired-setup
 	fi
 	$DAEMON $EXTRA_PARAM
 }
diff --git a/yocto-poky/meta/recipes-connectivity/connman/connman_1.30.bb b/yocto-poky/meta/recipes-connectivity/connman/connman_1.30.bb
index 8c47353..7d65ac9 100644
--- a/yocto-poky/meta/recipes-connectivity/connman/connman_1.30.bb
+++ b/yocto-poky/meta/recipes-connectivity/connman/connman_1.30.bb
@@ -2,7 +2,9 @@
 
 SRC_URI  = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
             file://0001-plugin.h-Change-visibility-to-default-for-debug-symb.patch \
-            file://add_xuser_dbus_permission.patch \
+            file://0001-Detect-backtrace-API-availability-before-using-it.patch \
+            file://0002-resolve-musl-does-not-implement-res_ninit.patch \
+            file://0003-Fix-header-inclusions-for-musl.patch \
             file://connman \
             "
 SRC_URI[md5sum] = "4a3efdbd6796922db9c6f66da57887fa"
diff --git a/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc b/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc
index a53a4e6..29f9062 100644
--- a/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc
+++ b/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc
@@ -15,6 +15,12 @@
 
 EXTRA_OEMAKE = "CC='${CC}' KERNEL_INCLUDE=${STAGING_INCDIR} DOCDIR=${docdir}/iproute2 SUBDIRS='lib tc ip' SBINDIR='${base_sbindir}' LIBDIR='${libdir}'"
 
+do_configure_append () {
+    sh configure ${STAGING_INCDIR}
+    # Explicitly disable ATM support
+    sed -i -e '/TC_CONFIG_ATM/d' Config
+}
+
 do_install () {
     oe_runmake DESTDIR=${D} install
     mv ${D}${base_sbindir}/ip ${D}${base_sbindir}/ip.iproute2
diff --git a/yocto-poky/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb b/yocto-poky/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
index 8ac3b18..bd2f815 100644
--- a/yocto-poky/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
+++ b/yocto-poky/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
@@ -3,7 +3,7 @@
 IrDA allows communication over Infrared with other devices \
 such as phones and laptops."
 HOMEPAGE = "http://irda.sourceforge.net/"
-BUGTRACKER = "irda-users@lists.sourceforge.net"
+BUGTRACKER = "http://sourceforge.net/p/irda/bugs/"
 SECTION = "base"
 LICENSE = "GPLv2+"
 LIC_FILES_CHKSUM = "file://irdadump/COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_2.patch b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_2.patch
new file mode 100644
index 0000000..9fac69c
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_2.patch
@@ -0,0 +1,65 @@
+From f98a09cacff7baad8748c9aa217afd155a4d493f Mon Sep 17 00:00:00 2001
+From: "mmcc@openbsd.org" <mmcc@openbsd.org>
+Date: Tue, 20 Oct 2015 03:36:35 +0000
+Subject: [PATCH] upstream commit
+
+Replace a function-local allocation with stack memory.
+
+ok djm@
+
+Upstream-ID: c09fbbab637053a2ab9f33ca142b4e20a4c5a17e
+Upstream-Status: Backport
+CVE: CVE-2016-1907
+
+[YOCTO #8935]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ clientloop.c | 9 ++-------
+ 1 file changed, 2 insertions(+), 7 deletions(-)
+
+diff --git a/clientloop.c b/clientloop.c
+index 87ceb3d..1e05cba 100644
+--- a/clientloop.c
++++ b/clientloop.c
+@@ -1,4 +1,4 @@
+-/* $OpenBSD: clientloop.c,v 1.275 2015/07/10 06:21:53 markus Exp $ */
++/* $OpenBSD: clientloop.c,v 1.276 2015/10/20 03:36:35 mmcc Exp $ */
+ /*
+  * Author: Tatu Ylonen <ylo@cs.hut.fi>
+  * Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland
+@@ -311,11 +311,10 @@ client_x11_get_proto(const char *display, const char *xauth_path,
+ 	static char proto[512], data[512];
+ 	FILE *f;
+ 	int got_data = 0, generated = 0, do_unlink = 0, i;
+-	char *xauthdir, *xauthfile;
++	char xauthdir[PATH_MAX] = "", xauthfile[PATH_MAX] = "";
+ 	struct stat st;
+ 	u_int now, x11_timeout_real;
+ 
+-	xauthdir = xauthfile = NULL;
+ 	*_proto = proto;
+ 	*_data = data;
+ 	proto[0] = data[0] = '\0';
+@@ -343,8 +342,6 @@ client_x11_get_proto(const char *display, const char *xauth_path,
+ 			display = xdisplay;
+ 		}
+ 		if (trusted == 0) {
+-			xauthdir = xmalloc(PATH_MAX);
+-			xauthfile = xmalloc(PATH_MAX);
+ 			mktemp_proto(xauthdir, PATH_MAX);
+ 			/*
+ 			 * The authentication cookie should briefly outlive
+@@ -407,8 +404,6 @@ client_x11_get_proto(const char *display, const char *xauth_path,
+ 		unlink(xauthfile);
+ 		rmdir(xauthdir);
+ 	}
+-	free(xauthdir);
+-	free(xauthfile);
+ 
+ 	/*
+ 	 * If we didn't get authentication data, just make up some
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_3.patch b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_3.patch
new file mode 100644
index 0000000..3dfc51a
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_3.patch
@@ -0,0 +1,329 @@
+From ed4ce82dbfa8a3a3c8ea6fa0db113c71e234416c Mon Sep 17 00:00:00 2001
+From: "djm@openbsd.org" <djm@openbsd.org>
+Date: Wed, 13 Jan 2016 23:04:47 +0000
+Subject: [PATCH] upstream commit
+
+eliminate fallback from untrusted X11 forwarding to trusted
+ forwarding when the X server disables the SECURITY extension; Reported by
+ Thomas Hoger; ok deraadt@
+
+Upstream-ID: f76195bd2064615a63ef9674a0e4096b0713f938
+Upstream-Status: Backport
+CVE: CVE-2016-1907
+
+[YOCTO #8935]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ clientloop.c | 114 ++++++++++++++++++++++++++++++++++++-----------------------
+ clientloop.h |   4 +--
+ mux.c        |  22 ++++++------
+ ssh.c        |  23 +++++-------
+ 4 files changed, 93 insertions(+), 70 deletions(-)
+
+Index: openssh-7.1p2/clientloop.c
+===================================================================
+--- openssh-7.1p2.orig/clientloop.c
++++ openssh-7.1p2/clientloop.c
+@@ -1,4 +1,4 @@
+-/* $OpenBSD: clientloop.c,v 1.276 2015/10/20 03:36:35 mmcc Exp $ */
++/* $OpenBSD: clientloop.c,v 1.279 2016/01/13 23:04:47 djm Exp $ */
+ /*
+  * Author: Tatu Ylonen <ylo@cs.hut.fi>
+  * Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland
+@@ -288,6 +288,9 @@ client_x11_display_valid(const char *dis
+ {
+ 	size_t i, dlen;
+ 
++	if (display == NULL)
++		return 0;
++
+ 	dlen = strlen(display);
+ 	for (i = 0; i < dlen; i++) {
+ 		if (!isalnum((u_char)display[i]) &&
+@@ -301,34 +304,33 @@ client_x11_display_valid(const char *dis
+ 
+ #define SSH_X11_PROTO		"MIT-MAGIC-COOKIE-1"
+ #define X11_TIMEOUT_SLACK	60
+-void
++int
+ client_x11_get_proto(const char *display, const char *xauth_path,
+     u_int trusted, u_int timeout, char **_proto, char **_data)
+ {
+-	char cmd[1024];
+-	char line[512];
+-	char xdisplay[512];
++	char cmd[1024], line[512], xdisplay[512];
++	char xauthfile[PATH_MAX], xauthdir[PATH_MAX];
+ 	static char proto[512], data[512];
+ 	FILE *f;
+-	int got_data = 0, generated = 0, do_unlink = 0, i;
+-	char xauthdir[PATH_MAX] = "", xauthfile[PATH_MAX] = "";
++	int got_data = 0, generated = 0, do_unlink = 0, i, r;
+ 	struct stat st;
+ 	u_int now, x11_timeout_real;
+ 
+ 	*_proto = proto;
+ 	*_data = data;
+-	proto[0] = data[0] = '\0';
++	proto[0] = data[0] = xauthfile[0] = xauthdir[0] = '\0';
+ 
+-	if (xauth_path == NULL ||(stat(xauth_path, &st) == -1)) {
+-		debug("No xauth program.");
+-	} else if (!client_x11_display_valid(display)) {
+-		logit("DISPLAY '%s' invalid, falling back to fake xauth data",
++	if (!client_x11_display_valid(display)) {
++		logit("DISPLAY \"%s\" invalid; disabling X11 forwarding",
+ 		    display);
+-	} else {
+-		if (display == NULL) {
+-			debug("x11_get_proto: DISPLAY not set");
+-			return;
+-		}
++		return -1;
++	}
++	if (xauth_path != NULL && stat(xauth_path, &st) == -1) {
++		debug("No xauth program.");
++		xauth_path = NULL;
++	}
++
++	if (xauth_path != NULL) {
+ 		/*
+ 		 * Handle FamilyLocal case where $DISPLAY does
+ 		 * not match an authorization entry.  For this we
+@@ -337,43 +339,60 @@ client_x11_get_proto(const char *display
+ 		 *      is not perfect.
+ 		 */
+ 		if (strncmp(display, "localhost:", 10) == 0) {
+-			snprintf(xdisplay, sizeof(xdisplay), "unix:%s",
+-			    display + 10);
++			if ((r = snprintf(xdisplay, sizeof(xdisplay), "unix:%s",
++			    display + 10)) < 0 ||
++			    (size_t)r >= sizeof(xdisplay)) {
++				error("%s: display name too long", __func__);
++				return -1;
++			}
+ 			display = xdisplay;
+ 		}
+ 		if (trusted == 0) {
+-			mktemp_proto(xauthdir, PATH_MAX);
+ 			/*
++			 * Generate an untrusted X11 auth cookie.
++			 *
+ 			 * The authentication cookie should briefly outlive
+ 			 * ssh's willingness to forward X11 connections to
+ 			 * avoid nasty fail-open behaviour in the X server.
+ 			 */
++			mktemp_proto(xauthdir, sizeof(xauthdir));
++			if (mkdtemp(xauthdir) == NULL) {
++				error("%s: mkdtemp: %s",
++				    __func__, strerror(errno));
++				return -1;
++			}
++			do_unlink = 1;
++			if ((r = snprintf(xauthfile, sizeof(xauthfile),
++			    "%s/xauthfile", xauthdir)) < 0 ||
++			    (size_t)r >= sizeof(xauthfile)) {
++				error("%s: xauthfile path too long", __func__);
++				unlink(xauthfile);
++				rmdir(xauthdir);
++				return -1;
++			}
++
+ 			if (timeout >= UINT_MAX - X11_TIMEOUT_SLACK)
+ 				x11_timeout_real = UINT_MAX;
+ 			else
+ 				x11_timeout_real = timeout + X11_TIMEOUT_SLACK;
+-			if (mkdtemp(xauthdir) != NULL) {
+-				do_unlink = 1;
+-				snprintf(xauthfile, PATH_MAX, "%s/xauthfile",
+-				    xauthdir);
+-				snprintf(cmd, sizeof(cmd),
+-				    "%s -f %s generate %s " SSH_X11_PROTO
+-				    " untrusted timeout %u 2>" _PATH_DEVNULL,
+-				    xauth_path, xauthfile, display,
+-				    x11_timeout_real);
+-				debug2("x11_get_proto: %s", cmd);
+-				if (x11_refuse_time == 0) {
+-					now = monotime() + 1;
+-					if (UINT_MAX - timeout < now)
+-						x11_refuse_time = UINT_MAX;
+-					else
+-						x11_refuse_time = now + timeout;
+-					channel_set_x11_refuse_time(
+-					    x11_refuse_time);
+-				}
+-				if (system(cmd) == 0)
+-					generated = 1;
++			if ((r = snprintf(cmd, sizeof(cmd),
++			    "%s -f %s generate %s " SSH_X11_PROTO
++			    " untrusted timeout %u 2>" _PATH_DEVNULL,
++			    xauth_path, xauthfile, display,
++			    x11_timeout_real)) < 0 ||
++			    (size_t)r >= sizeof(cmd))
++				fatal("%s: cmd too long", __func__);
++			debug2("%s: %s", __func__, cmd);
++			if (x11_refuse_time == 0) {
++				now = monotime() + 1;
++				if (UINT_MAX - timeout < now)
++					x11_refuse_time = UINT_MAX;
++				else
++					x11_refuse_time = now + timeout;
++				channel_set_x11_refuse_time(x11_refuse_time);
+ 			}
++			if (system(cmd) == 0)
++				generated = 1;
+ 		}
+ 
+ 		/*
+@@ -395,9 +414,7 @@ client_x11_get_proto(const char *display
+ 				got_data = 1;
+ 			if (f)
+ 				pclose(f);
+-		} else
+-			error("Warning: untrusted X11 forwarding setup failed: "
+-			    "xauth key data not generated");
++		}
+ 	}
+ 
+ 	if (do_unlink) {
+@@ -405,6 +422,13 @@ client_x11_get_proto(const char *display
+ 		rmdir(xauthdir);
+ 	}
+ 
++	/* Don't fall back to fake X11 data for untrusted forwarding */
++	if (!trusted && !got_data) {
++		error("Warning: untrusted X11 forwarding setup failed: "
++		    "xauth key data not generated");
++		return -1;
++	}
++
+ 	/*
+ 	 * If we didn't get authentication data, just make up some
+ 	 * data.  The forwarding code will check the validity of the
+@@ -427,6 +451,8 @@ client_x11_get_proto(const char *display
+ 			rnd >>= 8;
+ 		}
+ 	}
++
++	return 0;
+ }
+ 
+ /*
+Index: openssh-7.1p2/clientloop.h
+===================================================================
+--- openssh-7.1p2.orig/clientloop.h
++++ openssh-7.1p2/clientloop.h
+@@ -1,4 +1,4 @@
+-/* $OpenBSD: clientloop.h,v 1.31 2013/06/02 23:36:29 dtucker Exp $ */
++/* $OpenBSD: clientloop.h,v 1.32 2016/01/13 23:04:47 djm Exp $ */
+ 
+ /*
+  * Author: Tatu Ylonen <ylo@cs.hut.fi>
+@@ -39,7 +39,7 @@
+ 
+ /* Client side main loop for the interactive session. */
+ int	 client_loop(int, int, int);
+-void	 client_x11_get_proto(const char *, const char *, u_int, u_int,
++int	 client_x11_get_proto(const char *, const char *, u_int, u_int,
+ 	    char **, char **);
+ void	 client_global_request_reply_fwd(int, u_int32_t, void *);
+ void	 client_session2_setup(int, int, int, const char *, struct termios *,
+Index: openssh-7.1p2/mux.c
+===================================================================
+--- openssh-7.1p2.orig/mux.c
++++ openssh-7.1p2/mux.c
+@@ -1,4 +1,4 @@
+-/* $OpenBSD: mux.c,v 1.54 2015/08/19 23:18:26 djm Exp $ */
++/* $OpenBSD: mux.c,v 1.58 2016/01/13 23:04:47 djm Exp $ */
+ /*
+  * Copyright (c) 2002-2008 Damien Miller <djm@openbsd.org>
+  *
+@@ -1354,16 +1354,18 @@ mux_session_confirm(int id, int success,
+ 		char *proto, *data;
+ 
+ 		/* Get reasonable local authentication information. */
+-		client_x11_get_proto(display, options.xauth_location,
++		if (client_x11_get_proto(display, options.xauth_location,
+ 		    options.forward_x11_trusted, options.forward_x11_timeout,
+-		    &proto, &data);
+-		/* Request forwarding with authentication spoofing. */
+-		debug("Requesting X11 forwarding with authentication "
+-		    "spoofing.");
+-		x11_request_forwarding_with_spoofing(id, display, proto,
+-		    data, 1);
+-		client_expect_confirm(id, "X11 forwarding", CONFIRM_WARN);
+-		/* XXX exit_on_forward_failure */
++		    &proto, &data) == 0) {
++			/* Request forwarding with authentication spoofing. */
++			debug("Requesting X11 forwarding with authentication "
++			    "spoofing.");
++			x11_request_forwarding_with_spoofing(id, display, proto,
++			    data, 1);
++			/* XXX exit_on_forward_failure */
++			client_expect_confirm(id, "X11 forwarding",
++			    CONFIRM_WARN);
++		}
+ 	}
+ 
+ 	if (cctx->want_agent_fwd && options.forward_agent) {
+Index: openssh-7.1p2/ssh.c
+===================================================================
+--- openssh-7.1p2.orig/ssh.c
++++ openssh-7.1p2/ssh.c
+@@ -1,4 +1,4 @@
+-/* $OpenBSD: ssh.c,v 1.420 2015/07/30 00:01:34 djm Exp $ */
++/* $OpenBSD: ssh.c,v 1.433 2016/01/13 23:04:47 djm Exp $ */
+ /*
+  * Author: Tatu Ylonen <ylo@cs.hut.fi>
+  * Copyright (c) 1995 Tatu Ylonen <ylo@cs.hut.fi>, Espoo, Finland
+@@ -1604,6 +1604,7 @@ ssh_session(void)
+ 	struct winsize ws;
+ 	char *cp;
+ 	const char *display;
++	char *proto = NULL, *data = NULL;
+ 
+ 	/* Enable compression if requested. */
+ 	if (options.compression) {
+@@ -1674,13 +1675,9 @@ ssh_session(void)
+ 	display = getenv("DISPLAY");
+ 	if (display == NULL && options.forward_x11)
+ 		debug("X11 forwarding requested but DISPLAY not set");
+-	if (options.forward_x11 && display != NULL) {
+-		char *proto, *data;
+-		/* Get reasonable local authentication information. */
+-		client_x11_get_proto(display, options.xauth_location,
+-		    options.forward_x11_trusted,
+-		    options.forward_x11_timeout,
+-		    &proto, &data);
++	if (options.forward_x11 && client_x11_get_proto(display,
++	    options.xauth_location, options.forward_x11_trusted,
++	    options.forward_x11_timeout, &proto, &data) == 0) {
+ 		/* Request forwarding with authentication spoofing. */
+ 		debug("Requesting X11 forwarding with authentication "
+ 		    "spoofing.");
+@@ -1770,6 +1767,7 @@ ssh_session2_setup(int id, int success,
+ 	extern char **environ;
+ 	const char *display;
+ 	int interactive = tty_flag;
++	char *proto = NULL, *data = NULL;
+ 
+ 	if (!success)
+ 		return; /* No need for error message, channels code sens one */
+@@ -1777,12 +1775,9 @@ ssh_session2_setup(int id, int success,
+ 	display = getenv("DISPLAY");
+ 	if (display == NULL && options.forward_x11)
+ 		debug("X11 forwarding requested but DISPLAY not set");
+-	if (options.forward_x11 && display != NULL) {
+-		char *proto, *data;
+-		/* Get reasonable local authentication information. */
+-		client_x11_get_proto(display, options.xauth_location,
+-		    options.forward_x11_trusted,
+-		    options.forward_x11_timeout, &proto, &data);
++	if (options.forward_x11 && client_x11_get_proto(display,
++	    options.xauth_location, options.forward_x11_trusted,
++	    options.forward_x11_timeout, &proto, &data) == 0) {
+ 		/* Request forwarding with authentication spoofing. */
+ 		debug("Requesting X11 forwarding with authentication "
+ 		    "spoofing.");
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_upstream_commit.patch b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_upstream_commit.patch
new file mode 100644
index 0000000..f3d132e
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh/CVE-2016-1907_upstream_commit.patch
@@ -0,0 +1,33 @@
+From d77148e3a3ef6c29b26ec74331455394581aa257 Mon Sep 17 00:00:00 2001
+From: "djm@openbsd.org" <djm@openbsd.org>
+Date: Sun, 8 Nov 2015 21:59:11 +0000
+Subject: [PATCH] upstream commit
+
+fix OOB read in packet code caused by missing return
+ statement found by Ben Hawkes; ok markus@ deraadt@
+
+Upstream-ID: a3e3a85434ebfa0690d4879091959591f30efc62
+
+Upstream-Status: Backport
+CVE: CVE-2016-1907
+
+[YOCTO #8935]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ packet.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+Index: openssh-7.1p2/packet.c
+===================================================================
+--- openssh-7.1p2.orig/packet.c
++++ openssh-7.1p2/packet.c
+@@ -1855,6 +1855,7 @@ ssh_packet_process_incoming(struct ssh *
+ 		if (len >= state->packet_discard) {
+ 			if ((r = ssh_packet_stop_discard(ssh)) != 0)
+ 				return r;
++			return SSH_ERR_CONN_CORRUPT;
+ 		}
+ 		state->packet_discard -= len;
+ 		return 0;
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd@.service b/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd@.service
index bb2d68e..9d83dfb 100644
--- a/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd@.service
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd@.service
@@ -4,7 +4,9 @@
 After=sshdgenkeys.service
 
 [Service]
-ExecStart=-@SBINDIR@/sshd -i
+Environment="SSHD_OPTS="
+EnvironmentFile=-/etc/default/ssh
+ExecStart=-@SBINDIR@/sshd -i $SSHD_OPTS
 ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
 StandardInput=socket
 StandardError=syslog
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service b/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service
index d65086f..148e6ad 100644
--- a/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service
@@ -1,11 +1,22 @@
 [Unit]
 Description=OpenSSH Key Generation
-ConditionPathExists=|!/etc/ssh/ssh_host_rsa_key
-ConditionPathExists=|!/etc/ssh/ssh_host_dsa_key
-ConditionPathExists=|!/etc/ssh/ssh_host_ecdsa_key
-ConditionPathExists=|!/etc/ssh/ssh_host_ed25519_key
+RequiresMountsFor=/var /run
+ConditionPathExists=!/var/run/ssh/ssh_host_rsa_key
+ConditionPathExists=!/var/run/ssh/ssh_host_dsa_key
+ConditionPathExists=!/var/run/ssh/ssh_host_ecdsa_key
+ConditionPathExists=!/var/run/ssh/ssh_host_ed25519_key
+ConditionPathExists=!/etc/ssh/ssh_host_rsa_key
+ConditionPathExists=!/etc/ssh/ssh_host_dsa_key
+ConditionPathExists=!/etc/ssh/ssh_host_ecdsa_key
+ConditionPathExists=!/etc/ssh/ssh_host_ed25519_key
 
 [Service]
-ExecStart=@BINDIR@/ssh-keygen -A
+Environment="SYSCONFDIR=/etc/ssh"
+EnvironmentFile=-/etc/default/ssh
+ExecStart=@BASE_BINDIR@/mkdir -p $SYSCONFDIR
+ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_rsa_key -N '' -t rsa
+ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_dsa_key -N '' -t dsa
+ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_ecdsa_key -N '' -t ecdsa
+ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_ed25519_key -N '' -t ed25519
 Type=oneshot
 RemainAfterExit=yes
diff --git a/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p1.bb b/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p2.bb
similarity index 93%
rename from yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p1.bb
rename to yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p2.bb
index eeeb4b4..714c391 100644
--- a/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p1.bb
+++ b/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.1p2.bb
@@ -20,12 +20,15 @@
            file://sshdgenkeys.service \
            file://volatiles.99_sshd \
            file://add-test-support-for-busybox.patch \
-           file://run-ptest"
+           file://run-ptest \
+           file://CVE-2016-1907_upstream_commit.patch \
+           file://CVE-2016-1907_2.patch \
+           file://CVE-2016-1907_3.patch "
 
 PAM_SRC_URI = "file://sshd"
 
-SRC_URI[md5sum] = "8709736bc8a8c253bc4eeb4829888ca5"
-SRC_URI[sha256sum] = "fc0a6d2d1d063d5c66dffd952493d0cda256cad204f681de0f84ef85b2ad8428"
+SRC_URI[md5sum] = "4d8547670e2a220d5ef805ad9e47acf2"
+SRC_URI[sha256sum] = "dd75f024dcf21e06a0d6421d582690bf987a1f6323e32ad6619392f3bfde6bbd"
 
 inherit useradd update-rc.d update-alternatives systemd
 
@@ -87,7 +90,7 @@
 
 do_install_append () {
 	if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then
-		install -D -m 0755 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd
+		install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd
 		sed -i -e 's:#UsePAM no:UsePAM yes:' ${WORKDIR}/sshd_config ${D}${sysconfdir}/ssh/sshd_config
 	fi
 
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc b/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc
index 53dcfd9..8af423f 100644
--- a/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc
@@ -118,7 +118,7 @@
         linux-*-mips64)
                target=linux-mips
                 ;;
-	linux-microblaze*)
+	linux-microblaze*|linux-nios2*)
 		target=linux-generic32
 		;;
 	linux-powerpc)
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Add-test-for-CVE-2015-3194.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Add-test-for-CVE-2015-3194.patch
new file mode 100644
index 0000000..39a2e5a
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Add-test-for-CVE-2015-3194.patch
@@ -0,0 +1,66 @@
+From 00456fded43eadd4bb94bf675ae4ea5d158a764f Mon Sep 17 00:00:00 2001
+From: "Dr. Stephen Henson" <steve@openssl.org>
+Date: Wed, 4 Nov 2015 13:30:03 +0000
+Subject: [PATCH] Add test for CVE-2015-3194
+
+Reviewed-by: Richard Levitte <levitte@openssl.org>
+
+Upstream-Status: Backport
+
+This patch was imported from 
+https://git.openssl.org/?p=openssl.git;a=commit;h=00456fded43eadd4bb94bf675ae4ea5d158a764f
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ test/certs/pss1.pem | 21 +++++++++++++++++++++
+ test/tx509          |  7 +++++++
+ 2 files changed, 28 insertions(+)
+ create mode 100644 test/certs/pss1.pem
+
+diff --git a/test/certs/pss1.pem b/test/certs/pss1.pem
+new file mode 100644
+index 0000000..29da71d
+--- /dev/null
++++ b/test/certs/pss1.pem
+@@ -0,0 +1,21 @@
++-----BEGIN CERTIFICATE-----
++MIIDdjCCAjqgAwIBAgIJANcwZLyfEv7DMD4GCSqGSIb3DQEBCjAxoA0wCwYJYIZI
++AWUDBAIBoRowGAYJKoZIhvcNAQEIMAsGCWCGSAFlAwQCAaIEAgIA3jAnMSUwIwYD
++VQQDDBxUZXN0IEludmFsaWQgUFNTIGNlcnRpZmljYXRlMB4XDTE1MTEwNDE2MDIz
++NVoXDTE1MTIwNDE2MDIzNVowJzElMCMGA1UEAwwcVGVzdCBJbnZhbGlkIFBTUyBj
++ZXJ0aWZpY2F0ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMTaM7WH
++qVCAGAIA+zL1KWvvASTrhlq+1ePdO7wsrWX2KiYoTYrJYTnxhLnn0wrHqApt79nL
++IBG7cfShyZqFHOY/IzlYPMVt+gPo293gw96Fds5JBsjhjkyGnOyr9OUntFqvxDbT
++IIFU7o9IdxD4edaqjRv+fegVE+B79pDk4s0ujsk6dULtCg9Rst0ucGFo19mr+b7k
++dbfn8pZ72ZNDJPueVdrUAWw9oll61UcYfk75XdrLk6JlL41GrYHc8KlfXf43gGQq
++QfrpHkg4Ih2cI6Wt2nhFGAzrlcorzLliQIUJRIhM8h4IgDfpBpaPdVQLqS2pFbXa
++5eQjqiyJwak2vJ8CAwEAAaNQME4wHQYDVR0OBBYEFCt180N4oGUt5LbzBwQ4Ia+2
++4V97MB8GA1UdIwQYMBaAFCt180N4oGUt5LbzBwQ4Ia+24V97MAwGA1UdEwQFMAMB
++Af8wMQYJKoZIhvcNAQEKMCSgDTALBglghkgBZQMEAgGhDTALBgkqhkiG9w0BAQii
++BAICAN4DggEBAAjBtm90lGxgddjc4Xu/nbXXFHVs2zVcHv/mqOZoQkGB9r/BVgLb
++xhHrFZ2pHGElbUYPfifdS9ztB73e1d4J+P29o0yBqfd4/wGAc/JA8qgn6AAEO/Xn
++plhFeTRJQtLZVl75CkHXgUGUd3h+ADvKtcBuW9dSUncaUrgNKR8u/h/2sMG38RWY
++DzBddC/66YTa3r7KkVUfW7yqRQfELiGKdcm+bjlTEMsvS+EhHup9CzbpoCx2Fx9p
++NPtFY3yEObQhmL1JyoCRWqBE75GzFPbRaiux5UpEkns+i3trkGssZzsOuVqHNTNZ
++lC9+9hPHIoc9UMmAQNo1vGIW3NWVoeGbaJ8=
++-----END CERTIFICATE-----
+diff --git a/test/tx509 b/test/tx509
+index 0ce3b52..77f5cac 100644
+--- a/test/tx509
++++ b/test/tx509
+@@ -74,5 +74,12 @@ if [ $? != 0 ]; then exit 1; fi
+ cmp x509-f.p x509-ff.p3
+ if [ $? != 0 ]; then exit 1; fi
+ 
++echo "Parsing test certificates"
++
++$cmd -in certs/pss1.pem -text -noout >/dev/null
++if [ $? != 0 ]; then exit 1; fi
++
++echo OK
++
+ /bin/rm -f x509-f.* x509-ff.* x509-fff.*
+ exit 0
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3193-bn-asm-x86_64-mont5.pl-fix-carry-propagating-bug-CVE.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3193-bn-asm-x86_64-mont5.pl-fix-carry-propagating-bug-CVE.patch
new file mode 100644
index 0000000..125016a
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3193-bn-asm-x86_64-mont5.pl-fix-carry-propagating-bug-CVE.patch
@@ -0,0 +1,101 @@
+From d73cc256c8e256c32ed959456101b73ba9842f72 Mon Sep 17 00:00:00 2001
+From: Andy Polyakov <appro@openssl.org>
+Date: Tue, 1 Dec 2015 09:00:32 +0100
+Subject: [PATCH] bn/asm/x86_64-mont5.pl: fix carry propagating bug
+ (CVE-2015-3193).
+
+Reviewed-by: Richard Levitte <levitte@openssl.org>
+(cherry picked from commit e7c078db57908cbf16074c68034977565ffaf107)
+
+Upstream-Status: Backport
+
+This patch was imported from 
+https://git.openssl.org/?p=openssl.git;a=commit;h=d73cc256c8e256c32ed959456101b73ba9842f72
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ crypto/bn/asm/x86_64-mont5.pl | 22 +++++++++++++++++++---
+ crypto/bn/bntest.c            | 18 ++++++++++++++++++
+ 2 files changed, 37 insertions(+), 3 deletions(-)
+
+Index: openssl-1.0.2d/crypto/bn/asm/x86_64-mont5.pl
+===================================================================
+--- openssl-1.0.2d.orig/crypto/bn/asm/x86_64-mont5.pl
++++ openssl-1.0.2d/crypto/bn/asm/x86_64-mont5.pl
+@@ -1779,6 +1779,15 @@ sqr8x_reduction:
+ .align	32
+ .L8x_tail_done:
+ 	add	(%rdx),%r8		# can this overflow?
++	adc	\$0,%r9
++	adc	\$0,%r10
++	adc	\$0,%r11
++	adc	\$0,%r12
++	adc	\$0,%r13
++	adc	\$0,%r14
++	adc	\$0,%r15		# can't overflow, because we
++					# started with "overhung" part
++					# of multiplication
+ 	xor	%rax,%rax
+ 
+ 	neg	$carry
+@@ -3125,6 +3134,15 @@ sqrx8x_reduction:
+ .align	32
+ .Lsqrx8x_tail_done:
+ 	add	24+8(%rsp),%r8		# can this overflow?
++	adc	\$0,%r9
++	adc	\$0,%r10
++	adc	\$0,%r11
++	adc	\$0,%r12
++	adc	\$0,%r13
++	adc	\$0,%r14
++	adc	\$0,%r15		# can't overflow, because we
++					# started with "overhung" part
++					# of multiplication
+ 	mov	$carry,%rax		# xor	%rax,%rax
+ 
+ 	sub	16+8(%rsp),$carry	# mov 16(%rsp),%cf
+@@ -3168,13 +3186,11 @@ my ($rptr,$nptr)=("%rdx","%rbp");
+ my @ri=map("%r$_",(10..13));
+ my @ni=map("%r$_",(14..15));
+ $code.=<<___;
+-	xor	%rbx,%rbx
++	xor	%ebx,%ebx
+ 	sub	%r15,%rsi		# compare top-most words
+ 	adc	%rbx,%rbx
+ 	mov	%rcx,%r10		# -$num
+-	.byte	0x67
+ 	or	%rbx,%rax
+-	.byte	0x67
+ 	mov	%rcx,%r9		# -$num
+ 	xor	\$1,%rax
+ 	sar	\$3+2,%rcx		# cf=0
+Index: openssl-1.0.2d/crypto/bn/bntest.c
+===================================================================
+--- openssl-1.0.2d.orig/crypto/bn/bntest.c
++++ openssl-1.0.2d/crypto/bn/bntest.c
+@@ -1027,6 +1027,24 @@ int test_mod_exp_mont_consttime(BIO *bp,
+             return 0;
+         }
+     }
++
++    /* Regression test for carry propagation bug in sqr8x_reduction */
++    BN_hex2bn(&a, "050505050505");
++    BN_hex2bn(&b, "02");
++    BN_hex2bn(&c,
++        "4141414141414141414141274141414141414141414141414141414141414141"
++        "4141414141414141414141414141414141414141414141414141414141414141"
++        "4141414141414141414141800000000000000000000000000000000000000000"
++        "0000000000000000000000000000000000000000000000000000000000000000"
++        "0000000000000000000000000000000000000000000000000000000000000000"
++        "0000000000000000000000000000000000000000000000000000000001");
++    BN_mod_exp(d, a, b, c, ctx);
++    BN_mul(e, a, a, ctx);
++    if (BN_cmp(d, e)) {
++        fprintf(stderr, "BN_mod_exp and BN_mul produce different results!\n");
++        return 0;
++    }
++
+     BN_free(a);
+     BN_free(b);
+     BN_free(c);
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3194-1-Add-PSS-parameter-check.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3194-1-Add-PSS-parameter-check.patch
new file mode 100644
index 0000000..13d4891
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3194-1-Add-PSS-parameter-check.patch
@@ -0,0 +1,45 @@
+From c394a488942387246653833359a5c94b5832674e Mon Sep 17 00:00:00 2001
+From: "Dr. Stephen Henson" <steve@openssl.org>
+Date: Fri, 2 Oct 2015 12:35:19 +0100
+Subject: [PATCH] Add PSS parameter check.
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Avoid seg fault by checking mgf1 parameter is not NULL. This can be
+triggered during certificate verification so could be a DoS attack
+against a client or a server enabling client authentication.
+
+Thanks to Loïc Jonas Etienne (Qnective AG) for discovering this bug.
+
+CVE-2015-3194
+
+Reviewed-by: Richard Levitte <levitte@openssl.org>
+
+Upstream-Status: Backport
+
+This patch was imported from 
+https://git.openssl.org/?p=openssl.git;a=commit;h=c394a488942387246653833359a5c94b5832674e
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ crypto/rsa/rsa_ameth.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/crypto/rsa/rsa_ameth.c b/crypto/rsa/rsa_ameth.c
+index ca3922e..4e06218 100644
+--- a/crypto/rsa/rsa_ameth.c
++++ b/crypto/rsa/rsa_ameth.c
+@@ -268,7 +268,7 @@ static X509_ALGOR *rsa_mgf1_decode(X509_ALGOR *alg)
+ {
+     const unsigned char *p;
+     int plen;
+-    if (alg == NULL)
++    if (alg == NULL || alg->parameter == NULL)
+         return NULL;
+     if (OBJ_obj2nid(alg->algorithm) != NID_mgf1)
+         return NULL;
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3195-Fix-leak-with-ASN.1-combine.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3195-Fix-leak-with-ASN.1-combine.patch
new file mode 100644
index 0000000..6fc4d0e
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3195-Fix-leak-with-ASN.1-combine.patch
@@ -0,0 +1,66 @@
+From cc598f321fbac9c04da5766243ed55d55948637d Mon Sep 17 00:00:00 2001
+From: "Dr. Stephen Henson" <steve@openssl.org>
+Date: Tue, 10 Nov 2015 19:03:07 +0000
+Subject: [PATCH] Fix leak with ASN.1 combine.
+
+When parsing a combined structure pass a flag to the decode routine
+so on error a pointer to the parent structure is not zeroed as
+this will leak any additional components in the parent.
+
+This can leak memory in any application parsing PKCS#7 or CMS structures.
+
+CVE-2015-3195.
+
+Thanks to Adam Langley (Google/BoringSSL) for discovering this bug using
+libFuzzer.
+
+PR#4131
+
+Reviewed-by: Richard Levitte <levitte@openssl.org>
+
+Upstream-Status: Backport
+
+This patch was imported from
+https://git.openssl.org/?p=openssl.git;a=commit;h=cc598f321fbac9c04da5766243ed55d55948637d
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ crypto/asn1/tasn_dec.c | 7 +++++--
+ 1 file changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/crypto/asn1/tasn_dec.c b/crypto/asn1/tasn_dec.c
+index febf605..9256049 100644
+--- a/crypto/asn1/tasn_dec.c
++++ b/crypto/asn1/tasn_dec.c
+@@ -180,6 +180,8 @@ int ASN1_item_ex_d2i(ASN1_VALUE **pval, const unsigned char **in, long len,
+     int otag;
+     int ret = 0;
+     ASN1_VALUE **pchptr, *ptmpval;
++    int combine = aclass & ASN1_TFLG_COMBINE;
++    aclass &= ~ASN1_TFLG_COMBINE;
+     if (!pval)
+         return 0;
+     if (aux && aux->asn1_cb)
+@@ -500,7 +502,8 @@ int ASN1_item_ex_d2i(ASN1_VALUE **pval, const unsigned char **in, long len,
+  auxerr:
+     ASN1err(ASN1_F_ASN1_ITEM_EX_D2I, ASN1_R_AUX_ERROR);
+  err:
+-    ASN1_item_ex_free(pval, it);
++    if (combine == 0)
++        ASN1_item_ex_free(pval, it);
+     if (errtt)
+         ERR_add_error_data(4, "Field=", errtt->field_name,
+                            ", Type=", it->sname);
+@@ -689,7 +692,7 @@ static int asn1_template_noexp_d2i(ASN1_VALUE **val,
+     } else {
+         /* Nothing special */
+         ret = ASN1_item_ex_d2i(val, &p, len, ASN1_ITEM_ptr(tt->item),
+-                               -1, 0, opt, ctx);
++                               -1, tt->flags & ASN1_TFLG_COMBINE, opt, ctx);
+         if (!ret) {
+             ASN1err(ASN1_F_ASN1_TEMPLATE_NOEXP_D2I, ERR_R_NESTED_ASN1_ERROR);
+             goto err;
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3197.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3197.patch
new file mode 100644
index 0000000..dd288c9
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2015-3197.patch
@@ -0,0 +1,63 @@
+From d81a1600588b726c2bdccda7efad3cc7a87d6245 Mon Sep 17 00:00:00 2001
+From: Viktor Dukhovni <openssl-users@dukhovni.org>
+Date: Wed, 30 Dec 2015 22:44:51 -0500
+Subject: [PATCH] Better SSLv2 cipher-suite enforcement
+
+Based on patch by: Nimrod Aviram <nimrod.aviram@gmail.com>
+
+CVE-2015-3197
+
+Reviewed-by: Tim Hudson <tjh@openssl.org>
+Reviewed-by: Richard Levitte <levitte@openssl.org>
+
+Upstream-Status: Backport
+https://github.com/openssl/openssl/commit/d81a1600588b726c2bdccda7efad3cc7a87d6245
+
+CVE: CVE-2015-3197
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ssl/s2_srvr.c | 15 +++++++++++++--
+ 1 file changed, 13 insertions(+), 2 deletions(-)
+
+Index: openssl-1.0.2d/ssl/s2_srvr.c
+===================================================================
+--- openssl-1.0.2d.orig/ssl/s2_srvr.c
++++ openssl-1.0.2d/ssl/s2_srvr.c
+@@ -402,7 +402,7 @@ static int get_client_master_key(SSL *s)
+         }
+ 
+         cp = ssl2_get_cipher_by_char(p);
+-        if (cp == NULL) {
++        if (cp == NULL || sk_SSL_CIPHER_find(s->session->ciphers, cp) < 0) {
+             ssl2_return_error(s, SSL2_PE_NO_CIPHER);
+             SSLerr(SSL_F_GET_CLIENT_MASTER_KEY, SSL_R_NO_CIPHER_MATCH);
+             return (-1);
+@@ -687,8 +687,12 @@ static int get_client_hello(SSL *s)
+             prio = cs;
+             allow = cl;
+         }
++
++        /* Generate list of SSLv2 ciphers shared between client and server */
+         for (z = 0; z < sk_SSL_CIPHER_num(prio); z++) {
+-            if (sk_SSL_CIPHER_find(allow, sk_SSL_CIPHER_value(prio, z)) < 0) {
++            const SSL_CIPHER *cp = sk_SSL_CIPHER_value(prio, z);
++            if ((cp->algorithm_ssl & SSL_SSLV2) == 0 ||
++                sk_SSL_CIPHER_find(allow, cp) < 0) {
+                 (void)sk_SSL_CIPHER_delete(prio, z);
+                 z--;
+             }
+@@ -697,6 +701,13 @@ static int get_client_hello(SSL *s)
+             sk_SSL_CIPHER_free(s->session->ciphers);
+             s->session->ciphers = prio;
+         }
++
++        /* Make sure we have at least one cipher in common */
++        if (sk_SSL_CIPHER_num(s->session->ciphers) == 0) {
++            ssl2_return_error(s, SSL2_PE_NO_CIPHER);
++            SSLerr(SSL_F_GET_CLIENT_HELLO, SSL_R_NO_CIPHER_MATCH);
++            return -1;
++        }
+         /*
+          * s->session->ciphers should now have a list of ciphers that are on
+          * both the client and server. This list is ordered by the order the
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_1.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_1.patch
new file mode 100644
index 0000000..cf2d9a7
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_1.patch
@@ -0,0 +1,102 @@
+From 878e2c5b13010329c203f309ed0c8f2113f85648 Mon Sep 17 00:00:00 2001
+From: Matt Caswell <matt@openssl.org>
+Date: Mon, 18 Jan 2016 11:31:58 +0000
+Subject: [PATCH] Prevent small subgroup attacks on DH/DHE
+
+Historically OpenSSL only ever generated DH parameters based on "safe"
+primes. More recently (in version 1.0.2) support was provided for
+generating X9.42 style parameter files such as those required for RFC
+5114 support. The primes used in such files may not be "safe". Where an
+application is using DH configured with parameters based on primes that
+are not "safe" then an attacker could use this fact to find a peer's
+private DH exponent. This attack requires that the attacker complete
+multiple handshakes in which the peer uses the same DH exponent.
+
+A simple mitigation is to ensure that y^q (mod p) == 1
+
+CVE-2016-0701 (fix part 1 of 2)
+
+Issue reported by Antonio Sanso.
+
+Reviewed-by: Viktor Dukhovni <viktor@openssl.org>
+
+Upstream-Status: Backport
+
+https://github.com/openssl/openssl/commit/878e2c5b13010329c203f309ed0c8f2113f85648
+
+CVE: CVE-2016-0701
+Signed-of-by: Armin Kuster <akuster@mvisa.com>
+
+---
+ crypto/dh/dh.h       |  1 +
+ crypto/dh/dh_check.c | 35 +++++++++++++++++++++++++----------
+ 2 files changed, 26 insertions(+), 10 deletions(-)
+
+diff --git a/crypto/dh/dh.h b/crypto/dh/dh.h
+index b177673..5498a9d 100644
+--- a/crypto/dh/dh.h
++++ b/crypto/dh/dh.h
+@@ -174,6 +174,7 @@ struct dh_st {
+ /* DH_check_pub_key error codes */
+ # define DH_CHECK_PUBKEY_TOO_SMALL       0x01
+ # define DH_CHECK_PUBKEY_TOO_LARGE       0x02
++# define DH_CHECK_PUBKEY_INVALID         0x03
+ 
+ /*
+  * primes p where (p-1)/2 is prime too are called "safe"; we define this for
+diff --git a/crypto/dh/dh_check.c b/crypto/dh/dh_check.c
+index 347467c..5adedc0 100644
+--- a/crypto/dh/dh_check.c
++++ b/crypto/dh/dh_check.c
+@@ -151,23 +151,38 @@ int DH_check(const DH *dh, int *ret)
+ int DH_check_pub_key(const DH *dh, const BIGNUM *pub_key, int *ret)
+ {
+     int ok = 0;
+-    BIGNUM *q = NULL;
++    BIGNUM *tmp = NULL;
++    BN_CTX *ctx = NULL;
+ 
+     *ret = 0;
+-    q = BN_new();
+-    if (q == NULL)
++    ctx = BN_CTX_new();
++    if (ctx == NULL)
+         goto err;
+-    BN_set_word(q, 1);
+-    if (BN_cmp(pub_key, q) <= 0)
++    BN_CTX_start(ctx);
++    tmp = BN_CTX_get(ctx);
++    if (tmp == NULL)
++        goto err;
++    BN_set_word(tmp, 1);
++    if (BN_cmp(pub_key, tmp) <= 0)
+         *ret |= DH_CHECK_PUBKEY_TOO_SMALL;
+-    BN_copy(q, dh->p);
+-    BN_sub_word(q, 1);
+-    if (BN_cmp(pub_key, q) >= 0)
++    BN_copy(tmp, dh->p);
++    BN_sub_word(tmp, 1);
++    if (BN_cmp(pub_key, tmp) >= 0)
+         *ret |= DH_CHECK_PUBKEY_TOO_LARGE;
+ 
++    if (dh->q != NULL) {
++        /* Check pub_key^q == 1 mod p */
++        if (!BN_mod_exp(tmp, pub_key, dh->q, dh->p, ctx))
++            goto err;
++        if (!BN_is_one(tmp))
++            *ret |= DH_CHECK_PUBKEY_INVALID;
++    }
++
+     ok = 1;
+  err:
+-    if (q != NULL)
+-        BN_free(q);
++    if (ctx != NULL) {
++        BN_CTX_end(ctx);
++        BN_CTX_free(ctx);
++    }
+     return (ok);
+ }
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_2.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_2.patch
new file mode 100644
index 0000000..05caf0a
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-0701_2.patch
@@ -0,0 +1,156 @@
+From c5b831f21d0d29d1e517d139d9d101763f60c9a2 Mon Sep 17 00:00:00 2001
+From: Matt Caswell <matt@openssl.org>
+Date: Thu, 17 Dec 2015 02:57:20 +0000
+Subject: [PATCH] Always generate DH keys for ephemeral DH cipher suites
+
+Modified version of the commit ffaef3f15 in the master branch by Stephen
+Henson. This makes the SSL_OP_SINGLE_DH_USE option a no-op and always
+generates a new DH key for every handshake regardless.
+
+CVE-2016-0701 (fix part 2 or 2)
+
+Issue reported by Antonio Sanso
+
+Reviewed-by: Viktor Dukhovni <viktor@openssl.org>
+
+Upstream-Status: Backport
+
+https://github.com/openssl/openssl/commit/c5b831f21d0d29d1e517d139d9d101763f60c9a2
+
+CVE: CVE-2016-0701 #2
+Signed-of-by: Armin Kuster <akuster@mvisa.com>
+
+---
+ doc/ssl/SSL_CTX_set_tmp_dh_callback.pod | 29 +++++------------------------
+ ssl/s3_lib.c                            | 14 --------------
+ ssl/s3_srvr.c                           | 17 +++--------------
+ ssl/ssl.h                               |  2 +-
+ 4 files changed, 9 insertions(+), 53 deletions(-)
+
+Index: openssl-1.0.2d/doc/ssl/SSL_CTX_set_tmp_dh_callback.pod
+===================================================================
+--- openssl-1.0.2d.orig/doc/ssl/SSL_CTX_set_tmp_dh_callback.pod
++++ openssl-1.0.2d/doc/ssl/SSL_CTX_set_tmp_dh_callback.pod
+@@ -48,25 +48,8 @@ even if he gets hold of the normal (cert
+ only used for signing.
+ 
+ In order to perform a DH key exchange the server must use a DH group
+-(DH parameters) and generate a DH key.
+-The server will always generate a new DH key during the negotiation
+-if either the DH parameters are supplied via callback or the
+-SSL_OP_SINGLE_DH_USE option of SSL_CTX_set_options(3) is set (or both).
+-It will  immediately create a DH key if DH parameters are supplied via
+-SSL_CTX_set_tmp_dh() and SSL_OP_SINGLE_DH_USE is not set.
+-In this case,
+-it may happen that a key is generated on initialization without later
+-being needed, while on the other hand the computer time during the
+-negotiation is being saved.
+-
+-If "strong" primes were used to generate the DH parameters, it is not strictly
+-necessary to generate a new key for each handshake but it does improve forward
+-secrecy. If it is not assured that "strong" primes were used,
+-SSL_OP_SINGLE_DH_USE must be used in order to prevent small subgroup
+-attacks. Always using SSL_OP_SINGLE_DH_USE has an impact on the
+-computer time needed during negotiation, but it is not very large, so
+-application authors/users should consider always enabling this option.
+-The option is required to implement perfect forward secrecy (PFS).
++(DH parameters) and generate a DH key. The server will always generate
++a new DH key during the negotiation.
+ 
+ As generating DH parameters is extremely time consuming, an application
+ should not generate the parameters on the fly but supply the parameters.
+@@ -93,10 +76,9 @@ can supply the DH parameters via a callb
+ Previous versions of the callback used B<is_export> and B<keylength>
+ parameters to control parameter generation for export and non-export
+ cipher suites. Modern servers that do not support export ciphersuites
+-are advised to either use SSL_CTX_set_tmp_dh() in combination with
+-SSL_OP_SINGLE_DH_USE, or alternatively, use the callback but ignore
+-B<keylength> and B<is_export> and simply supply at least 2048-bit
+-parameters in the callback.
++are advised to either use SSL_CTX_set_tmp_dh() or alternatively, use
++the callback but ignore B<keylength> and B<is_export> and simply
++supply at least 2048-bit parameters in the callback.
+ 
+ =head1 EXAMPLES
+ 
+@@ -128,7 +110,6 @@ partly left out.)
+  if (SSL_CTX_set_tmp_dh(ctx, dh_2048) != 1) {
+    /* Error. */
+  }
+- SSL_CTX_set_options(ctx, SSL_OP_SINGLE_DH_USE);
+  ...
+ 
+ =head1 RETURN VALUES
+Index: openssl-1.0.2d/ssl/s3_lib.c
+===================================================================
+--- openssl-1.0.2d.orig/ssl/s3_lib.c
++++ openssl-1.0.2d/ssl/s3_lib.c
+@@ -3206,13 +3206,6 @@ long ssl3_ctrl(SSL *s, int cmd, long lar
+                 SSLerr(SSL_F_SSL3_CTRL, ERR_R_DH_LIB);
+                 return (ret);
+             }
+-            if (!(s->options & SSL_OP_SINGLE_DH_USE)) {
+-                if (!DH_generate_key(dh)) {
+-                    DH_free(dh);
+-                    SSLerr(SSL_F_SSL3_CTRL, ERR_R_DH_LIB);
+-                    return (ret);
+-                }
+-            }
+             if (s->cert->dh_tmp != NULL)
+                 DH_free(s->cert->dh_tmp);
+             s->cert->dh_tmp = dh;
+@@ -3710,13 +3703,6 @@ long ssl3_ctx_ctrl(SSL_CTX *ctx, int cmd
+                 SSLerr(SSL_F_SSL3_CTX_CTRL, ERR_R_DH_LIB);
+                 return 0;
+             }
+-            if (!(ctx->options & SSL_OP_SINGLE_DH_USE)) {
+-                if (!DH_generate_key(new)) {
+-                    SSLerr(SSL_F_SSL3_CTX_CTRL, ERR_R_DH_LIB);
+-                    DH_free(new);
+-                    return 0;
+-                }
+-            }
+             if (cert->dh_tmp != NULL)
+                 DH_free(cert->dh_tmp);
+             cert->dh_tmp = new;
+Index: openssl-1.0.2d/ssl/s3_srvr.c
+===================================================================
+--- openssl-1.0.2d.orig/ssl/s3_srvr.c
++++ openssl-1.0.2d/ssl/s3_srvr.c
+@@ -1684,20 +1684,9 @@ int ssl3_send_server_key_exchange(SSL *s
+             }
+ 
+             s->s3->tmp.dh = dh;
+-            if ((dhp->pub_key == NULL ||
+-                 dhp->priv_key == NULL ||
+-                 (s->options & SSL_OP_SINGLE_DH_USE))) {
+-                if (!DH_generate_key(dh)) {
+-                    SSLerr(SSL_F_SSL3_SEND_SERVER_KEY_EXCHANGE, ERR_R_DH_LIB);
+-                    goto err;
+-                }
+-            } else {
+-                dh->pub_key = BN_dup(dhp->pub_key);
+-                dh->priv_key = BN_dup(dhp->priv_key);
+-                if ((dh->pub_key == NULL) || (dh->priv_key == NULL)) {
+-                    SSLerr(SSL_F_SSL3_SEND_SERVER_KEY_EXCHANGE, ERR_R_DH_LIB);
+-                    goto err;
+-                }
++            if (!DH_generate_key(dh)) {
++                SSLerr(SSL_F_SSL3_SEND_SERVER_KEY_EXCHANGE, ERR_R_DH_LIB);
++                goto err;
+             }
+             r[0] = dh->p;
+             r[1] = dh->g;
+Index: openssl-1.0.2d/ssl/ssl.h
+===================================================================
+--- openssl-1.0.2d.orig/ssl/ssl.h
++++ openssl-1.0.2d/ssl/ssl.h
+@@ -625,7 +625,7 @@ struct ssl_session_st {
+ # define SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION        0x00040000L
+ /* If set, always create a new key when using tmp_ecdh parameters */
+ # define SSL_OP_SINGLE_ECDH_USE                          0x00080000L
+-/* If set, always create a new key when using tmp_dh parameters */
++/* Does nothing: retained for compatibility */
+ # define SSL_OP_SINGLE_DH_USE                            0x00100000L
+ /* Does nothing: retained for compatibiity */
+ # define SSL_OP_EPHEMERAL_RSA                            0x0
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl/ptest_makefile_deps.patch b/yocto-poky/meta/recipes-connectivity/openssl/openssl/ptest_makefile_deps.patch
new file mode 100644
index 0000000..4202e61
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl/ptest_makefile_deps.patch
@@ -0,0 +1,248 @@
+Additional Makefile dependencies removal for test targets
+
+Removing the dependency check for test targets as these tests are
+causing a number of failures and "noise" during ptest execution.
+
+Upstream-Status: Inappropriate [config]
+
+Signed-off-by: Maxin B. John <maxin.john@intel.com>
+
+diff -Naur openssl-1.0.2d-orig/test/Makefile openssl-1.0.2d/test/Makefile
+--- openssl-1.0.2d-orig/test/Makefile	2015-09-28 12:50:41.530022979 +0300
++++ openssl-1.0.2d/test/Makefile	2015-09-28 12:57:45.930717240 +0300
+@@ -155,67 +155,67 @@
+ 	( $(MAKE) $$i && echo "PASS: $$i" ) || echo "FAIL: $$i"; \
+ 	done)
+ 
+-test_evp: $(EVPTEST)$(EXE_EXT) evptests.txt
++test_evp:
+ 	../util/shlib_wrap.sh ./$(EVPTEST) evptests.txt
+ 
+-test_evp_extra: $(EVPEXTRATEST)$(EXE_EXT)
++test_evp_extra:
+ 	../util/shlib_wrap.sh ./$(EVPEXTRATEST)
+ 
+-test_des: $(DESTEST)$(EXE_EXT)
++test_des:
+ 	../util/shlib_wrap.sh ./$(DESTEST)
+ 
+-test_idea: $(IDEATEST)$(EXE_EXT)
++test_idea:
+ 	../util/shlib_wrap.sh ./$(IDEATEST)
+ 
+-test_sha: $(SHATEST)$(EXE_EXT) $(SHA1TEST)$(EXE_EXT) $(SHA256TEST)$(EXE_EXT) $(SHA512TEST)$(EXE_EXT)
++test_sha:
+ 	../util/shlib_wrap.sh ./$(SHATEST)
+ 	../util/shlib_wrap.sh ./$(SHA1TEST)
+ 	../util/shlib_wrap.sh ./$(SHA256TEST)
+ 	../util/shlib_wrap.sh ./$(SHA512TEST)
+ 
+-test_mdc2: $(MDC2TEST)$(EXE_EXT)
++test_mdc2:
+ 	../util/shlib_wrap.sh ./$(MDC2TEST)
+ 
+-test_md5: $(MD5TEST)$(EXE_EXT)
++test_md5:
+ 	../util/shlib_wrap.sh ./$(MD5TEST)
+ 
+-test_md4: $(MD4TEST)$(EXE_EXT)
++test_md4:
+ 	../util/shlib_wrap.sh ./$(MD4TEST)
+ 
+-test_hmac: $(HMACTEST)$(EXE_EXT)
++test_hmac:
+ 	../util/shlib_wrap.sh ./$(HMACTEST)
+ 
+-test_wp: $(WPTEST)$(EXE_EXT)
++test_wp:
+ 	../util/shlib_wrap.sh ./$(WPTEST)
+ 
+-test_md2: $(MD2TEST)$(EXE_EXT)
++test_md2:
+ 	../util/shlib_wrap.sh ./$(MD2TEST)
+ 
+-test_rmd: $(RMDTEST)$(EXE_EXT)
++test_rmd:
+ 	../util/shlib_wrap.sh ./$(RMDTEST)
+ 
+-test_bf: $(BFTEST)$(EXE_EXT)
++test_bf:
+ 	../util/shlib_wrap.sh ./$(BFTEST)
+ 
+-test_cast: $(CASTTEST)$(EXE_EXT)
++test_cast:
+ 	../util/shlib_wrap.sh ./$(CASTTEST)
+ 
+-test_rc2: $(RC2TEST)$(EXE_EXT)
++test_rc2:
+ 	../util/shlib_wrap.sh ./$(RC2TEST)
+ 
+-test_rc4: $(RC4TEST)$(EXE_EXT)
++test_rc4:
+ 	../util/shlib_wrap.sh ./$(RC4TEST)
+ 
+-test_rc5: $(RC5TEST)$(EXE_EXT)
++test_rc5:
+ 	../util/shlib_wrap.sh ./$(RC5TEST)
+ 
+-test_rand: $(RANDTEST)$(EXE_EXT)
++test_rand:
+ 	../util/shlib_wrap.sh ./$(RANDTEST)
+ 
+-test_enc: ../apps/openssl$(EXE_EXT) testenc
++test_enc:
+ 	@sh ./testenc
+ 
+-test_x509: ../apps/openssl$(EXE_EXT) tx509 testx509.pem v3-cert1.pem v3-cert2.pem
++test_x509:
+ 	echo test normal x509v1 certificate
+ 	sh ./tx509 2>/dev/null
+ 	echo test first x509v3 certificate
+@@ -223,25 +223,25 @@
+ 	echo test second x509v3 certificate
+ 	sh ./tx509 v3-cert2.pem 2>/dev/null
+ 
+-test_rsa: ../apps/openssl$(EXE_EXT) trsa testrsa.pem
++test_rsa:
+ 	@sh ./trsa 2>/dev/null
+ 	../util/shlib_wrap.sh ./$(RSATEST)
+ 
+-test_crl: ../apps/openssl$(EXE_EXT) tcrl testcrl.pem
++test_crl:
+ 	@sh ./tcrl 2>/dev/null
+ 
+-test_sid: ../apps/openssl$(EXE_EXT) tsid testsid.pem
++test_sid:
+ 	@sh ./tsid 2>/dev/null
+ 
+-test_req: ../apps/openssl$(EXE_EXT) treq testreq.pem testreq2.pem
++test_req:
+ 	@sh ./treq 2>/dev/null
+ 	@sh ./treq testreq2.pem 2>/dev/null
+ 
+-test_pkcs7: ../apps/openssl$(EXE_EXT) tpkcs7 tpkcs7d testp7.pem pkcs7-1.pem
++test_pkcs7:
+ 	@sh ./tpkcs7 2>/dev/null
+ 	@sh ./tpkcs7d 2>/dev/null
+ 
+-test_bn: $(BNTEST)$(EXE_EXT) $(EXPTEST)$(EXE_EXT) bctest
++test_bn:
+ 	@echo starting big number library test, could take a while...
+ 	@../util/shlib_wrap.sh ./$(BNTEST) >tmp.bntest
+ 	@echo quit >>tmp.bntest
+@@ -250,33 +250,33 @@
+ 	@echo 'test a^b%c implementations'
+ 	../util/shlib_wrap.sh ./$(EXPTEST)
+ 
+-test_ec: $(ECTEST)$(EXE_EXT)
++test_ec:
+ 	@echo 'test elliptic curves'
+ 	../util/shlib_wrap.sh ./$(ECTEST)
+ 
+-test_ecdsa: $(ECDSATEST)$(EXE_EXT)
++test_ecdsa:
+ 	@echo 'test ecdsa'
+ 	../util/shlib_wrap.sh ./$(ECDSATEST)
+ 
+-test_ecdh: $(ECDHTEST)$(EXE_EXT)
++test_ecdh:
+ 	@echo 'test ecdh'
+ 	../util/shlib_wrap.sh ./$(ECDHTEST)
+ 
+-test_verify: ../apps/openssl$(EXE_EXT)
++test_verify:
+ 	@echo "The following command should have some OK's and some failures"
+ 	@echo "There are definitly a few expired certificates"
+ 	../util/shlib_wrap.sh ../apps/openssl verify -CApath ../certs/demo ../certs/demo/*.pem
+ 
+-test_dh: $(DHTEST)$(EXE_EXT)
++test_dh:
+ 	@echo "Generate a set of DH parameters"
+ 	../util/shlib_wrap.sh ./$(DHTEST)
+ 
+-test_dsa: $(DSATEST)$(EXE_EXT)
++test_dsa:
+ 	@echo "Generate a set of DSA parameters"
+ 	../util/shlib_wrap.sh ./$(DSATEST)
+ 	../util/shlib_wrap.sh ./$(DSATEST) -app2_1
+ 
+-test_gen testreq.pem: ../apps/openssl$(EXE_EXT) testgen test.cnf
++test_gen testreq.pem:
+ 	@echo "Generate and verify a certificate request"
+ 	@sh ./testgen
+ 
+@@ -288,13 +288,11 @@
+ 	@cat certCA.ss certU.ss > intP1.ss
+ 	@cat certCA.ss certU.ss certP1.ss > intP2.ss
+ 
+-test_engine:  $(ENGINETEST)$(EXE_EXT)
++test_engine:
+ 	@echo "Manipulate the ENGINE structures"
+ 	../util/shlib_wrap.sh ./$(ENGINETEST)
+ 
+-test_ssl: keyU.ss certU.ss certCA.ss certP1.ss keyP1.ss certP2.ss keyP2.ss \
+-		intP1.ss intP2.ss $(SSLTEST)$(EXE_EXT) testssl testsslproxy \
+-		../apps/server2.pem serverinfo.pem
++test_ssl:
+ 	@echo "test SSL protocol"
+ 	@if [ -n "$(FIPSCANLIB)" ]; then \
+ 	  sh ./testfipsssl keyU.ss certU.ss certCA.ss; \
+@@ -304,7 +302,7 @@
+ 	@sh ./testsslproxy keyP1.ss certP1.ss intP1.ss
+ 	@sh ./testsslproxy keyP2.ss certP2.ss intP2.ss
+ 
+-test_ca: ../apps/openssl$(EXE_EXT) testca CAss.cnf Uss.cnf
++test_ca:
+ 	@if ../util/shlib_wrap.sh ../apps/openssl no-rsa; then \
+ 	  echo "skipping CA.sh test -- requires RSA"; \
+ 	else \
+@@ -312,11 +310,11 @@
+ 	  sh ./testca; \
+ 	fi
+ 
+-test_aes: #$(AESTEST)
++test_aes:
+ #	@echo "test Rijndael"
+ #	../util/shlib_wrap.sh ./$(AESTEST)
+ 
+-test_tsa: ../apps/openssl$(EXE_EXT) testtsa CAtsa.cnf ../util/shlib_wrap.sh
++test_tsa:
+ 	@if ../util/shlib_wrap.sh ../apps/openssl no-rsa; then \
+ 	  echo "skipping testtsa test -- requires RSA"; \
+ 	else \
+@@ -331,7 +329,7 @@
+ 	@echo "Test JPAKE"
+ 	../util/shlib_wrap.sh ./$(JPAKETEST)
+ 
+-test_cms: ../apps/openssl$(EXE_EXT) cms-test.pl smcont.txt
++test_cms:
+ 	@echo "CMS consistency test"
+ 	$(PERL) cms-test.pl
+ 
+@@ -339,22 +337,22 @@
+ 	@echo "Test SRP"
+ 	../util/shlib_wrap.sh ./srptest
+ 
+-test_ocsp: ../apps/openssl$(EXE_EXT) tocsp
++test_ocsp:
+ 	@echo "Test OCSP"
+ 	@sh ./tocsp
+ 
+-test_v3name: $(V3NAMETEST)$(EXE_EXT)
++test_v3name:
+ 	@echo "Test X509v3_check_*"
+ 	../util/shlib_wrap.sh ./$(V3NAMETEST)
+ 
+ test_heartbeat:
+ 	../util/shlib_wrap.sh ./$(HEARTBEATTEST)
+ 
+-test_constant_time: $(CONSTTIMETEST)$(EXE_EXT)
++test_constant_time:
+ 	@echo "Test constant time utilites"
+ 	../util/shlib_wrap.sh ./$(CONSTTIMETEST)
+ 
+-test_verify_extra: $(VERIFYEXTRATEST)$(EXE_EXT)
++test_verify_extra:
+ 	@echo $(START) $@
+ 	../util/shlib_wrap.sh ./$(VERIFYEXTRATEST)
+ 
diff --git a/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2d.bb b/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2d.bb
index 32d8dce..8defa5b 100644
--- a/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2d.bb
+++ b/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2d.bb
@@ -36,6 +36,14 @@
             file://run-ptest \
             file://crypto_use_bigint_in_x86-64_perl.patch \
             file://openssl-1.0.2a-x32-asm.patch \
+            file://ptest_makefile_deps.patch  \
+            file://CVE-2015-3193-bn-asm-x86_64-mont5.pl-fix-carry-propagating-bug-CVE.patch \
+            file://CVE-2015-3194-1-Add-PSS-parameter-check.patch \
+            file://0001-Add-test-for-CVE-2015-3194.patch \
+            file://CVE-2015-3195-Fix-leak-with-ASN.1-combine.patch \
+            file://CVE-2015-3197.patch \
+            file://CVE-2016-0701_1.patch \
+            file://CVE-2016-0701_2.patch \
            "
 
 SRC_URI[md5sum] = "38dd619b2e77cbac69b99f52a053d25a"
@@ -55,3 +63,13 @@
 do_configure_prepend() {
   cp ${WORKDIR}/find.pl ${S}/util/find.pl
 }
+
+# The crypto_use_bigint patch means that perl's bignum module needs to be
+# installed, but some distributions (for example Fedora 23) don't ship it by
+# default.  As the resulting error is very misleading check for bignum before
+# building.
+do_configure_prepend() {
+	if ! perl -Mbigint -e true; then
+		bbfatal "The perl module 'bignum' was not found but this is required to build openssl.  Please install this module (often packaged as perl-bignum) and re-run bitbake."
+	fi
+}
diff --git a/yocto-poky/meta/recipes-connectivity/socat/socat/CVE-2016-2217.patch b/yocto-poky/meta/recipes-connectivity/socat/socat/CVE-2016-2217.patch
new file mode 100644
index 0000000..0cd4179
--- /dev/null
+++ b/yocto-poky/meta/recipes-connectivity/socat/socat/CVE-2016-2217.patch
@@ -0,0 +1,372 @@
+Upstream-Status: Backport
+
+http://www.dest-unreach.org/socat/download/socat-1.7.3.1.patch 
+
+CVE: CVE-2016-2217
+[Yocto # 9024]
+Singed-off-by: Armin Kuster <akuster@mvista.com>
+
+Index: socat-1.7.3.0/CHANGES
+===================================================================
+--- socat-1.7.3.0.orig/CHANGES
++++ socat-1.7.3.0/CHANGES
+@@ -1,8 +1,39 @@
+ 
++####################### V 1.7.3.1:
++
++security:
++	Socat security advisory 8
++	A stack overflow in vulnerability was found that can be triggered when
++	command line arguments (complete address specifications, host names,
++	file names) are longer than 512 bytes.
++	Successful exploitation might allow an attacker to execute arbitrary
++	code with the privileges of the socat process.
++	This vulnerability can only be exploited when an attacker is able to
++	inject data into socat's command line.
++	A vulnerable scenario would be a CGI script that reads data from clients
++	and uses (parts of) this data as hostname for a Socat invocation.
++	Test: NESTEDOVFL
++	Credits to Takumi Akiyama for finding and reporting this issue.
++
++	Socat security advisory 7
++	MSVR-1499
++	In the OpenSSL address implementation the hard coded 1024 bit DH p
++	parameter was not prime. The effective cryptographic strength of a key
++	exchange using these parameters was weaker than the one one could get by
++	using a prime p. Moreover, since there is no indication of how these
++	parameters were chosen, the existence of a trapdoor that makes possible
++	for an eavesdropper to recover the shared secret from a key exchange
++	that uses them cannot be ruled out.
++	Futhermore, 1024bit is not considered sufficiently secure.
++	Fix: generated a new 2048bit prime.
++	Thanks to Santiago Zanella-Beguelin and Microsoft Vulnerability
++	Research (MSVR) for finding and reporting this issue.
++
+ ####################### V 1.7.3.0:
+ 
+ security:
+-	(CVE Id pending)
++	Socat security advisory 6
++	CVE-2015-1379: Possible DoS with fork
+ 	Fixed problems with signal handling caused by use of not async signal
+ 	safe functions in signal handlers that could freeze socat, allowing
+ 	denial of service attacks.
+@@ -240,6 +271,7 @@ docu:
+ ####################### V 1.7.2.3:
+ 
+ security:
++	Socat security advisory 5
+ 	CVE-2014-0019: socats PROXY-CONNECT address was vulnerable to a buffer
+ 	overflow with data from command line (see socat-secadv5.txt)
+ 	Credits to Florian Weimer of the Red Hat Product Security Team
+@@ -247,6 +279,7 @@ security:
+ ####################### V 1.7.2.2:
+ 
+ security:
++	Socat security advisory 4
+ 	CVE-2013-3571:
+ 	after refusing a client connection due to bad source address or source
+ 	port socat shutdown() the socket but did not close() it, resulting in
+@@ -258,6 +291,7 @@ security:
+ ####################### V 1.7.2.1:
+ 
+ security:
++	Socat security advisory 3
+ 	CVE-2012-0219:
+ 	fixed a possible heap buffer overflow in the readline address. This bug
+ 	could be exploited when all of the following conditions were met:
+@@ -391,6 +425,7 @@ docu:
+ ####################### V 1.7.1.3:
+ 
+ security:
++	Socat security advisory 2
+ 	CVE-2010-2799:
+ 	fixed a stack overflow vulnerability that occurred when command
+ 	line arguments (whole addresses, host names, file names) were longer
+@@ -892,6 +927,7 @@ further corrections:
+ ####################### V 1.4.0.3:
+ 
+ security:
++	Socat security advisory 1
+ 	CVE-2004-1484:
+ 	fix to a syslog() based format string vulnerability that can lead to
+ 	remote code execution. See advisory socat-adv-1.txt
+Index: socat-1.7.3.0/VERSION
+===================================================================
+--- socat-1.7.3.0.orig/VERSION
++++ socat-1.7.3.0/VERSION
+@@ -1 +1 @@
+-"1.7.3.0"
++"1.7.3.1"
+Index: socat-1.7.3.0/nestlex.c
+===================================================================
+--- socat-1.7.3.0.orig/nestlex.c
++++ socat-1.7.3.0/nestlex.c
+@@ -1,5 +1,5 @@
+ /* source: nestlex.c */
+-/* Copyright Gerhard Rieger 2006-2010 */
++/* Copyright Gerhard Rieger */
+ /* Published under the GNU General Public License V.2, see file COPYING */
+ 
+ /* a function for lexical scanning of nested character patterns */
+@@ -9,6 +9,17 @@
+ 
+ #include "sysincludes.h"
+ 
++static int _nestlex(const char **addr,
++		    char **token,
++		    ptrdiff_t *len,
++		    const char *ends[],
++		    const char *hquotes[],
++		    const char *squotes[],
++		    const char *nests[],
++		    bool dropquotes,
++		    bool c_esc,
++		    bool html_esc
++		    );
+ 
+ /* sub: scan a string and copy its value to output string
+    end scanning when an unescaped, unnested string from ends array is found
+@@ -33,6 +44,22 @@ int nestlex(const char **addr,	/* input
+ 	    bool c_esc,		/* solve C char escapes: \n \t \0 etc */
+ 	    bool html_esc	/* solve HTML char escapes: %0d %08 etc */
+ 	    ) {
++   return
++      _nestlex(addr, token, (ptrdiff_t *)len, ends, hquotes, squotes, nests,
++	       dropquotes, c_esc, html_esc);
++}
++
++static int _nestlex(const char **addr,
++		    char **token,
++		    ptrdiff_t *len,
++		    const char *ends[],
++		    const char *hquotes[],
++		    const char *squotes[],
++		    const char *nests[],
++		    bool dropquotes,
++		    bool c_esc,
++		    bool html_esc
++		    ) {
+    const char *in = *addr;	/* pointer into input string */
+    const char **endx;	/* loops over end patterns */
+    const char **quotx;	/* loops over quote patterns */
+@@ -77,16 +104,18 @@ int nestlex(const char **addr,	/* input
+ 		  if (--*len <= 0) { *addr = in; *token = out; return -1; }
+ 	       }
+ 	    }
+-	    /* we call nestlex recursively */
++	    /* we call _nestlex recursively */
+ 	    endnest[0] = *quotx;
+ 	    endnest[1] = NULL;
+ 	    result =
+-	       nestlex(&in, &out, len, endnest, NULL/*hquotes*/,
++	       _nestlex(&in, &out, len, endnest, NULL/*hquotes*/,
+ 		       NULL/*squotes*/, NULL/*nests*/,
+ 		       false, c_esc, html_esc);
+ 	    if (result == 0 && dropquotes) {
+ 	       /* we strip this quote */
+ 	       in += strlen(*quotx);
++	    } else if (result < 0) {
++	       *addr = in; *token = out; return result;
+ 	    } else {
+ 	       /* we copy the trailing quote */
+ 	       for (i = strlen(*quotx); i > 0; --i) {
+@@ -110,7 +139,7 @@ int nestlex(const char **addr,	/* input
+ 	 if (!strncmp(in, *quotx, strlen(*quotx))) {
+ 	    /* this quote pattern matches */
+ 	    /* we strip this quote */
+-	    /* we call nestlex recursively */
++	    /* we call _nestlex recursively */
+ 	    const char *endnest[2];
+ 	    if (dropquotes) {
+ 	       /* we strip this quote */
+@@ -124,13 +153,15 @@ int nestlex(const char **addr,	/* input
+ 	    endnest[0] = *quotx;
+ 	    endnest[1] = NULL;
+ 	    result =
+-	       nestlex(&in, &out, len, endnest, hquotes,
++	       _nestlex(&in, &out, len, endnest, hquotes,
+ 		       squotes, nests,
+ 		       false, c_esc, html_esc);
+ 
+ 	    if (result == 0 && dropquotes) {
+ 	       /* we strip the trailing quote */
+ 	       in += strlen(*quotx);
++	    } else if (result < 0) {
++	       *addr = in; *token = out; return result;
+ 	    } else {
+ 	       /* we copy the trailing quote */
+ 	       for (i = strlen(*quotx); i > 0; --i) {
+@@ -162,7 +193,7 @@ int nestlex(const char **addr,	/* input
+ 	    }
+ 
+ 	    result =
+-	       nestlex(&in, &out, len, endnest, hquotes, squotes, nests,
++	       _nestlex(&in, &out, len, endnest, hquotes, squotes, nests,
+ 		       false, c_esc, html_esc);
+ 	    if (result == 0) {
+ 	       /* copy endnest */
+@@ -175,6 +206,8 @@ int nestlex(const char **addr,	/* input
+ 		  }
+ 		  --i;
+ 	       }
++	    } else if (result < 0) {
++	       *addr = in; *token = out; return result;
+ 	    }
+ 	    break;
+ 	 }
+@@ -211,7 +244,7 @@ int nestlex(const char **addr,	/* input
+ 	 }
+ 	 *out++ = c;
+ 	 --*len;
+-	 if (*len == 0) {
++	 if (*len <= 0) {
+ 	    *addr = in;
+ 	    *token = out;
+ 	    return -1;	/* output overflow */
+@@ -222,7 +255,7 @@ int nestlex(const char **addr,	/* input
+       /* just a simple char */
+       *out++ = c;
+       --*len;
+-      if (*len == 0) {
++      if (*len <= 0) {
+ 	 *addr = in;
+ 	 *token = out;
+ 	 return -1;	/* output overflow */
+Index: socat-1.7.3.0/nestlex.h
+===================================================================
+--- socat-1.7.3.0.orig/nestlex.h
++++ socat-1.7.3.0/nestlex.h
+@@ -1,5 +1,5 @@
+ /* source: nestlex.h */
+-/* Copyright Gerhard Rieger 2006 */
++/* Copyright Gerhard Rieger */
+ /* Published under the GNU General Public License V.2, see file COPYING */
+ 
+ #ifndef __nestlex_h_included
+Index: socat-1.7.3.0/socat.spec
+===================================================================
+--- socat-1.7.3.0.orig/socat.spec
++++ socat-1.7.3.0/socat.spec
+@@ -1,6 +1,6 @@
+ 
+ %define majorver 1.7
+-%define minorver 3.0
++%define minorver 3.1
+ 
+ Summary: socat - multipurpose relay
+ Name: socat
+Index: socat-1.7.3.0/test.sh
+===================================================================
+--- socat-1.7.3.0.orig/test.sh
++++ socat-1.7.3.0/test.sh
+@@ -2266,8 +2266,8 @@ gentestcert () {
+ gentestdsacert () {
+     local name="$1"
+     if [ -s $name.key -a -s $name.crt -a -s $name.pem ]; then return; fi
+-    openssl dsaparam -out $name-dsa.pem 512 >/dev/null 2>&1
+-    openssl dhparam -dsaparam -out $name-dh.pem 512 >/dev/null 2>&1
++    openssl dsaparam -out $name-dsa.pem 1024 >/dev/null 2>&1
++    openssl dhparam -dsaparam -out $name-dh.pem 1024 >/dev/null 2>&1
+     openssl req -newkey dsa:$name-dsa.pem -keyout $name.key -nodes -x509 -config $TESTCERT_CONF -out $name.crt -days 3653 >/dev/null 2>&1
+     cat $name-dsa.pem $name-dh.pem $name.key $name.crt >$name.pem
+ }
+@@ -10973,6 +10973,42 @@ CMD0="$TRACE $SOCAT $opts OPENSSL:localh
+ printf "test $F_n $TEST... " $N
+ $CMD0 </dev/null 1>&0 2>"${te}0"
+ rc0=$?
++if [ $rc0 -lt 128 ] || [ $rc0 -eq 255 ]; then
++    $PRINTF "$OK\n"
++    numOK=$((numOK+1))
++else
++    $PRINTF "$FAILED\n"
++    echo "$CMD0"
++    cat "${te}0"
++    numFAIL=$((numFAIL+1))
++    listFAIL="$listFAIL $N"
++fi
++fi # NUMCOND
++ ;;
++esac
++PORT=$((PORT+1))
++N=$((N+1))
++
++# socat up to 1.7.3.0 had a stack overflow vulnerability that occurred when
++# command line arguments (whole addresses, host names, file names) were longer
++# than 512 bytes and specially crafted.
++NAME=NESTEDOVFL
++case "$TESTS" in
++*%$N%*|*%functions%*|*%bugs%*|*%security%*|*%exec%*|*%$NAME%*)
++TEST="$NAME: stack overflow on overly long nested arg"
++# provide a long host name to TCP-CONNECT and check socats exit code
++if ! eval $NUMCOND; then :; else
++tf="$td/test$N.stdout"
++te="$td/test$N.stderr"
++tdiff="$td/test$N.diff"
++da="test$N $(date) $RANDOM"
++# prepare long data - perl might not be installed
++rm -f "$td/test$N.dat"
++i=0; while [ $i -lt 64 ]; do  echo -n "AAAAAAAAAAAAAAAA" >>"$td/test$N.dat"; i=$((i+1)); done
++CMD0="$TRACE $SOCAT $opts EXEC:[$(cat "$td/test$N.dat")] STDIO"
++printf "test $F_n $TEST... " $N
++$CMD0 </dev/null 1>&0 2>"${te}0"
++rc0=$?
+ if [ $rc0 -lt 128 ] || [ $rc0 -eq 255 ]; then
+     $PRINTF "$OK\n"
+     numOK=$((numOK+1))
+Index: socat-1.7.3.0/xio-openssl.c
+===================================================================
+--- socat-1.7.3.0.orig/xio-openssl.c
++++ socat-1.7.3.0/xio-openssl.c
+@@ -912,20 +912,27 @@ int
+    }
+ 
+    {
+-      static unsigned char dh1024_p[] = {
+-	 0xCC,0x17,0xF2,0xDC,0x96,0xDF,0x59,0xA4,0x46,0xC5,0x3E,0x0E,
+-	 0xB8,0x26,0x55,0x0C,0xE3,0x88,0xC1,0xCE,0xA7,0xBC,0xB3,0xBF,
+-	 0x16,0x94,0xD8,0xA9,0x45,0xA2,0xCE,0xA9,0x5B,0x22,0x25,0x5F,
+-	 0x92,0x59,0x94,0x1C,0x22,0xBF,0xCB,0xC8,0xC8,0x57,0xCB,0xBF,
+-	 0xBC,0x0E,0xE8,0x40,0xF9,0x87,0x03,0xBF,0x60,0x9B,0x08,0xC6,
+-	 0x8E,0x99,0xC6,0x05,0xFC,0x00,0xD6,0x6D,0x90,0xA8,0xF5,0xF8,
+-	 0xD3,0x8D,0x43,0xC8,0x8F,0x7A,0xBD,0xBB,0x28,0xAC,0x04,0x69,
+-	 0x4A,0x0B,0x86,0x73,0x37,0xF0,0x6D,0x4F,0x04,0xF6,0xF5,0xAF,
+-	 0xBF,0xAB,0x8E,0xCE,0x75,0x53,0x4D,0x7F,0x7D,0x17,0x78,0x0E,
+-	 0x12,0x46,0x4A,0xAF,0x95,0x99,0xEF,0xBC,0xA6,0xC5,0x41,0x77,
+-	 0x43,0x7A,0xB9,0xEC,0x8E,0x07,0x3C,0x6D,
++      static unsigned char dh2048_p[] = {
++	 0x00,0xdc,0x21,0x64,0x56,0xbd,0x9c,0xb2,0xac,0xbe,0xc9,0x98,0xef,0x95,0x3e,
++	 0x26,0xfa,0xb5,0x57,0xbc,0xd9,0xe6,0x75,0xc0,0x43,0xa2,0x1c,0x7a,0x85,0xdf,
++	 0x34,0xab,0x57,0xa8,0xf6,0xbc,0xf6,0x84,0x7d,0x05,0x69,0x04,0x83,0x4c,0xd5,
++	 0x56,0xd3,0x85,0x09,0x0a,0x08,0xff,0xb5,0x37,0xa1,0xa3,0x8a,0x37,0x04,0x46,
++	 0xd2,0x93,0x31,0x96,0xf4,0xe4,0x0d,0x9f,0xbd,0x3e,0x7f,0x9e,0x4d,0xaf,0x08,
++	 0xe2,0xe8,0x03,0x94,0x73,0xc4,0xdc,0x06,0x87,0xbb,0x6d,0xae,0x66,0x2d,0x18,
++	 0x1f,0xd8,0x47,0x06,0x5c,0xcf,0x8a,0xb5,0x00,0x51,0x57,0x9b,0xea,0x1e,0xd8,
++	 0xdb,0x8e,0x3c,0x1f,0xd3,0x2f,0xba,0x1f,0x5f,0x3d,0x15,0xc1,0x3b,0x2c,0x82,
++	 0x42,0xc8,0x8c,0x87,0x79,0x5b,0x38,0x86,0x3a,0xeb,0xfd,0x81,0xa9,0xba,0xf7,
++	 0x26,0x5b,0x93,0xc5,0x3e,0x03,0x30,0x4b,0x00,0x5c,0xb6,0x23,0x3e,0xea,0x94,
++	 0xc3,0xb4,0x71,0xc7,0x6e,0x64,0x3b,0xf8,0x92,0x65,0xad,0x60,0x6c,0xd4,0x7b,
++	 0xa9,0x67,0x26,0x04,0xa8,0x0a,0xb2,0x06,0xeb,0xe0,0x7d,0x90,0xdd,0xdd,0xf5,
++	 0xcf,0xb4,0x11,0x7c,0xab,0xc1,0xa3,0x84,0xbe,0x27,0x77,0xc7,0xde,0x20,0x57,
++	 0x66,0x47,0xa7,0x35,0xfe,0x0d,0x6a,0x1c,0x52,0xb8,0x58,0xbf,0x26,0x33,0x81,
++	 0x5e,0xb7,0xa9,0xc0,0xee,0x58,0x11,0x74,0x86,0x19,0x08,0x89,0x1c,0x37,0x0d,
++	 0x52,0x47,0x70,0x75,0x8b,0xa8,0x8b,0x30,0x11,0x71,0x36,0x62,0xf0,0x73,0x41,
++	 0xee,0x34,0x9d,0x0a,0x2b,0x67,0x4e,0x6a,0xa3,0xe2,0x99,0x92,0x1b,0xf5,0x32,
++	 0x73,0x63
+       };
+-      static unsigned char dh1024_g[] = {
++      static unsigned char dh2048_g[] = {
+ 	 0x02,
+       };
+       DH *dh;
+@@ -938,8 +945,8 @@ int
+ 	 }
+ 	 Error("DH_new() failed");
+       } else {
+-	 dh->p = BN_bin2bn(dh1024_p, sizeof(dh1024_p), NULL);
+-	 dh->g = BN_bin2bn(dh1024_g, sizeof(dh1024_g), NULL);
++	 dh->p = BN_bin2bn(dh2048_p, sizeof(dh2048_p), NULL);
++	 dh->g = BN_bin2bn(dh2048_g, sizeof(dh2048_g), NULL);
+ 	 if ((dh->p == NULL) || (dh->g == NULL)) {
+ 	    while (err = ERR_get_error()) {
+ 	       Warn1("BN_bin2bn(): %s",
diff --git a/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.0.bb b/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.0.bb
index b58e0a7..6d76d0f 100644
--- a/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.0.bb
+++ b/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.0.bb
@@ -14,6 +14,7 @@
 
 SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \
            file://Makefile.in-fix-for-parallel-build.patch \
+           file://CVE-2016-2217.patch \
 "
 
 SRC_URI[md5sum] = "b607edb65bc6c57f4a43f06247504274"
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox.inc b/yocto-poky/meta/recipes-core/busybox/busybox.inc
index ed8f9fe..4d4709a 100644
--- a/yocto-poky/meta/recipes-core/busybox/busybox.inc
+++ b/yocto-poky/meta/recipes-core/busybox/busybox.inc
@@ -103,9 +103,8 @@
 }
 
 do_prepare_config () {
-	sed -e 's#@DATADIR@#${datadir}#g' \
+	sed -e '/CONFIG_STATIC/d' \
 		< ${WORKDIR}/defconfig > ${S}/.config
-	sed -i -e '/CONFIG_STATIC/d' .config
 	echo "# CONFIG_STATIC is not set" >> .config
 	for i in 'CROSS' 'DISTRO FEATURES'; do echo "### $i"; done >> \
 		${S}/.config
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox/0001-Switch-to-POSIX-utmpx-API.patch b/yocto-poky/meta/recipes-core/busybox/busybox/0001-Switch-to-POSIX-utmpx-API.patch
new file mode 100644
index 0000000..1d299ee
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/busybox/busybox/0001-Switch-to-POSIX-utmpx-API.patch
@@ -0,0 +1,388 @@
+From 86a7f18f211af1abda5c855d2674b0fcb53de524 Mon Sep 17 00:00:00 2001
+From: Bernhard Reutner-Fischer <rep.dot.nop@gmail.com>
+Date: Thu, 2 Apr 2015 23:03:46 +0200
+Subject: [PATCH] *: Switch to POSIX utmpx API
+
+UTMP is SVID legacy, UTMPX is mandated by POSIX.
+
+Glibc and uClibc have identical layout of UTMP and UTMPX, both of these
+libc treat _PATH_UTMPX as _PATH_UTMP so from a user-perspective nothing
+changes except the names of the API entrypoints.
+
+Signed-off-by: Bernhard Reutner-Fischer <rep.dot.nop@gmail.com>
+---
+Upstream-Status: Backport
+
+ coreutils/who.c        |  8 ++++----
+ include/libbb.h        |  2 +-
+ init/halt.c            |  4 ++--
+ libbb/utmp.c           | 44 ++++++++++++++++++++++----------------------
+ miscutils/last.c       |  8 ++++----
+ miscutils/last_fancy.c | 16 ++++++++++------
+ miscutils/runlevel.c   | 12 ++++++------
+ miscutils/wall.c       |  8 ++++----
+ procps/uptime.c        |  6 +++---
+ 9 files changed, 56 insertions(+), 52 deletions(-)
+
+diff --git a/coreutils/who.c b/coreutils/who.c
+index f955ce6..8337212 100644
+--- a/coreutils/who.c
++++ b/coreutils/who.c
+@@ -73,7 +73,7 @@ static void idle_string(char *str6, time_t t)
+ int who_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
+ int who_main(int argc UNUSED_PARAM, char **argv)
+ {
+-	struct utmp *ut;
++	struct utmpx *ut;
+ 	unsigned opt;
+ 	int do_users = (ENABLE_USERS && (!ENABLE_WHO || applet_name[0] == 'u'));
+ 	const char *fmt = "%s";
+@@ -83,8 +83,8 @@ int who_main(int argc UNUSED_PARAM, char **argv)
+ 	if (opt & 2) // -H
+ 		printf("USER\t\tTTY\t\tIDLE\tTIME\t\t HOST\n");
+ 
+-	setutent();
+-	while ((ut = getutent()) != NULL) {
++	setutxent();
++	while ((ut = getutxent()) != NULL) {
+ 		if (ut->ut_user[0]
+ 		 && ((opt & 1) || ut->ut_type == USER_PROCESS)
+ 		) {
+@@ -126,6 +126,6 @@ int who_main(int argc UNUSED_PARAM, char **argv)
+ 	if (do_users)
+ 		bb_putchar('\n');
+ 	if (ENABLE_FEATURE_CLEAN_UP)
+-		endutent();
++		endutxent();
+ 	return EXIT_SUCCESS;
+ }
+diff --git a/include/libbb.h b/include/libbb.h
+index 26b6868..0f8363b 100644
+--- a/include/libbb.h
++++ b/include/libbb.h
+@@ -84,7 +84,7 @@
+ # include <selinux/av_permissions.h>
+ #endif
+ #if ENABLE_FEATURE_UTMP
+-# include <utmp.h>
++# include <utmpx.h>
+ #endif
+ #if ENABLE_LOCALE_SUPPORT
+ # include <locale.h>
+diff --git a/init/halt.c b/init/halt.c
+index 7974adb..ad12d91 100644
+--- a/init/halt.c
++++ b/init/halt.c
+@@ -74,7 +74,7 @@
+ 
+ static void write_wtmp(void)
+ {
+-	struct utmp utmp;
++	struct utmpx utmp;
+ 	struct utsname uts;
+ 	/* "man utmp" says wtmp file should *not* be created automagically */
+ 	/*if (access(bb_path_wtmp_file, R_OK|W_OK) == -1) {
+@@ -88,7 +88,7 @@ static void write_wtmp(void)
+ 	utmp.ut_line[0] = '~'; utmp.ut_line[1] = '~'; /* = strcpy(utmp.ut_line, "~~"); */
+ 	uname(&uts);
+ 	safe_strncpy(utmp.ut_host, uts.release, sizeof(utmp.ut_host));
+-	updwtmp(bb_path_wtmp_file, &utmp);
++	updwtmpx(bb_path_wtmp_file, &utmp);
+ }
+ #else
+ #define write_wtmp() ((void)0)
+diff --git a/libbb/utmp.c b/libbb/utmp.c
+index 8ad9ba2..bd07670 100644
+--- a/libbb/utmp.c
++++ b/libbb/utmp.c
+@@ -16,7 +16,7 @@ static void touch(const char *filename)
+ 
+ void FAST_FUNC write_new_utmp(pid_t pid, int new_type, const char *tty_name, const char *username, const char *hostname)
+ {
+-	struct utmp utent;
++	struct utmpx utent;
+ 	char *id;
+ 	unsigned width;
+ 
+@@ -45,17 +45,17 @@ void FAST_FUNC write_new_utmp(pid_t pid, int new_type, const char *tty_name, con
+ 		tty_name += 3;
+ 	strncpy(id, tty_name, width);
+ 
+-	touch(_PATH_UTMP);
+-	//utmpname(_PATH_UTMP);
+-	setutent();
++	touch(_PATH_UTMPX);
++	//utmpxname(_PATH_UTMPX);
++	setutxent();
+ 	/* Append new one (hopefully, unless we collide on ut_id) */
+-	pututline(&utent);
+-	endutent();
++	pututxline(&utent);
++	endutxent();
+ 
+ #if ENABLE_FEATURE_WTMP
+ 	/* "man utmp" says wtmp file should *not* be created automagically */
+ 	/*touch(bb_path_wtmp_file);*/
+-	updwtmp(bb_path_wtmp_file, &utent);
++	updwtmpx(bb_path_wtmp_file, &utent);
+ #endif
+ }
+ 
+@@ -64,17 +64,17 @@ void FAST_FUNC write_new_utmp(pid_t pid, int new_type, const char *tty_name, con
+  */
+ void FAST_FUNC update_utmp(pid_t pid, int new_type, const char *tty_name, const char *username, const char *hostname)
+ {
+-	struct utmp utent;
+-	struct utmp *utp;
++	struct utmpx utent;
++	struct utmpx *utp;
+ 
+-	touch(_PATH_UTMP);
+-	//utmpname(_PATH_UTMP);
+-	setutent();
++	touch(_PATH_UTMPX);
++	//utmpxname(_PATH_UTMPX);
++	setutxent();
+ 
+ 	/* Did init/getty/telnetd/sshd/... create an entry for us?
+ 	 * It should be (new_type-1), but we'd also reuse
+ 	 * any other potentially stale xxx_PROCESS entry */
+-	while ((utp = getutent()) != NULL) {
++	while ((utp = getutxent()) != NULL) {
+ 		if (utp->ut_pid == pid
+ 		// && ut->ut_line[0]
+ 		 && utp->ut_id[0] /* must have nonzero id */
+@@ -88,25 +88,25 @@ void FAST_FUNC update_utmp(pid_t pid, int new_type, const char *tty_name, const
+ 				/* Stale record. Nuke hostname */
+ 				memset(utp->ut_host, 0, sizeof(utp->ut_host));
+ 			}
+-			/* NB: pututline (see later) searches for matching utent
+-			 * using getutid(utent) - we must not change ut_id
++			/* NB: pututxline (see later) searches for matching utxent
++			 * using getutxid(utent) - we must not change ut_id
+ 			 * if we want *exactly this* record to be overwritten!
+ 			 */
+ 			break;
+ 		}
+ 	}
+-	//endutent(); - no need, pututline can deal with (and actually likes)
++	//endutxent(); - no need, pututxline can deal with (and actually likes)
+ 	//the situation when utmp file is positioned on found record
+ 
+ 	if (!utp) {
+ 		if (new_type != DEAD_PROCESS)
+ 			write_new_utmp(pid, new_type, tty_name, username, hostname);
+ 		else
+-			endutent();
++			endutxent();
+ 		return;
+ 	}
+ 
+-	/* Make a copy. We can't use *utp, pututline's internal getutid
++	/* Make a copy. We can't use *utp, pututxline's internal getutxid
+ 	 * will overwrite it before it is used! */
+ 	utent = *utp;
+ 
+@@ -120,14 +120,14 @@ void FAST_FUNC update_utmp(pid_t pid, int new_type, const char *tty_name, const
+ 	utent.ut_tv.tv_sec = time(NULL);
+ 
+ 	/* Update, or append new one */
+-	//setutent();
+-	pututline(&utent);
+-	endutent();
++	//setutxent();
++	pututxline(&utent);
++	endutxent();
+ 
+ #if ENABLE_FEATURE_WTMP
+ 	/* "man utmp" says wtmp file should *not* be created automagically */
+ 	/*touch(bb_path_wtmp_file);*/
+-	updwtmp(bb_path_wtmp_file, &utent);
++	updwtmpx(bb_path_wtmp_file, &utent);
+ #endif
+ }
+ 
+diff --git a/miscutils/last.c b/miscutils/last.c
+index a144c7e..6d8b584 100644
+--- a/miscutils/last.c
++++ b/miscutils/last.c
+@@ -32,21 +32,21 @@
+ 
+ #if defined UT_LINESIZE \
+ 	&& ((UT_LINESIZE != 32) || (UT_NAMESIZE != 32) || (UT_HOSTSIZE != 256))
+-#error struct utmp member char[] size(s) have changed!
++#error struct utmpx member char[] size(s) have changed!
+ #elif defined __UT_LINESIZE \
+ 	&& ((__UT_LINESIZE != 32) || (__UT_NAMESIZE != 64) || (__UT_HOSTSIZE != 256))
+-#error struct utmp member char[] size(s) have changed!
++#error struct utmpx member char[] size(s) have changed!
+ #endif
+ 
+ #if EMPTY != 0 || RUN_LVL != 1 || BOOT_TIME != 2 || NEW_TIME != 3 || \
+ 	OLD_TIME != 4
+-#error Values for the ut_type field of struct utmp changed
++#error Values for the ut_type field of struct utmpx changed
+ #endif
+ 
+ int last_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
+ int last_main(int argc UNUSED_PARAM, char **argv UNUSED_PARAM)
+ {
+-	struct utmp ut;
++	struct utmpx ut;
+ 	int n, file = STDIN_FILENO;
+ 	time_t t_tmp;
+ 	off_t pos;
+diff --git a/miscutils/last_fancy.c b/miscutils/last_fancy.c
+index 16ed9e9..8194e31 100644
+--- a/miscutils/last_fancy.c
++++ b/miscutils/last_fancy.c
+@@ -22,6 +22,10 @@
+ #define HEADER_LINE_WIDE  "USER", "TTY", \
+ 	INET6_ADDRSTRLEN, INET6_ADDRSTRLEN, "HOST", "LOGIN", "  TIME", ""
+ 
++#if !defined __UT_LINESIZE && defined UT_LINESIZE
++# define __UT_LINESIZE UT_LINESIZE
++#endif
++
+ enum {
+ 	NORMAL,
+ 	LOGGED,
+@@ -39,7 +43,7 @@ enum {
+ 
+ #define show_wide (option_mask32 & LAST_OPT_W)
+ 
+-static void show_entry(struct utmp *ut, int state, time_t dur_secs)
++static void show_entry(struct utmpx *ut, int state, time_t dur_secs)
+ {
+ 	unsigned days, hours, mins;
+ 	char duration[sizeof("(%u+02:02)") + sizeof(int)*3];
+@@ -104,7 +108,7 @@ static void show_entry(struct utmp *ut, int state, time_t dur_secs)
+ 		duration_str);
+ }
+ 
+-static int get_ut_type(struct utmp *ut)
++static int get_ut_type(struct utmpx *ut)
+ {
+ 	if (ut->ut_line[0] == '~') {
+ 		if (strcmp(ut->ut_user, "shutdown") == 0) {
+@@ -142,7 +146,7 @@ static int get_ut_type(struct utmp *ut)
+ 	return ut->ut_type;
+ }
+ 
+-static int is_runlevel_shutdown(struct utmp *ut)
++static int is_runlevel_shutdown(struct utmpx *ut)
+ {
+ 	if (((ut->ut_pid & 255) == '0') || ((ut->ut_pid & 255) == '6')) {
+ 		return 1;
+@@ -154,7 +158,7 @@ static int is_runlevel_shutdown(struct utmp *ut)
+ int last_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
+ int last_main(int argc UNUSED_PARAM, char **argv)
+ {
+-	struct utmp ut;
++	struct utmpx ut;
+ 	const char *filename = _PATH_WTMP;
+ 	llist_t *zlist;
+ 	off_t pos;
+@@ -242,9 +246,9 @@ int last_main(int argc UNUSED_PARAM, char **argv)
+ 			{
+ 				llist_t *el, *next;
+ 				for (el = zlist; el; el = next) {
+-					struct utmp *up = (struct utmp *)el->data;
++					struct utmpx *up = (struct utmpx *)el->data;
+ 					next = el->link;
+-					if (strncmp(up->ut_line, ut.ut_line, UT_LINESIZE) == 0) {
++					if (strncmp(up->ut_line, ut.ut_line, __UT_LINESIZE) == 0) {
+ 						if (show) {
+ 							show_entry(&ut, NORMAL, up->ut_tv.tv_sec);
+ 							show = 0;
+diff --git a/miscutils/runlevel.c b/miscutils/runlevel.c
+index 76231df..8558db8 100644
+--- a/miscutils/runlevel.c
++++ b/miscutils/runlevel.c
+@@ -29,19 +29,19 @@
+ int runlevel_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
+ int runlevel_main(int argc UNUSED_PARAM, char **argv)
+ {
+-	struct utmp *ut;
++	struct utmpx *ut;
+ 	char prev;
+ 
+-	if (argv[1]) utmpname(argv[1]);
++	if (argv[1]) utmpxname(argv[1]);
+ 
+-	setutent();
+-	while ((ut = getutent()) != NULL) {
++	setutxent();
++	while ((ut = getutxent()) != NULL) {
+ 		if (ut->ut_type == RUN_LVL) {
+ 			prev = ut->ut_pid / 256;
+ 			if (prev == 0) prev = 'N';
+ 			printf("%c %c\n", prev, ut->ut_pid % 256);
+ 			if (ENABLE_FEATURE_CLEAN_UP)
+-				endutent();
++				endutxent();
+ 			return 0;
+ 		}
+ 	}
+@@ -49,6 +49,6 @@ int runlevel_main(int argc UNUSED_PARAM, char **argv)
+ 	puts("unknown");
+ 
+ 	if (ENABLE_FEATURE_CLEAN_UP)
+-		endutent();
++		endutxent();
+ 	return 1;
+ }
+diff --git a/miscutils/wall.c b/miscutils/wall.c
+index bb709ee..50658f4 100644
+--- a/miscutils/wall.c
++++ b/miscutils/wall.c
+@@ -32,7 +32,7 @@
+ int wall_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
+ int wall_main(int argc UNUSED_PARAM, char **argv)
+ {
+-	struct utmp *ut;
++	struct utmpx *ut;
+ 	char *msg;
+ 	int fd;
+ 
+@@ -46,8 +46,8 @@ int wall_main(int argc UNUSED_PARAM, char **argv)
+ 	msg = xmalloc_read(fd, NULL);
+ 	if (ENABLE_FEATURE_CLEAN_UP && argv[1])
+ 		close(fd);
+-	setutent();
+-	while ((ut = getutent()) != NULL) {
++	setutxent();
++	while ((ut = getutxent()) != NULL) {
+ 		char *line;
+ 		if (ut->ut_type != USER_PROCESS)
+ 			continue;
+@@ -56,7 +56,7 @@ int wall_main(int argc UNUSED_PARAM, char **argv)
+ 		free(line);
+ 	}
+ 	if (ENABLE_FEATURE_CLEAN_UP) {
+-		endutent();
++		endutxent();
+ 		free(msg);
+ 	}
+ 	return EXIT_SUCCESS;
+diff --git a/procps/uptime.c b/procps/uptime.c
+index 778812a..149bae6 100644
+--- a/procps/uptime.c
++++ b/procps/uptime.c
+@@ -81,10 +81,10 @@ int uptime_main(int argc UNUSED_PARAM, char **argv UNUSED_PARAM)
+ 
+ #if ENABLE_FEATURE_UPTIME_UTMP_SUPPORT
+ 	{
+-		struct utmp *ut;
++		struct utmpx *ut;
+ 		unsigned users = 0;
+-		while ((ut = getutent()) != NULL) {
+-			if ((ut->ut_type == USER_PROCESS) && (ut->ut_name[0] != '\0'))
++		while ((ut = getutxent()) != NULL) {
++			if ((ut->ut_type == USER_PROCESS) && (ut->ut_user[0] != '\0'))
+ 				users++;
+ 		}
+ 		printf(",  %u users", users);
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox/0001-Use-CC-when-linking-instead-of-LD-and-use-CFLAGS-and.patch b/yocto-poky/meta/recipes-core/busybox/busybox/0001-Use-CC-when-linking-instead-of-LD-and-use-CFLAGS-and.patch
new file mode 100644
index 0000000..2bf2b91
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/busybox/busybox/0001-Use-CC-when-linking-instead-of-LD-and-use-CFLAGS-and.patch
@@ -0,0 +1,114 @@
+From a9333eb6a7b8dbda735947cd5bc981ff9352a2c9 Mon Sep 17 00:00:00 2001
+From: Nathan Phillip Brink <ohnobinki@ohnopublishing.net>
+Date: Thu, 10 Mar 2011 00:27:08 -0500
+Subject: [PATCH 1/2] Use $(CC) when linking instead of $(LD) and use $(CFLAGS)
+ and $(EXTRA_CFLAGS) when linking.
+
+This fixes the issue where LDFLAGS escaped with -Wl are ignored during
+compilation. It also simplifies using CFLAGS or EXTRA_CFLAGS (such as
+-m32 on x86_64 or -flto) which apply to both compilation and linking
+situations.
+
+Signed-off-by: Nathan Phillip Brink <ohnobinki@ohnopublishing.net>
+---
+Upstream-Status: Pending
+
+ Makefile               |  7 ++++---
+ scripts/Makefile.build |  8 ++++----
+ scripts/Makefile.lib   | 13 +++----------
+ 3 files changed, 11 insertions(+), 17 deletions(-)
+
+Index: busybox-1.23.2/Makefile
+===================================================================
+--- busybox-1.23.2.orig/Makefile
++++ busybox-1.23.2/Makefile
+@@ -309,7 +309,8 @@ CHECKFLAGS     := -D__linux__ -Dlinux -D
+ MODFLAGS	= -DMODULE
+ CFLAGS_MODULE   = $(MODFLAGS)
+ AFLAGS_MODULE   = $(MODFLAGS)
+-LDFLAGS_MODULE  = -r
++LDFLAGS_RELOCATABLE = -r -nostdlib
++LDFLAGS_MODULE  = $(LDFLAGS_RELOCATABLE)
+ CFLAGS_KERNEL	=
+ AFLAGS_KERNEL	=
+ 
+@@ -331,7 +332,7 @@ KERNELVERSION = $(VERSION).$(PATCHLEVEL)
+ export	VERSION PATCHLEVEL SUBLEVEL KERNELRELEASE KERNELVERSION \
+ 	ARCH CONFIG_SHELL HOSTCC HOSTCFLAGS CROSS_COMPILE AS LD CC \
+ 	CPP AR NM STRIP OBJCOPY OBJDUMP MAKE AWK GENKSYMS PERL UTS_MACHINE \
+-	HOSTCXX HOSTCXXFLAGS LDFLAGS_MODULE CHECK CHECKFLAGS
++	HOSTCXX HOSTCXXFLAGS LDFLAGS_RELOCATABLE LDFLAGS_MODULE CHECK CHECKFLAGS
+ 
+ export CPPFLAGS NOSTDINC_FLAGS LINUXINCLUDE OBJCOPYFLAGS LDFLAGS
+ export CFLAGS CFLAGS_KERNEL CFLAGS_MODULE
+@@ -610,7 +611,7 @@ quiet_cmd_busybox__ ?= LINK    $@
+       cmd_busybox__ ?= $(srctree)/scripts/trylink \
+       "$@" \
+       "$(CC)" \
+-      "$(CFLAGS) $(CFLAGS_busybox)" \
++      "$(CFLAGS) $(CFLAGS_busybox) $(EXTRA_CFLAGS)" \
+       "$(LDFLAGS) $(EXTRA_LDFLAGS)" \
+       "$(core-y)" \
+       "$(libs-y)" \
+Index: busybox-1.23.2/scripts/Makefile.build
+===================================================================
+--- busybox-1.23.2.orig/scripts/Makefile.build
++++ busybox-1.23.2/scripts/Makefile.build
+@@ -174,7 +174,7 @@ cmd_modversions =							\
+ 		| $(GENKSYMS) -a $(ARCH)				\
+ 		> $(@D)/.tmp_$(@F:.o=.ver);				\
+ 									\
+-		$(LD) $(LDFLAGS) -r -o $@ $(@D)/.tmp_$(@F) 		\
++               $(CC) $(ld_flags_partial) $(LDFLAGS_RELOCATABLE) -o $@ $(@D)/.tmp_$(@F)        \
+ 			-T $(@D)/.tmp_$(@F:.o=.ver);			\
+ 		rm -f $(@D)/.tmp_$(@F) $(@D)/.tmp_$(@F:.o=.ver);	\
+ 	else								\
+@@ -257,7 +257,7 @@ quiet_cmd_link_o_target = LD      $@
+ # If the list of objects to link is empty, just create an empty built-in.o
+ # -nostdlib is added to make "make LD=gcc ..." work (some people use that)
+ cmd_link_o_target = $(if $(strip $(obj-y)),\
+-		$(LD) -nostdlib $(ld_flags) -r -o $@ $(filter $(obj-y), $^),\
++		$(CC) $(ld_flags_partial) $(LDFLAGS_RELOCATABLE) -o $@ $(filter $(obj-y), $^),\
+ 		rm -f $@; $(AR) rcs $@)
+ 
+ $(builtin-target): $(obj-y) FORCE
+@@ -292,10 +292,10 @@ $($(subst $(obj)/,,$(@:.o=-objs)))    \
+ $($(subst $(obj)/,,$(@:.o=-y)))), $^)
+ 
+ quiet_cmd_link_multi-y = LD      $@
+-cmd_link_multi-y = $(LD) $(ld_flags) -r -o $@ $(link_multi_deps)
++cmd_link_multi-y = $(CC) $(ld_flags_partial) $(LDFLAGS_RELOCATABLE) -o $@ $(link_multi_deps)
+ 
+ quiet_cmd_link_multi-m = LD [M]  $@
+-cmd_link_multi-m = $(LD) $(ld_flags) $(LDFLAGS_MODULE) -o $@ $(link_multi_deps)
++cmd_link_multi-m = $(CC) $(ld_flags) $(LDFLAGS_MODULE) -o $@ $(link_multi_deps)
+ 
+ # We would rather have a list of rules like
+ # 	foo.o: $(foo-objs)
+Index: busybox-1.23.2/scripts/Makefile.lib
+===================================================================
+--- busybox-1.23.2.orig/scripts/Makefile.lib
++++ busybox-1.23.2/scripts/Makefile.lib
+@@ -121,7 +121,8 @@ cpp_flags      = -Wp,-MD,$(depfile) $(NO
+ # yet ld_flags is fed to ld.
+ #ld_flags       = $(LDFLAGS) $(EXTRA_LDFLAGS)
+ # Remove the -Wl, prefix from linker options normally passed through gcc
+-ld_flags       = $(filter-out -Wl$(comma)%,$(LDFLAGS) $(EXTRA_LDFLAGS))
++ld_flags       = $(filter-out -Wl$(comma)%,$(LDFLAGS) $(EXTRA_LDFLAGS) $(CFLAGS) $(EXTRA_CFLAGS))
++ld_flags_partial = $($(filter-out -shared%, $(filter-out -pie%,$(ld_flags))))
+ 
+ 
+ # Finds the multi-part object the current object will be linked into
+@@ -151,10 +152,8 @@ $(obj)/%:: $(src)/%_shipped
+ # Linking
+ # ---------------------------------------------------------------------------
+ 
+-# TODO: LDFLAGS usually is supposed to contain gcc's flags, not ld's.
+-# but here we feed them to ld!
+-quiet_cmd_ld = LD      $@
+-cmd_ld = $(LD) $(LDFLAGS) $(EXTRA_LDFLAGS) $(LDFLAGS_$(@F)) \
++quiet_cmd_ld = CC    $@
++cmd_ld = $(CC) $(ld_flags) $(LDFLAGS_$(@F)) \
+ 	       $(filter-out FORCE,$^) -o $@
+ 
+ # Objcopy
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox/0001-randconfig-fix.patch b/yocto-poky/meta/recipes-core/busybox/busybox/0001-randconfig-fix.patch
new file mode 100644
index 0000000..415ec34
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/busybox/busybox/0001-randconfig-fix.patch
@@ -0,0 +1,33 @@
+If CONFIG_FEATURE_LAST_SMALL is enabled the build fails because of a broken
+__UT_NAMESIZE test.
+
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+From 932302666b0354ede63504d1bef8393cab28db8b Mon Sep 17 00:00:00 2001
+From: Denys Vlasenko <vda.linux@googlemail.com>
+Date: Sun, 11 Oct 2015 16:58:18 +0200
+Subject: [PATCH] randconfig fix
+
+Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
+---
+ miscutils/last.c | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/miscutils/last.c b/miscutils/last.c
+index 6d8b584..f8f3437 100644
+--- a/miscutils/last.c
++++ b/miscutils/last.c
+@@ -34,7 +34,8 @@
+ 	&& ((UT_LINESIZE != 32) || (UT_NAMESIZE != 32) || (UT_HOSTSIZE != 256))
+ #error struct utmpx member char[] size(s) have changed!
+ #elif defined __UT_LINESIZE \
+-	&& ((__UT_LINESIZE != 32) || (__UT_NAMESIZE != 64) || (__UT_HOSTSIZE != 256))
++	&& ((__UT_LINESIZE != 32) || (__UT_NAMESIZE != 32) || (__UT_HOSTSIZE != 256))
++/* __UT_NAMESIZE was checked with 64 above, but glibc-2.11 definitely uses 32! */
+ #error struct utmpx member char[] size(s) have changed!
+ #endif
+ 
+-- 
+2.6.4
+
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox/0002-Passthrough-r-to-linker.patch b/yocto-poky/meta/recipes-core/busybox/busybox/0002-Passthrough-r-to-linker.patch
new file mode 100644
index 0000000..de286fb
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/busybox/busybox/0002-Passthrough-r-to-linker.patch
@@ -0,0 +1,32 @@
+From df2cc76cdebc4773361477f3db203790f6986e3b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 22 Aug 2015 23:42:40 -0700
+Subject: [PATCH 2/2] Passthrough -r to linker
+
+clang does not have -r switch and it does not pass it down to linker
+either, LDFLAGS_RELOCATABLE is used when CC is used for LD, so this
+should not cause side effects
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ Makefile | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/Makefile b/Makefile
+index 9da02cb..10dd4a9 100644
+--- a/Makefile
++++ b/Makefile
+@@ -309,7 +309,7 @@ CHECKFLAGS     := -D__linux__ -Dlinux -D__STDC__ -Dunix -D__unix__ -Wbitwise $(C
+ MODFLAGS	= -DMODULE
+ CFLAGS_MODULE   = $(MODFLAGS)
+ AFLAGS_MODULE   = $(MODFLAGS)
+-LDFLAGS_RELOCATABLE = -r -nostdlib
++LDFLAGS_RELOCATABLE = -Xlinker -r -nostdlib
+ LDFLAGS_MODULE  = $(LDFLAGS_RELOCATABLE)
+ CFLAGS_KERNEL	=
+ AFLAGS_KERNEL	=
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-core/busybox/busybox_1.23.2.bb b/yocto-poky/meta/recipes-core/busybox/busybox_1.23.2.bb
index e4d9f97..7258df0 100644
--- a/yocto-poky/meta/recipes-core/busybox/busybox_1.23.2.bb
+++ b/yocto-poky/meta/recipes-core/busybox/busybox_1.23.2.bb
@@ -30,8 +30,12 @@
            file://login-utilities.cfg \
            file://recognize_connmand.patch \
            file://busybox-cross-menuconfig.patch \
+           file://0001-Switch-to-POSIX-utmpx-API.patch \
            file://0001-ifconfig-fix-double-free-fatal-error-in-INET_sprint.patch \
            file://0001-chown-fix-help-text.patch \
+           file://0001-Use-CC-when-linking-instead-of-LD-and-use-CFLAGS-and.patch \
+           file://0002-Passthrough-r-to-linker.patch \
+           file://0001-randconfig-fix.patch \
            file://mount-via-label.cfg \
            file://sha1sum.cfg \
            file://sha256sum.cfg \
diff --git a/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch b/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch
new file mode 100644
index 0000000..c72efd4
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch
@@ -0,0 +1,18 @@
+Remove hardcoded paths so OE's configure QA does not detect it and fail the builds
+For cross compilation it is less interesting to look into host paths for target libraries anyway
+
+Upstream-Status: Inappropriate [OE Specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Index: coreutils-6.9/m4/getloadavg.m4
+===================================================================
+--- coreutils-6.9.orig/m4/getloadavg.m4
++++ coreutils-6.9/m4/getloadavg.m4
+@@ -49,7 +49,6 @@ if test $gl_have_func = no; then
+   # There is a commonly available library for RS/6000 AIX.
+   # Since it is not a standard part of AIX, it might be installed locally.
+   gl_getloadavg_LIBS=$LIBS
+-  LIBS="-L/usr/local/lib $LIBS"
+   AC_CHECK_LIB(getloadavg, getloadavg,
+ 	       [LIBS="-lgetloadavg $LIBS"], [LIBS=$gl_getloadavg_LIBS])
+ fi
diff --git a/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb b/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb
index 4ff1d50..e9f82ab 100644
--- a/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb
+++ b/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb
@@ -9,6 +9,7 @@
 LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe \
                     file://src/ls.c;beginline=4;endline=16;md5=15ed60f67b1db5fedd5dbc37cf8a9543"
 PR = "r5"
+DEPENDS = "virtual/libiconv"
 
 inherit autotools gettext texinfo
 
@@ -25,6 +26,7 @@
            file://coreutils-build-with-acl.patch \
            file://coreutils-fix-texinfo.patch \
            file://fix_for_manpage_building.patch \
+           file://loadavg.patch \
            "
 
 SRC_URI[md5sum] = "c9607d8495f16e98906e7ed2d9751a06"
diff --git a/yocto-poky/meta/recipes-core/coreutils/coreutils_8.24.bb b/yocto-poky/meta/recipes-core/coreutils/coreutils_8.24.bb
index 034ebcd..f042346 100644
--- a/yocto-poky/meta/recipes-core/coreutils/coreutils_8.24.bb
+++ b/yocto-poky/meta/recipes-core/coreutils/coreutils_8.24.bb
@@ -62,7 +62,7 @@
 	mkdir -p ${B}/src
 }
 
-do_install_append() {
+do_install_append_class-target() {
 	for i in df mktemp base64; do mv ${D}${bindir}/$i ${D}${bindir}/$i.${BPN}; done
 
 	install -d ${D}${base_bindir}
diff --git a/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbear@.service b/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbear@.service
index 6fe9942..b420bcd 100644
--- a/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbear@.service
+++ b/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbear@.service
@@ -4,8 +4,9 @@
 After=syslog.target dropbearkey.service
 
 [Service]
+Environment="DROPBEAR_RSAKEY_DIR=/etc/dropbear"
 EnvironmentFile=-/etc/default/dropbear
-ExecStart=-@SBINDIR@/dropbear -i -r /etc/dropbear/dropbear_rsa_host_key $DROPBEAR_EXTRA_ARGS
+ExecStart=-@SBINDIR@/dropbear -i -r ${DROPBEAR_RSAKEY_DIR}/dropbear_rsa_host_key $DROPBEAR_EXTRA_ARGS
 ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
 StandardInput=socket
 KillMode=process
diff --git a/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbearkey.service b/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbearkey.service
index ccc21d5..c49053d 100644
--- a/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbearkey.service
+++ b/yocto-poky/meta/recipes-core/dropbear/dropbear/dropbearkey.service
@@ -1,8 +1,13 @@
 [Unit]
 Description=SSH Key Generation
-ConditionPathExists=|!/etc/dropbear/dropbear_rsa_host_key
+RequiresMountsFor=/var /var/lib
+ConditionPathExists=!/etc/dropbear/dropbear_rsa_host_key
+ConditionPathExists=!/var/lib/dropbear/dropbear_rsa_host_key
 
 [Service]
+Environment="DROPBEAR_RSAKEY_DIR=/etc/dropbear"
+EnvironmentFile=-/etc/default/dropbear
 Type=oneshot
-ExecStart=@SBINDIR@/dropbearkey -t rsa -f /etc/dropbear/dropbear_rsa_host_key
+ExecStart=@BASE_BINDIR@/mkdir -p ${DROPBEAR_RSAKEY_DIR}
+ExecStart=@SBINDIR@/dropbearkey -t rsa -f ${DROPBEAR_RSAKEY_DIR}/dropbear_rsa_host_key
 RemainAfterExit=yes
diff --git a/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.22.bb b/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.22.bb
index 2153ece..3aefe74 100644
--- a/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.22.bb
+++ b/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.22.bb
@@ -14,12 +14,13 @@
 
 FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:"
 
-BRANCH ?= "release/${PV}/master"
+SRCBRANCH ?= "release/${PV}/master"
 GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git"
 
-SRC_URI = "${GLIBC_GIT_URI};branch=${BRANCH};name=glibc \
+SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
            git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef \
            file://fix_for_centos_5.8.patch \
+	   file://strcoll-Remove-incorrect-STRDIFF-based-optimization-.patch \
            ${EGLIBCPATCHES} \
 "
 EGLIBCPATCHES = "\
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc b/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc
index df6d073..2352bd0 100644
--- a/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc
+++ b/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc
@@ -87,7 +87,7 @@
 	if [ -e ${LOCALETREESRC}/${datadir}/locale ]; then
 		cp -fpPR ${LOCALETREESRC}/${datadir}/locale ${D}${datadir}
 	fi
-	chown root.root -R ${D}
+	chown root:root -R ${D}
 	cp -fpPR ${LOCALETREESRC}/SUPPORTED ${WORKDIR}
 }
 
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/0028-Clear-ELF_RTYPE_CLASS_EXTERN_PROTECTED_DATA-for-prel.patch b/yocto-poky/meta/recipes-core/glibc/glibc/0028-Clear-ELF_RTYPE_CLASS_EXTERN_PROTECTED_DATA-for-prel.patch
new file mode 100644
index 0000000..3455df1
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/0028-Clear-ELF_RTYPE_CLASS_EXTERN_PROTECTED_DATA-for-prel.patch
@@ -0,0 +1,84 @@
+From cadaf1336332ca7bcdfe4a400776e5782a20e26d Mon Sep 17 00:00:00 2001
+From: "H.J. Lu" <hjl.tools@gmail.com>
+Date: Wed, 28 Oct 2015 07:49:44 -0700
+Subject: [PATCH] Keep only ELF_RTYPE_CLASS_{PLT|COPY} bits for prelink
+
+prelink runs ld.so with the environment variable LD_TRACE_PRELINKING
+set to dump the relocation type class from _dl_debug_bindings.  prelink
+has the following relocation type classes:
+
+ #define RTYPE_CLASS_VALID       8
+ #define RTYPE_CLASS_PLT         (8|1)
+ #define RTYPE_CLASS_COPY        (8|2)
+ #define RTYPE_CLASS_TLS         (8|4)
+
+where ELF_RTYPE_CLASS_EXTERN_PROTECTED_DATA has a conflict with
+RTYPE_CLASS_TLS.
+
+Since prelink only uses ELF_RTYPE_CLASS_PLT and ELF_RTYPE_CLASS_COPY
+bits, we should clear the other bits when the DL_DEBUG_PRELINK bit is
+set.
+
+	[BZ #19178]
+	* elf/dl-lookup.c (RTYPE_CLASS_VALID): New.
+	(RTYPE_CLASS_PLT): Likewise.
+	(RTYPE_CLASS_COPY): Likewise.
+	(RTYPE_CLASS_TLS): Likewise.
+	(_dl_debug_bindings): Use RTYPE_CLASS_TLS and RTYPE_CLASS_VALID
+	to set relocation type class for DL_DEBUG_PRELINK.  Keep only
+	ELF_RTYPE_CLASS_PLT and ELF_RTYPE_CLASS_COPY bits for
+	DL_DEBUG_PRELINK.
+
+Upstream-Status: Submitted (https://sourceware.org/bugzilla/show_bug.cgi?id=19178)
+
+Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
+---
+ elf/dl-lookup.c | 21 +++++++++++++++++++--
+ 1 file changed, 19 insertions(+), 2 deletions(-)
+
+diff --git a/elf/dl-lookup.c b/elf/dl-lookup.c
+index 581fb20..6ae6cc3 100644
+--- a/elf/dl-lookup.c
++++ b/elf/dl-lookup.c
+@@ -1016,6 +1016,18 @@ _dl_debug_bindings (const char *undef_name, struct link_map *undef_map,
+ #ifdef SHARED
+   if (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK)
+     {
++/* ELF_RTYPE_CLASS_XXX must match RTYPE_CLASS_XXX used by prelink with
++   LD_TRACE_PRELINKING.  */
++#define RTYPE_CLASS_VALID	8
++#define RTYPE_CLASS_PLT		(8|1)
++#define RTYPE_CLASS_COPY	(8|2)
++#define RTYPE_CLASS_TLS		(8|4)
++#if ELF_RTYPE_CLASS_PLT != 0 && ELF_RTYPE_CLASS_PLT != 1
++# error ELF_RTYPE_CLASS_PLT must be 0 or 1!
++#endif
++#if ELF_RTYPE_CLASS_COPY != 0 && ELF_RTYPE_CLASS_COPY != 2
++# error ELF_RTYPE_CLASS_COPY must be 0 or 2!
++#endif
+       int conflict = 0;
+       struct sym_val val = { NULL, NULL };
+ 
+@@ -1071,12 +1083,17 @@ _dl_debug_bindings (const char *undef_name, struct link_map *undef_map,
+ 
+       if (value->s)
+ 	{
++	  /* Keep only ELF_RTYPE_CLASS_PLT and ELF_RTYPE_CLASS_COPY
++	     bits since since prelink only uses them.  */
++	  type_class &= ELF_RTYPE_CLASS_PLT | ELF_RTYPE_CLASS_COPY;
+ 	  if (__glibc_unlikely (ELFW(ST_TYPE) (value->s->st_info)
+ 				== STT_TLS))
+-	    type_class = 4;
++	    /* Clear the RTYPE_CLASS_VALID bit in RTYPE_CLASS_TLS.  */
++	    type_class = RTYPE_CLASS_TLS & ~RTYPE_CLASS_VALID;
+ 	  else if (__glibc_unlikely (ELFW(ST_TYPE) (value->s->st_info)
+ 				     == STT_GNU_IFUNC))
+-	    type_class |= 8;
++	    /* Set the RTYPE_CLASS_VALID bit.  */
++	    type_class |= RTYPE_CLASS_VALID;
+ 	}
+ 
+       if (conflict
+-- 
+1.9.3
+
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-7547.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-7547.patch
new file mode 100644
index 0000000..4e539f8
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-7547.patch
@@ -0,0 +1,642 @@
+From e9db92d3acfe1822d56d11abcea5bfc4c41cf6ca Mon Sep 17 00:00:00 2001
+From: Carlos O'Donell <carlos@systemhalted.org>
+Date: Tue, 16 Feb 2016 21:26:37 -0500
+Subject: [PATCH] CVE-2015-7547: getaddrinfo() stack-based buffer overflow (Bug
+ 18665).
+
+* A stack-based buffer overflow was found in libresolv when invoked from
+  libnss_dns, allowing specially crafted DNS responses to seize control
+  of execution flow in the DNS client.  The buffer overflow occurs in
+  the functions send_dg (send datagram) and send_vc (send TCP) for the
+  NSS module libnss_dns.so.2 when calling getaddrinfo with AF_UNSPEC
+  family.  The use of AF_UNSPEC triggers the low-level resolver code to
+  send out two parallel queries for A and AAAA.  A mismanagement of the
+  buffers used for those queries could result in the response of a query
+  writing beyond the alloca allocated buffer created by
+  _nss_dns_gethostbyname4_r.  Buffer management is simplified to remove
+  the overflow.  Thanks to the Google Security Team and Red Hat for
+  reporting the security impact of this issue, and Robert Holiday of
+  Ciena for reporting the related bug 18665. (CVE-2015-7547)
+
+See also:
+https://sourceware.org/ml/libc-alpha/2016-02/msg00416.html
+https://sourceware.org/ml/libc-alpha/2016-02/msg00418.html
+
+Upstream-Status: Backport
+CVE: CVE-2015-7547
+
+https://sourceware.org/git/?p=glibc.git;a=commit;h=e9db92d3acfe1822d56d11abcea5bfc4c41cf6ca
+minor tweaking to apply to ChangeLog and res_send.c
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog                 |  17 ++-
+ NEWS                      |  14 +++
+ resolv/nss_dns/dns-host.c | 111 +++++++++++++++++++-
+ resolv/res_query.c        |   3 +
+ resolv/res_send.c         | 260 +++++++++++++++++++++++++++++++++++-----------
+ 5 files changed, 339 insertions(+), 66 deletions(-)
+
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -105,6 +105,20 @@ Security related changes:
+   depending on the length of the string passed as an argument to the
+   functions.  Reported by Joseph Myers.
+ 
++* A stack-based buffer overflow was found in libresolv when invoked from
++  libnss_dns, allowing specially crafted DNS responses to seize control
++  of execution flow in the DNS client.  The buffer overflow occurs in
++  the functions send_dg (send datagram) and send_vc (send TCP) for the
++  NSS module libnss_dns.so.2 when calling getaddrinfo with AF_UNSPEC
++  family.  The use of AF_UNSPEC triggers the low-level resolver code to
++  send out two parallel queries for A and AAAA.  A mismanagement of the
++  buffers used for those queries could result in the response of a query
++  writing beyond the alloca allocated buffer created by
++  _nss_dns_gethostbyname4_r.  Buffer management is simplified to remove
++  the overflow.  Thanks to the Google Security Team and Red Hat for
++  reporting the security impact of this issue, and Robert Holiday of
++  Ciena for reporting the related bug 18665. (CVE-2015-7547)
++
+ * The following bugs are resolved with this release:
+ 
+   6652, 10672, 12674, 12847, 12926, 13862, 14132, 14138, 14171, 14498,
+Index: git/resolv/nss_dns/dns-host.c
+===================================================================
+--- git.orig/resolv/nss_dns/dns-host.c
++++ git/resolv/nss_dns/dns-host.c
+@@ -1031,7 +1031,10 @@ gaih_getanswer_slice (const querybuf *an
+   int h_namelen = 0;
+ 
+   if (ancount == 0)
+-    return NSS_STATUS_NOTFOUND;
++    {
++      *h_errnop = HOST_NOT_FOUND;
++      return NSS_STATUS_NOTFOUND;
++    }
+ 
+   while (ancount-- > 0 && cp < end_of_message && had_error == 0)
+     {
+@@ -1208,7 +1211,14 @@ gaih_getanswer_slice (const querybuf *an
+   /* Special case here: if the resolver sent a result but it only
+      contains a CNAME while we are looking for a T_A or T_AAAA record,
+      we fail with NOTFOUND instead of TRYAGAIN.  */
+-  return canon == NULL ? NSS_STATUS_TRYAGAIN : NSS_STATUS_NOTFOUND;
++  if (canon != NULL)
++    {
++      *h_errnop = HOST_NOT_FOUND;
++      return NSS_STATUS_NOTFOUND;
++    }
++
++  *h_errnop = NETDB_INTERNAL;
++  return NSS_STATUS_TRYAGAIN;
+ }
+ 
+ 
+@@ -1222,11 +1232,101 @@ gaih_getanswer (const querybuf *answer1,
+ 
+   enum nss_status status = NSS_STATUS_NOTFOUND;
+ 
++  /* Combining the NSS status of two distinct queries requires some
++     compromise and attention to symmetry (A or AAAA queries can be
++     returned in any order).  What follows is a breakdown of how this
++     code is expected to work and why. We discuss only SUCCESS,
++     TRYAGAIN, NOTFOUND and UNAVAIL, since they are the only returns
++     that apply (though RETURN and MERGE exist).  We make a distinction
++     between TRYAGAIN (recoverable) and TRYAGAIN' (not-recoverable).
++     A recoverable TRYAGAIN is almost always due to buffer size issues
++     and returns ERANGE in errno and the caller is expected to retry
++     with a larger buffer.
++
++     Lastly, you may be tempted to make significant changes to the
++     conditions in this code to bring about symmetry between responses.
++     Please don't change anything without due consideration for
++     expected application behaviour.  Some of the synthesized responses
++     aren't very well thought out and sometimes appear to imply that
++     IPv4 responses are always answer 1, and IPv6 responses are always
++     answer 2, but that's not true (see the implementation of send_dg
++     and send_vc to see response can arrive in any order, particularly
++     for UDP). However, we expect it holds roughly enough of the time
++     that this code works, but certainly needs to be fixed to make this
++     a more robust implementation.
++
++     ----------------------------------------------
++     | Answer 1 Status /   | Synthesized | Reason |
++     | Answer 2 Status     | Status      |        |
++     |--------------------------------------------|
++     | SUCCESS/SUCCESS     | SUCCESS     | [1]    |
++     | SUCCESS/TRYAGAIN    | TRYAGAIN    | [5]    |
++     | SUCCESS/TRYAGAIN'   | SUCCESS     | [1]    |
++     | SUCCESS/NOTFOUND    | SUCCESS     | [1]    |
++     | SUCCESS/UNAVAIL     | SUCCESS     | [1]    |
++     | TRYAGAIN/SUCCESS    | TRYAGAIN    | [2]    |
++     | TRYAGAIN/TRYAGAIN   | TRYAGAIN    | [2]    |
++     | TRYAGAIN/TRYAGAIN'  | TRYAGAIN    | [2]    |
++     | TRYAGAIN/NOTFOUND   | TRYAGAIN    | [2]    |
++     | TRYAGAIN/UNAVAIL    | TRYAGAIN    | [2]    |
++     | TRYAGAIN'/SUCCESS   | SUCCESS     | [3]    |
++     | TRYAGAIN'/TRYAGAIN  | TRYAGAIN    | [3]    |
++     | TRYAGAIN'/TRYAGAIN' | TRYAGAIN'   | [3]    |
++     | TRYAGAIN'/NOTFOUND  | TRYAGAIN'   | [3]    |
++     | TRYAGAIN'/UNAVAIL   | UNAVAIL     | [3]    |
++     | NOTFOUND/SUCCESS    | SUCCESS     | [3]    |
++     | NOTFOUND/TRYAGAIN   | TRYAGAIN    | [3]    |
++     | NOTFOUND/TRYAGAIN'  | TRYAGAIN'   | [3]    |
++     | NOTFOUND/NOTFOUND   | NOTFOUND    | [3]    |
++     | NOTFOUND/UNAVAIL    | UNAVAIL     | [3]    |
++     | UNAVAIL/SUCCESS     | UNAVAIL     | [4]    |
++     | UNAVAIL/TRYAGAIN    | UNAVAIL     | [4]    |
++     | UNAVAIL/TRYAGAIN'   | UNAVAIL     | [4]    |
++     | UNAVAIL/NOTFOUND    | UNAVAIL     | [4]    |
++     | UNAVAIL/UNAVAIL     | UNAVAIL     | [4]    |
++     ----------------------------------------------
++
++     [1] If the first response is a success we return success.
++	 This ignores the state of the second answer and in fact
++	 incorrectly sets errno and h_errno to that of the second
++	 answer.  However because the response is a success we ignore
++	 *errnop and *h_errnop (though that means you touched errno on
++	 success).  We are being conservative here and returning the
++	 likely IPv4 response in the first answer as a success.
++
++     [2] If the first response is a recoverable TRYAGAIN we return
++	 that instead of looking at the second response.  The
++	 expectation here is that we have failed to get an IPv4 response
++	 and should retry both queries.
++
++     [3] If the first response was not a SUCCESS and the second
++	 response is not NOTFOUND (had a SUCCESS, need to TRYAGAIN,
++	 or failed entirely e.g. TRYAGAIN' and UNAVAIL) then use the
++	 result from the second response, otherwise the first responses
++	 status is used.  Again we have some odd side-effects when the
++	 second response is NOTFOUND because we overwrite *errnop and
++	 *h_errnop that means that a first answer of NOTFOUND might see
++	 its *errnop and *h_errnop values altered.  Whether it matters
++	 in practice that a first response NOTFOUND has the wrong
++	 *errnop and *h_errnop is undecided.
++
++     [4] If the first response is UNAVAIL we return that instead of
++	 looking at the second response.  The expectation here is that
++	 it will have failed similarly e.g. configuration failure.
++
++     [5] Testing this code is complicated by the fact that truncated
++	 second response buffers might be returned as SUCCESS if the
++	 first answer is a SUCCESS.  To fix this we add symmetry to
++	 TRYAGAIN with the second response.  If the second response
++	 is a recoverable error we now return TRYAGIN even if the first
++	 response was SUCCESS.  */
++
+   if (anslen1 > 0)
+     status = gaih_getanswer_slice(answer1, anslen1, qname,
+ 				  &pat, &buffer, &buflen,
+ 				  errnop, h_errnop, ttlp,
+ 				  &first);
++
+   if ((status == NSS_STATUS_SUCCESS || status == NSS_STATUS_NOTFOUND
+        || (status == NSS_STATUS_TRYAGAIN
+ 	   /* We want to look at the second answer in case of an
+@@ -1242,8 +1342,15 @@ gaih_getanswer (const querybuf *answer1,
+ 						     &pat, &buffer, &buflen,
+ 						     errnop, h_errnop, ttlp,
+ 						     &first);
++      /* Use the second response status in some cases.  */
+       if (status != NSS_STATUS_SUCCESS && status2 != NSS_STATUS_NOTFOUND)
+ 	status = status2;
++      /* Do not return a truncated second response (unless it was
++	 unavoidable e.g. unrecoverable TRYAGAIN).  */
++      if (status == NSS_STATUS_SUCCESS
++	  && (status2 == NSS_STATUS_TRYAGAIN
++	      && *errnop == ERANGE && *h_errnop != NO_RECOVERY))
++	status = NSS_STATUS_TRYAGAIN;
+     }
+ 
+   return status;
+Index: git/resolv/res_query.c
+===================================================================
+--- git.orig/resolv/res_query.c
++++ git/resolv/res_query.c
+@@ -396,6 +396,7 @@ __libc_res_nsearch(res_state statp,
+ 		  {
+ 		    free (*answerp2);
+ 		    *answerp2 = NULL;
++		    *nanswerp2 = 0;
+ 		    *answerp2_malloced = 0;
+ 		  }
+ 	}
+@@ -447,6 +448,7 @@ __libc_res_nsearch(res_state statp,
+ 			  {
+ 			    free (*answerp2);
+ 			    *answerp2 = NULL;
++			    *nanswerp2 = 0;
+ 			    *answerp2_malloced = 0;
+ 			  }
+ 
+@@ -521,6 +523,7 @@ __libc_res_nsearch(res_state statp,
+ 	  {
+ 	    free (*answerp2);
+ 	    *answerp2 = NULL;
++	    *nanswerp2 = 0;
+ 	    *answerp2_malloced = 0;
+ 	  }
+ 	if (saved_herrno != -1)
+Index: git/resolv/res_send.c
+===================================================================
+--- git.orig/resolv/res_send.c
++++ git/resolv/res_send.c
+@@ -1,3 +1,20 @@
++/* Copyright (C) 2016 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
+ /*
+  * Copyright (c) 1985, 1989, 1993
+  *    The Regents of the University of California.  All rights reserved.
+@@ -363,6 +380,8 @@ __libc_res_nsend(res_state statp, const
+ #ifdef USE_HOOKS
+ 	if (__glibc_unlikely (statp->qhook || statp->rhook))       {
+ 		if (anssiz < MAXPACKET && ansp) {
++			/* Always allocate MAXPACKET, callers expect
++			   this specific size.  */
+ 			u_char *buf = malloc (MAXPACKET);
+ 			if (buf == NULL)
+ 				return (-1);
+@@ -638,6 +657,77 @@ get_nsaddr (res_state statp, int n)
+     return (struct sockaddr *) (void *) &statp->nsaddr_list[n];
+ }
+ 
++/* The send_vc function is responsible for sending a DNS query over TCP
++   to the nameserver numbered NS from the res_state STATP i.e.
++   EXT(statp).nssocks[ns].  The function supports sending both IPv4 and
++   IPv6 queries at the same serially on the same socket.
++
++   Please note that for TCP there is no way to disable sending both
++   queries, unlike UDP, which honours RES_SNGLKUP and RES_SNGLKUPREOP
++   and sends the queries serially and waits for the result after each
++   sent query.  This implemetnation should be corrected to honour these
++   options.
++
++   Please also note that for TCP we send both queries over the same
++   socket one after another.  This technically violates best practice
++   since the server is allowed to read the first query, respond, and
++   then close the socket (to service another client).  If the server
++   does this, then the remaining second query in the socket data buffer
++   will cause the server to send the client an RST which will arrive
++   asynchronously and the client's OS will likely tear down the socket
++   receive buffer resulting in a potentially short read and lost
++   response data.  This will force the client to retry the query again,
++   and this process may repeat until all servers and connection resets
++   are exhausted and then the query will fail.  It's not known if this
++   happens with any frequency in real DNS server implementations.  This
++   implementation should be corrected to use two sockets by default for
++   parallel queries.
++
++   The query stored in BUF of BUFLEN length is sent first followed by
++   the query stored in BUF2 of BUFLEN2 length.  Queries are sent
++   serially on the same socket.
++
++   Answers to the query are stored firstly in *ANSP up to a max of
++   *ANSSIZP bytes.  If more than *ANSSIZP bytes are needed and ANSCP
++   is non-NULL (to indicate that modifying the answer buffer is allowed)
++   then malloc is used to allocate a new response buffer and ANSCP and
++   ANSP will both point to the new buffer.  If more than *ANSSIZP bytes
++   are needed but ANSCP is NULL, then as much of the response as
++   possible is read into the buffer, but the results will be truncated.
++   When truncation happens because of a small answer buffer the DNS
++   packets header field TC will bet set to 1, indicating a truncated
++   message and the rest of the socket data will be read and discarded.
++
++   Answers to the query are stored secondly in *ANSP2 up to a max of
++   *ANSSIZP2 bytes, with the actual response length stored in
++   *RESPLEN2.  If more than *ANSSIZP bytes are needed and ANSP2
++   is non-NULL (required for a second query) then malloc is used to
++   allocate a new response buffer, *ANSSIZP2 is set to the new buffer
++   size and *ANSP2_MALLOCED is set to 1.
++
++   The ANSP2_MALLOCED argument will eventually be removed as the
++   change in buffer pointer can be used to detect the buffer has
++   changed and that the caller should use free on the new buffer.
++
++   Note that the answers may arrive in any order from the server and
++   therefore the first and second answer buffers may not correspond to
++   the first and second queries.
++
++   It is not supported to call this function with a non-NULL ANSP2
++   but a NULL ANSCP.  Put another way, you can call send_vc with a
++   single unmodifiable buffer or two modifiable buffers, but no other
++   combination is supported.
++
++   It is the caller's responsibility to free the malloc allocated
++   buffers by detecting that the pointers have changed from their
++   original values i.e. *ANSCP or *ANSP2 has changed.
++
++   If errors are encountered then *TERRNO is set to an appropriate
++   errno value and a zero result is returned for a recoverable error,
++   and a less-than zero result is returned for a non-recoverable error.
++
++   If no errors are encountered then *TERRNO is left unmodified and
++   a the length of the first response in bytes is returned.  */
+ static int
+ send_vc(res_state statp,
+ 	const u_char *buf, int buflen, const u_char *buf2, int buflen2,
+@@ -647,11 +737,7 @@ send_vc(res_state statp,
+ {
+ 	const HEADER *hp = (HEADER *) buf;
+ 	const HEADER *hp2 = (HEADER *) buf2;
+-	u_char *ans = *ansp;
+-	int orig_anssizp = *anssizp;
+-	// XXX REMOVE
+-	// int anssiz = *anssizp;
+-	HEADER *anhp = (HEADER *) ans;
++	HEADER *anhp = (HEADER *) *ansp;
+ 	struct sockaddr *nsap = get_nsaddr (statp, ns);
+ 	int truncating, connreset, n;
+ 	/* On some architectures compiler might emit a warning indicating
+@@ -743,6 +829,8 @@ send_vc(res_state statp,
+ 	 * Receive length & response
+ 	 */
+ 	int recvresp1 = 0;
++	/* Skip the second response if there is no second query.
++	   To do that we mark the second response as received.  */
+ 	int recvresp2 = buf2 == NULL;
+ 	uint16_t rlen16;
+  read_len:
+@@ -779,40 +867,14 @@ send_vc(res_state statp,
+ 	u_char **thisansp;
+ 	int *thisresplenp;
+ 	if ((recvresp1 | recvresp2) == 0 || buf2 == NULL) {
++               /* We have not received any responses
++                  yet or we only have one response to
++                  receive.  */
+ 		thisanssizp = anssizp;
+ 		thisansp = anscp ?: ansp;
+ 		assert (anscp != NULL || ansp2 == NULL);
+ 		thisresplenp = &resplen;
+ 	} else {
+-		if (*anssizp != MAXPACKET) {
+-			/* No buffer allocated for the first
+-			   reply.  We can try to use the rest
+-			   of the user-provided buffer.  */
+-#if __GNUC_PREREQ (4, 7)
+-			DIAG_PUSH_NEEDS_COMMENT;
+-			DIAG_IGNORE_NEEDS_COMMENT (5, "-Wmaybe-uninitialized");
+-#endif
+-#if _STRING_ARCH_unaligned
+-			*anssizp2 = orig_anssizp - resplen;
+-			*ansp2 = *ansp + resplen;
+-#else
+-			int aligned_resplen
+-			  = ((resplen + __alignof__ (HEADER) - 1)
+-			     & ~(__alignof__ (HEADER) - 1));
+-			*anssizp2 = orig_anssizp - aligned_resplen;
+-			*ansp2 = *ansp + aligned_resplen;
+-#endif
+-#if __GNUC_PREREQ (4, 7)
+-			DIAG_POP_NEEDS_COMMENT;
+-#endif
+-		} else {
+-			/* The first reply did not fit into the
+-			   user-provided buffer.  Maybe the second
+-			   answer will.  */
+-			*anssizp2 = orig_anssizp;
+-			*ansp2 = *ansp;
+-		}
+-
+ 		thisanssizp = anssizp2;
+ 		thisansp = ansp2;
+ 		thisresplenp = resplen2;
+@@ -820,10 +882,14 @@ send_vc(res_state statp,
+ 	anhp = (HEADER *) *thisansp;
+ 
+ 	*thisresplenp = rlen;
+-	if (rlen > *thisanssizp) {
+-		/* Yes, we test ANSCP here.  If we have two buffers
+-		   both will be allocatable.  */
+-		if (__glibc_likely (anscp != NULL))       {
++	/* Is the answer buffer too small?  */
++	if (*thisanssizp < rlen) {
++		/* If the current buffer is not the the static
++		   user-supplied buffer then we can reallocate
++		   it.  */
++		if (thisansp != NULL && thisansp != ansp) {
++			/* Always allocate MAXPACKET, callers expect
++			   this specific size.  */
+ 			u_char *newp = malloc (MAXPACKET);
+ 			if (newp == NULL) {
+ 				*terrno = ENOMEM;
+@@ -835,6 +901,9 @@ send_vc(res_state statp,
+ 			if (thisansp == ansp2)
+ 			  *ansp2_malloced = 1;
+ 			anhp = (HEADER *) newp;
++			/* A uint16_t can't be larger than MAXPACKET
++			   thus it's safe to allocate MAXPACKET but
++			   read RLEN bytes instead.  */
+ 			len = rlen;
+ 		} else {
+ 			Dprint(statp->options & RES_DEBUG,
+@@ -997,6 +1066,66 @@ reopen (res_state statp, int *terrno, in
+ 	return 1;
+ }
+ 
++/* The send_dg function is responsible for sending a DNS query over UDP
++   to the nameserver numbered NS from the res_state STATP i.e.
++   EXT(statp).nssocks[ns].  The function supports IPv4 and IPv6 queries
++   along with the ability to send the query in parallel for both stacks
++   (default) or serially (RES_SINGLKUP).  It also supports serial lookup
++   with a close and reopen of the socket used to talk to the server
++   (RES_SNGLKUPREOP) to work around broken name servers.
++
++   The query stored in BUF of BUFLEN length is sent first followed by
++   the query stored in BUF2 of BUFLEN2 length.  Queries are sent
++   in parallel (default) or serially (RES_SINGLKUP or RES_SNGLKUPREOP).
++
++   Answers to the query are stored firstly in *ANSP up to a max of
++   *ANSSIZP bytes.  If more than *ANSSIZP bytes are needed and ANSCP
++   is non-NULL (to indicate that modifying the answer buffer is allowed)
++   then malloc is used to allocate a new response buffer and ANSCP and
++   ANSP will both point to the new buffer.  If more than *ANSSIZP bytes
++   are needed but ANSCP is NULL, then as much of the response as
++   possible is read into the buffer, but the results will be truncated.
++   When truncation happens because of a small answer buffer the DNS
++   packets header field TC will bet set to 1, indicating a truncated
++   message, while the rest of the UDP packet is discarded.
++
++   Answers to the query are stored secondly in *ANSP2 up to a max of
++   *ANSSIZP2 bytes, with the actual response length stored in
++   *RESPLEN2.  If more than *ANSSIZP bytes are needed and ANSP2
++   is non-NULL (required for a second query) then malloc is used to
++   allocate a new response buffer, *ANSSIZP2 is set to the new buffer
++   size and *ANSP2_MALLOCED is set to 1.
++
++   The ANSP2_MALLOCED argument will eventually be removed as the
++   change in buffer pointer can be used to detect the buffer has
++   changed and that the caller should use free on the new buffer.
++
++   Note that the answers may arrive in any order from the server and
++   therefore the first and second answer buffers may not correspond to
++   the first and second queries.
++
++   It is not supported to call this function with a non-NULL ANSP2
++   but a NULL ANSCP.  Put another way, you can call send_vc with a
++   single unmodifiable buffer or two modifiable buffers, but no other
++   combination is supported.
++
++   It is the caller's responsibility to free the malloc allocated
++   buffers by detecting that the pointers have changed from their
++   original values i.e. *ANSCP or *ANSP2 has changed.
++
++   If an answer is truncated because of UDP datagram DNS limits then
++   *V_CIRCUIT is set to 1 and the return value non-zero to indicate to
++   the caller to retry with TCP.  The value *GOTSOMEWHERE is set to 1
++   if any progress was made reading a response from the nameserver and
++   is used by the caller to distinguish between ECONNREFUSED and
++   ETIMEDOUT (the latter if *GOTSOMEWHERE is 1).
++
++   If errors are encountered then *TERRNO is set to an appropriate
++   errno value and a zero result is returned for a recoverable error,
++   and a less-than zero result is returned for a non-recoverable error.
++
++   If no errors are encountered then *TERRNO is left unmodified and
++   a the length of the first response in bytes is returned.  */
+ static int
+ send_dg(res_state statp,
+ 	const u_char *buf, int buflen, const u_char *buf2, int buflen2,
+@@ -1006,8 +1135,6 @@ send_dg(res_state statp,
+ {
+ 	const HEADER *hp = (HEADER *) buf;
+ 	const HEADER *hp2 = (HEADER *) buf2;
+-	u_char *ans = *ansp;
+-	int orig_anssizp = *anssizp;
+ 	struct timespec now, timeout, finish;
+ 	struct pollfd pfd[1];
+ 	int ptimeout;
+@@ -1040,6 +1167,8 @@ send_dg(res_state statp,
+ 	int need_recompute = 0;
+ 	int nwritten = 0;
+ 	int recvresp1 = 0;
++	/* Skip the second response if there is no second query.
++	   To do that we mark the second response as received.  */
+ 	int recvresp2 = buf2 == NULL;
+ 	pfd[0].fd = EXT(statp).nssocks[ns];
+ 	pfd[0].events = POLLOUT;
+@@ -1203,55 +1332,56 @@ send_dg(res_state statp,
+ 		int *thisresplenp;
+ 
+ 		if ((recvresp1 | recvresp2) == 0 || buf2 == NULL) {
++			/* We have not received any responses
++			   yet or we only have one response to
++			   receive.  */
+ 			thisanssizp = anssizp;
+ 			thisansp = anscp ?: ansp;
+ 			assert (anscp != NULL || ansp2 == NULL);
+ 			thisresplenp = &resplen;
+ 		} else {
+-			if (*anssizp != MAXPACKET) {
+-				/* No buffer allocated for the first
+-				   reply.  We can try to use the rest
+-				   of the user-provided buffer.  */
+-#if _STRING_ARCH_unaligned
+-				*anssizp2 = orig_anssizp - resplen;
+-				*ansp2 = *ansp + resplen;
+-#else
+-				int aligned_resplen
+-				  = ((resplen + __alignof__ (HEADER) - 1)
+-				     & ~(__alignof__ (HEADER) - 1));
+-				*anssizp2 = orig_anssizp - aligned_resplen;
+-				*ansp2 = *ansp + aligned_resplen;
+-#endif
+-			} else {
+-				/* The first reply did not fit into the
+-				   user-provided buffer.  Maybe the second
+-				   answer will.  */
+-				*anssizp2 = orig_anssizp;
+-				*ansp2 = *ansp;
+-			}
+-
+ 			thisanssizp = anssizp2;
+ 			thisansp = ansp2;
+ 			thisresplenp = resplen2;
+ 		}
+ 
+ 		if (*thisanssizp < MAXPACKET
+-		    /* Yes, we test ANSCP here.  If we have two buffers
+-		       both will be allocatable.  */
+-		    && anscp
++		    /* If the current buffer is not the the static
++		       user-supplied buffer then we can reallocate
++		       it.  */
++		    && (thisansp != NULL && thisansp != ansp)
+ #ifdef FIONREAD
++		    /* Is the size too small?  */
+ 		    && (ioctl (pfd[0].fd, FIONREAD, thisresplenp) < 0
+ 			|| *thisanssizp < *thisresplenp)
+ #endif
+                     ) {
++			/* Always allocate MAXPACKET, callers expect
++			   this specific size.  */
+ 			u_char *newp = malloc (MAXPACKET);
+ 			if (newp != NULL) {
+-				*anssizp = MAXPACKET;
+-				*thisansp = ans = newp;
++				*thisanssizp = MAXPACKET;
++				*thisansp = newp;
+ 				if (thisansp == ansp2)
+ 				  *ansp2_malloced = 1;
+ 			}
+ 		}
++		/* We could end up with truncation if anscp was NULL
++		   (not allowed to change caller's buffer) and the
++		   response buffer size is too small.  This isn't a
++		   reliable way to detect truncation because the ioctl
++		   may be an inaccurate report of the UDP message size.
++		   Therefore we use this only to issue debug output.
++		   To do truncation accurately with UDP we need
++		   MSG_TRUNC which is only available on Linux.  We
++		   can abstract out the Linux-specific feature in the
++		   future to detect truncation.  */
++		if (__glibc_unlikely (*thisanssizp < *thisresplenp)) {
++			Dprint(statp->options & RES_DEBUG,
++			       (stdout, ";; response may be truncated (UDP)\n")
++			);
++		}
++
+ 		HEADER *anhp = (HEADER *) *thisansp;
+ 		socklen_t fromlen = sizeof(struct sockaddr_in6);
+ 		assert (sizeof(from) <= fromlen);
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,18 @@
++2016-02-15  Carlos O'Donell  <carlos@redhat.com>
++
++   [BZ #18665]
++   * resolv/nss_dns/dns-host.c (gaih_getanswer_slice): Always set
++   *herrno_p.
++   (gaih_getanswer): Document functional behviour. Return tryagain
++   if any result is tryagain.
++   * resolv/res_query.c (__libc_res_nsearch): Set buffer size to zero
++   when freed.
++   * resolv/res_send.c: Add copyright text.
++   (__libc_res_nsend): Document that MAXPACKET is expected.
++   (send_vc): Document. Remove buffer reuse.
++   (send_dg): Document. Remove buffer reuse. Set *thisanssizp to set the
++   size of the buffer. Add Dprint for truncated UDP buffer.
++
+ 2015-09-26  Paul Pluzhnikov  <ppluzhnikov@google.com>
+ 
+ 	[BZ #18985]
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8776.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8776.patch
new file mode 100644
index 0000000..684f344
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8776.patch
@@ -0,0 +1,155 @@
+From d36c75fc0d44deec29635dd239b0fbd206ca49b7 Mon Sep 17 00:00:00 2001
+From: Paul Pluzhnikov <ppluzhnikov@google.com>
+Date: Sat, 26 Sep 2015 13:27:48 -0700
+Subject: [PATCH] Fix BZ #18985 -- out of range data to strftime() causes a
+ segfault
+
+Upstream-Status: Backport
+CVE: CVE-2015-8776
+[Yocto # 8980]
+
+https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=d36c75fc0d44deec29635dd239b0fbd206ca49b7
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog           |  8 ++++++++
+ NEWS                |  2 +-
+ time/strftime_l.c   | 20 +++++++++++++-------
+ time/tst-strftime.c | 52 +++++++++++++++++++++++++++++++++++++++++++++++++++-
+ 4 files changed, 73 insertions(+), 9 deletions(-)
+
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,11 @@
++2015-09-26  Paul Pluzhnikov  <ppluzhnikov@google.com>
++
++	[BZ #18985]
++	* time/strftime_l.c (a_wkday, f_wkday, a_month, f_month): Range check.
++	(__strftime_internal): Likewise.
++	* time/tst-strftime.c (do_bz18985): New test.
++	(do_test): Call it.
++
+ 2015-12-04  Joseph Myers  <joseph@codesourcery.com>
+ 
+ 	[BZ #16961]
+Index: git/time/strftime_l.c
+===================================================================
+--- git.orig/time/strftime_l.c
++++ git/time/strftime_l.c
+@@ -514,13 +514,17 @@ __strftime_internal (s, maxsize, format,
+      only a few elements.  Dereference the pointers only if the format
+      requires this.  Then it is ok to fail if the pointers are invalid.  */
+ # define a_wkday \
+-  ((const CHAR_T *) _NL_CURRENT (LC_TIME, NLW(ABDAY_1) + tp->tm_wday))
++  ((const CHAR_T *) (tp->tm_wday < 0 || tp->tm_wday > 6			     \
++		     ? "?" : _NL_CURRENT (LC_TIME, NLW(ABDAY_1) + tp->tm_wday)))
+ # define f_wkday \
+-  ((const CHAR_T *) _NL_CURRENT (LC_TIME, NLW(DAY_1) + tp->tm_wday))
++  ((const CHAR_T *) (tp->tm_wday < 0 || tp->tm_wday > 6			     \
++		     ? "?" : _NL_CURRENT (LC_TIME, NLW(DAY_1) + tp->tm_wday)))
+ # define a_month \
+-  ((const CHAR_T *) _NL_CURRENT (LC_TIME, NLW(ABMON_1) + tp->tm_mon))
++  ((const CHAR_T *) (tp->tm_mon < 0 || tp->tm_mon > 11			     \
++		     ? "?" : _NL_CURRENT (LC_TIME, NLW(ABMON_1) + tp->tm_mon)))
+ # define f_month \
+-  ((const CHAR_T *) _NL_CURRENT (LC_TIME, NLW(MON_1) + tp->tm_mon))
++  ((const CHAR_T *) (tp->tm_mon < 0 || tp->tm_mon > 11			     \
++		     ? "?" : _NL_CURRENT (LC_TIME, NLW(MON_1) + tp->tm_mon)))
+ # define ampm \
+   ((const CHAR_T *) _NL_CURRENT (LC_TIME, tp->tm_hour > 11		      \
+ 				 ? NLW(PM_STR) : NLW(AM_STR)))
+@@ -530,8 +534,10 @@ __strftime_internal (s, maxsize, format,
+ # define ap_len STRLEN (ampm)
+ #else
+ # if !HAVE_STRFTIME
+-#  define f_wkday (weekday_name[tp->tm_wday])
+-#  define f_month (month_name[tp->tm_mon])
++#  define f_wkday (tp->tm_wday < 0 || tp->tm_wday > 6	\
++		   ? "?" : weekday_name[tp->tm_wday])
++#  define f_month (tp->tm_mon < 0 || tp->tm_mon > 11	\
++		   ? "?" : month_name[tp->tm_mon])
+ #  define a_wkday f_wkday
+ #  define a_month f_month
+ #  define ampm (L_("AMPM") + 2 * (tp->tm_hour > 11))
+@@ -1325,7 +1331,7 @@ __strftime_internal (s, maxsize, format,
+ 		  *tzset_called = true;
+ 		}
+ # endif
+-	      zone = tzname[tp->tm_isdst];
++	      zone = tp->tm_isdst <= 1 ? tzname[tp->tm_isdst] : "?";
+ 	    }
+ #endif
+ 	  if (! zone)
+Index: git/time/tst-strftime.c
+===================================================================
+--- git.orig/time/tst-strftime.c
++++ git/time/tst-strftime.c
+@@ -4,6 +4,56 @@
+ #include <time.h>
+ 
+ 
++static int
++do_bz18985 (void)
++{
++  char buf[1000];
++  struct tm ttm;
++  int rc, ret = 0;
++
++  memset (&ttm, 1, sizeof (ttm));
++  ttm.tm_zone = NULL;  /* Dereferenced directly if non-NULL.  */
++  rc = strftime (buf, sizeof (buf), "%a %A %b %B %c %z %Z", &ttm);
++
++  if (rc == 66)
++    {
++      const char expected[]
++	= "? ? ? ? ? ? 16843009 16843009:16843009:16843009 16844909 +467836 ?";
++      if (0 != strcmp (buf, expected))
++	{
++	  printf ("expected:\n  %s\ngot:\n  %s\n", expected, buf);
++	  ret += 1;
++	}
++    }
++  else
++    {
++      printf ("expected 66, got %d\n", rc);
++      ret += 1;
++    }
++
++  /* Check negative values as well.  */
++  memset (&ttm, 0xFF, sizeof (ttm));
++  ttm.tm_zone = NULL;  /* Dereferenced directly if non-NULL.  */
++  rc = strftime (buf, sizeof (buf), "%a %A %b %B %c %z %Z", &ttm);
++
++  if (rc == 30)
++    {
++      const char expected[] = "? ? ? ? ? ? -1 -1:-1:-1 1899  ";
++      if (0 != strcmp (buf, expected))
++	{
++	  printf ("expected:\n  %s\ngot:\n  %s\n", expected, buf);
++	  ret += 1;
++	}
++    }
++  else
++    {
++      printf ("expected 30, got %d\n", rc);
++      ret += 1;
++    }
++
++  return ret;
++}
++
+ static struct
+ {
+   const char *fmt;
+@@ -104,7 +154,7 @@ do_test (void)
+ 	}
+     }
+ 
+-  return result;
++  return result + do_bz18985 ();
+ }
+ 
+ #define TEST_FUNCTION do_test ()
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8777.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8777.patch
new file mode 100644
index 0000000..eeab72d
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8777.patch
@@ -0,0 +1,123 @@
+From a014cecd82b71b70a6a843e250e06b541ad524f7 Mon Sep 17 00:00:00 2001
+From: Florian Weimer <fweimer@redhat.com>
+Date: Thu, 15 Oct 2015 09:23:07 +0200
+Subject: [PATCH] Always enable pointer guard [BZ #18928]
+
+Honoring the LD_POINTER_GUARD environment variable in AT_SECURE mode
+has security implications.  This commit enables pointer guard
+unconditionally, and the environment variable is now ignored.
+
+        [BZ #18928]
+        * sysdeps/generic/ldsodefs.h (struct rtld_global_ro): Remove
+        _dl_pointer_guard member.
+        * elf/rtld.c (_rtld_global_ro): Remove _dl_pointer_guard
+        initializer.
+        (security_init): Always set up pointer guard.
+        (process_envvars): Do not process LD_POINTER_GUARD.
+
+Upstream-Status: Backport
+CVE: CVE-2015-8777
+[Yocto # 8980]
+
+https://sourceware.org/git/gitweb.cgi?p=glibc.git;a=commit;h=a014cecd82b71b70a6a843e250e06b541ad524f7
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog                  | 10 ++++++++++
+ NEWS                       | 13 ++++++++-----
+ elf/rtld.c                 | 15 ++++-----------
+ sysdeps/generic/ldsodefs.h |  3 ---
+ 4 files changed, 22 insertions(+), 19 deletions(-)
+
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,14 @@
++2015-10-15  Florian Weimer  <fweimer@redhat.com>
++
++   [BZ #18928]
++   * sysdeps/generic/ldsodefs.h (struct rtld_global_ro): Remove
++   _dl_pointer_guard member.
++   * elf/rtld.c (_rtld_global_ro): Remove _dl_pointer_guard
++   initializer.
++   (security_init): Always set up pointer guard.
++   (process_envvars): Do not process LD_POINTER_GUARD.
++
++
+ 2015-08-10  Maxim Ostapenko  <m.ostapenko@partner.samsung.com>
+ 
+ 	[BZ #18778]
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -34,7 +34,10 @@ Version 2.22
+   18533, 18534, 18536, 18539, 18540, 18542, 18544, 18545, 18546, 18547,
+   18549, 18553, 18557, 18558, 18569, 18583, 18585, 18586, 18592, 18593,
+   18594, 18602, 18612, 18613, 18619, 18633, 18635, 18641, 18643, 18648,
+-  18657, 18676, 18694, 18696.
++  18657, 18676, 18694, 18696, 18928.
++
++* The LD_POINTER_GUARD environment variable can no longer be used to
++  disable the pointer guard feature.  It is always enabled.
+ 
+ * Cache information can be queried via sysconf() function on s390 e.g. with
+   _SC_LEVEL1_ICACHE_SIZE as argument.
+Index: git/elf/rtld.c
+===================================================================
+--- git.orig/elf/rtld.c
++++ git/elf/rtld.c
+@@ -163,7 +163,6 @@ struct rtld_global_ro _rtld_global_ro at
+     ._dl_hwcap_mask = HWCAP_IMPORTANT,
+     ._dl_lazy = 1,
+     ._dl_fpu_control = _FPU_DEFAULT,
+-    ._dl_pointer_guard = 1,
+     ._dl_pagesize = EXEC_PAGESIZE,
+     ._dl_inhibit_cache = 0,
+ 
+@@ -710,15 +709,12 @@ security_init (void)
+ #endif
+ 
+   /* Set up the pointer guard as well, if necessary.  */
+-  if (GLRO(dl_pointer_guard))
+-    {
+-      uintptr_t pointer_chk_guard = _dl_setup_pointer_guard (_dl_random,
+-							     stack_chk_guard);
++  uintptr_t pointer_chk_guard
++    = _dl_setup_pointer_guard (_dl_random, stack_chk_guard);
+ #ifdef THREAD_SET_POINTER_GUARD
+-      THREAD_SET_POINTER_GUARD (pointer_chk_guard);
++  THREAD_SET_POINTER_GUARD (pointer_chk_guard);
+ #endif
+-      __pointer_chk_guard_local = pointer_chk_guard;
+-    }
++  __pointer_chk_guard_local = pointer_chk_guard;
+ 
+   /* We do not need the _dl_random value anymore.  The less
+      information we leave behind, the better, so clear the
+@@ -2478,9 +2474,6 @@ process_envvars (enum mode *modep)
+ 	      GLRO(dl_use_load_bias) = envline[14] == '1' ? -1 : 0;
+ 	      break;
+ 	    }
+-
+-	  if (memcmp (envline, "POINTER_GUARD", 13) == 0)
+-	    GLRO(dl_pointer_guard) = envline[14] != '0';
+ 	  break;
+ 
+ 	case 14:
+Index: git/sysdeps/generic/ldsodefs.h
+===================================================================
+--- git.orig/sysdeps/generic/ldsodefs.h
++++ git/sysdeps/generic/ldsodefs.h
+@@ -600,9 +600,6 @@ struct rtld_global_ro
+   /* List of auditing interfaces.  */
+   struct audit_ifaces *_dl_audit;
+   unsigned int _dl_naudit;
+-
+-  /* 0 if internal pointer values should not be guarded, 1 if they should.  */
+-  EXTERN int _dl_pointer_guard;
+ };
+ # define __rtld_global_attribute__
+ # if IS_IN (rtld)
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8779.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8779.patch
new file mode 100644
index 0000000..4dc93c7
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-8779.patch
@@ -0,0 +1,262 @@
+From 0f58539030e436449f79189b6edab17d7479796e Mon Sep 17 00:00:00 2001
+From: Paul Pluzhnikov <ppluzhnikov@google.com>
+Date: Sat, 8 Aug 2015 15:53:03 -0700
+Subject: [PATCH] Fix BZ #17905
+
+Upstream-Status: Backport
+CVE: CVE-2015-8779
+[Yocto # 8980]
+
+https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=0f58539030e436449f79189b6edab17d7479796e
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog              |  8 ++++++++
+ NEWS                   |  2 +-
+ catgets/Makefile       |  9 ++++++++-
+ catgets/catgets.c      | 19 ++++++++++++-------
+ catgets/open_catalog.c | 23 ++++++++++++++---------
+ catgets/tst-catgets.c  | 31 +++++++++++++++++++++++++++++++
+ 6 files changed, 74 insertions(+), 18 deletions(-)
+
+Index: git/catgets/Makefile
+===================================================================
+--- git.orig/catgets/Makefile
++++ git/catgets/Makefile
+@@ -37,6 +37,7 @@ ifeq (y,$(OPTION_EGLIBC_CATGETS))
+ ifeq ($(run-built-tests),yes)
+ tests-special += $(objpfx)de/libc.cat $(objpfx)test1.cat $(objpfx)test2.cat \
+ 		 $(objpfx)sample.SJIS.cat $(objpfx)test-gencat.out
++tests-special += $(objpfx)tst-catgets-mem.out
+ endif
+ endif
+ gencat-modules	= xmalloc
+@@ -53,9 +54,11 @@ catgets-CPPFLAGS := -DNLSPATH='"$(msgcat
+ 
+ generated += de.msg test1.cat test1.h test2.cat test2.h sample.SJIS.cat \
+ 	     test-gencat.h
++generated += tst-catgets.mtrace tst-catgets-mem.out
++
+ generated-dirs += de
+ 
+-tst-catgets-ENV = NLSPATH="$(objpfx)%l/%N.cat" LANG=de
++tst-catgets-ENV = NLSPATH="$(objpfx)%l/%N.cat" LANG=de MALLOC_TRACE=$(objpfx)tst-catgets.mtrace
+ 
+ ifeq ($(run-built-tests),yes)
+ # This test just checks whether the program produces any error or not.
+@@ -89,4 +92,8 @@ $(objpfx)test-gencat.out: test-gencat.sh
+ $(objpfx)sample.SJIS.cat: sample.SJIS $(objpfx)gencat
+ 	$(built-program-cmd) -H $(objpfx)test-gencat.h < $(word 1,$^) > $@; \
+ 	$(evaluate-test)
++
++$(objpfx)tst-catgets-mem.out: $(objpfx)tst-catgets.out
++	$(common-objpfx)malloc/mtrace $(objpfx)tst-catgets.mtrace > $@; \
++	$(evaluate-test)
+ endif
+Index: git/catgets/catgets.c
+===================================================================
+--- git.orig/catgets/catgets.c
++++ git/catgets/catgets.c
+@@ -16,7 +16,6 @@
+    License along with the GNU C Library; if not, see
+    <http://www.gnu.org/licenses/>.  */
+ 
+-#include <alloca.h>
+ #include <errno.h>
+ #include <locale.h>
+ #include <nl_types.h>
+@@ -35,6 +34,7 @@ catopen (const char *cat_name, int flag)
+   __nl_catd result;
+   const char *env_var = NULL;
+   const char *nlspath = NULL;
++  char *tmp = NULL;
+ 
+   if (strchr (cat_name, '/') == NULL)
+     {
+@@ -54,7 +54,10 @@ catopen (const char *cat_name, int flag)
+ 	{
+ 	  /* Append the system dependent directory.  */
+ 	  size_t len = strlen (nlspath) + 1 + sizeof NLSPATH;
+-	  char *tmp = alloca (len);
++	  tmp = malloc (len);
++
++	  if (__glibc_unlikely (tmp == NULL))
++	    return (nl_catd) -1;
+ 
+ 	  __stpcpy (__stpcpy (__stpcpy (tmp, nlspath), ":"), NLSPATH);
+ 	  nlspath = tmp;
+@@ -65,16 +68,18 @@ catopen (const char *cat_name, int flag)
+ 
+   result = (__nl_catd) malloc (sizeof (*result));
+   if (result == NULL)
+-    /* We cannot get enough memory.  */
+-    return (nl_catd) -1;
+-
+-  if (__open_catalog (cat_name, nlspath, env_var, result) != 0)
++    {
++      /* We cannot get enough memory.  */
++      result = (nl_catd) -1;
++    }
++  else if (__open_catalog (cat_name, nlspath, env_var, result) != 0)
+     {
+       /* Couldn't open the file.  */
+       free ((void *) result);
+-      return (nl_catd) -1;
++      result = (nl_catd) -1;
+     }
+ 
++  free (tmp);
+   return (nl_catd) result;
+ }
+ 
+Index: git/catgets/open_catalog.c
+===================================================================
+--- git.orig/catgets/open_catalog.c
++++ git/catgets/open_catalog.c
+@@ -47,6 +47,7 @@ __open_catalog (const char *cat_name, co
+   size_t tab_size;
+   const char *lastp;
+   int result = -1;
++  char *buf = NULL;
+ 
+   if (strchr (cat_name, '/') != NULL || nlspath == NULL)
+     fd = open_not_cancel_2 (cat_name, O_RDONLY);
+@@ -57,23 +58,23 @@ __open_catalog (const char *cat_name, co
+   if (__glibc_unlikely (bufact + (n) >= bufmax))			      \
+     {									      \
+       char *old_buf = buf;						      \
+-      bufmax += 256 + (n);						      \
+-      buf = (char *) alloca (bufmax);					      \
+-      memcpy (buf, old_buf, bufact);					      \
++      bufmax += (bufmax < 256 + (n)) ? 256 + (n) : bufmax;		      \
++      buf = realloc (buf, bufmax);					      \
++      if (__glibc_unlikely (buf == NULL))				      \
++	{								      \
++	  free (old_buf);						      \
++	  return -1;							      \
++	}								      \
+     }
+ 
+       /* The RUN_NLSPATH variable contains a colon separated list of
+ 	 descriptions where we expect to find catalogs.  We have to
+ 	 recognize certain % substitutions and stop when we found the
+ 	 first existing file.  */
+-      char *buf;
+       size_t bufact;
+-      size_t bufmax;
++      size_t bufmax = 0;
+       size_t len;
+ 
+-      buf = NULL;
+-      bufmax = 0;
+-
+       fd = -1;
+       while (*run_nlspath != '\0')
+ 	{
+@@ -188,7 +189,10 @@ __open_catalog (const char *cat_name, co
+ 
+   /* Avoid dealing with directories and block devices */
+   if (__builtin_expect (fd, 0) < 0)
+-    return -1;
++    {
++      free (buf);
++      return -1;
++    }
+ 
+   if (__builtin_expect (__fxstat64 (_STAT_VER, fd, &st), 0) < 0)
+     goto close_unlock_return;
+@@ -325,6 +329,7 @@ __open_catalog (const char *cat_name, co
+   /* Release the lock again.  */
+  close_unlock_return:
+   close_not_cancel_no_status (fd);
++  free (buf);
+ 
+   return result;
+ }
+Index: git/catgets/tst-catgets.c
+===================================================================
+--- git.orig/catgets/tst-catgets.c
++++ git/catgets/tst-catgets.c
+@@ -1,7 +1,10 @@
++#include <assert.h>
+ #include <mcheck.h>
+ #include <nl_types.h>
+ #include <stdio.h>
++#include <stdlib.h>
+ #include <string.h>
++#include <sys/resource.h>
+ 
+ 
+ static const char *msgs[] =
+@@ -12,6 +15,33 @@ static const char *msgs[] =
+ };
+ #define nmsgs (sizeof (msgs) / sizeof (msgs[0]))
+ 
++
++/* Test for unbounded alloca.  */
++static int
++do_bz17905 (void)
++{
++  char *buf;
++  struct rlimit rl;
++  nl_catd result;
++
++  const int sz = 1024 * 1024;
++
++  getrlimit (RLIMIT_STACK, &rl);
++  rl.rlim_cur = sz;
++  setrlimit (RLIMIT_STACK, &rl);
++
++  buf = malloc (sz + 1); 
++  memset (buf, 'A', sz);
++  buf[sz] = '\0';
++  setenv ("NLSPATH", buf, 1);
++
++  result = catopen (buf, NL_CAT_LOCALE);
++  assert (result == (nl_catd) -1);
++
++  free (buf);
++  return 0;
++}
++
+ #define ROUNDS 5
+ 
+ static int
+@@ -62,6 +92,7 @@ do_test (void)
+ 	}
+     }
+ 
++  result += do_bz17905 ();
+   return result;
+ }
+ 
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,11 @@
++2015-08-08  Paul Pluzhnikov  <ppluzhnikov@google.com>
++
++   [BZ #17905]
++   * catgets/Makefile (tst-catgets-mem): New test.
++   * catgets/catgets.c (catopen): Don't use unbounded alloca.
++   * catgets/open_catalog.c (__open_catalog): Likewise.
++   * catgets/tst-catgets.c (do_bz17905): Test unbounded alloca.
++
+ 2015-10-15  Florian Weimer  <fweimer@redhat.com>
+ 
+    [BZ #18928]
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -9,7 +9,7 @@ Version 2.22.1
+ 
+ * The following bugs are resolved with this release:
+ 
+-  18778, 18781, 18787.
++  18778, 18781, 18787, 17905.
+ 
+ Version 2.22
+ 
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_1.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_1.patch
new file mode 100644
index 0000000..3aca913
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_1.patch
@@ -0,0 +1,1039 @@
+From e02cabecf0d025ec4f4ddee290bdf7aadb873bb3 Mon Sep 17 00:00:00 2001
+From: Joseph Myers <joseph@codesourcery.com>
+Date: Tue, 24 Nov 2015 22:24:52 +0000
+Subject: [PATCH] Refactor strtod parsing of NaN payloads.
+
+The nan* functions handle their string argument by constructing a
+NAN(...) string on the stack as a VLA and passing it to strtod
+functions.
+
+This approach has problems discussed in bug 16961 and bug 16962: the
+stack usage is unbounded, and it gives incorrect results in certain
+cases where the argument is not a valid n-char-sequence.
+
+The natural fix for both issues is to refactor the NaN payload parsing
+out of strtod into a separate function that the nan* functions can
+call directly, so that no temporary string needs constructing on the
+stack at all.  This patch does that refactoring in preparation for
+fixing those bugs (but without actually using the new functions from
+nan* - which will also require exporting them from libc at version
+GLIBC_PRIVATE).  This patch is not intended to change any user-visible
+behavior, so no tests are added (fixes for the above bugs will of
+course add tests for them).
+
+This patch builds on my recent fixes for strtol and strtod issues in
+Turkish locales.  Given those fixes, the parsing of NaN payloads is
+locale-independent; thus, the new functions do not need to take a
+locale_t argument.
+
+Tested for x86_64, x86, mips64 and powerpc.
+
+	* stdlib/strtod_nan.c: New file.
+	* stdlib/strtod_nan_double.h: Likewise.
+	* stdlib/strtod_nan_float.h: Likewise.
+	* stdlib/strtod_nan_main.c: Likewise.
+	* stdlib/strtod_nan_narrow.h: Likewise.
+	* stdlib/strtod_nan_wide.h: Likewise.
+	* stdlib/strtof_nan.c: Likewise.
+	* stdlib/strtold_nan.c: Likewise.
+	* sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h: Likewise.
+	* sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h: Likewise.
+	* sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h: Likewise.
+	* wcsmbs/wcstod_nan.c: Likewise.
+	* wcsmbs/wcstof_nan.c: Likewise.
+	* wcsmbs/wcstold_nan.c: Likewise.
+	* stdlib/Makefile (routines): Add strtof_nan, strtod_nan and
+	strtold_nan.
+	* wcsmbs/Makefile (routines): Add wcstod_nan, wcstold_nan and
+	wcstof_nan.
+	* include/stdlib.h (__strtof_nan): Declare and use
+	libc_hidden_proto.
+	(__strtod_nan): Likewise.
+	(__strtold_nan): Likewise.
+	(__wcstof_nan): Likewise.
+	(__wcstod_nan): Likewise.
+	(__wcstold_nan): Likewise.
+	* include/wchar.h (____wcstoull_l_internal): Declare.
+	* stdlib/strtod_l.c: Do not include <ieee754.h>.
+	(____strtoull_l_internal): Remove declaration.
+	(STRTOF_NAN): Define macro.
+	(SET_MANTISSA): Remove macro.
+	(STRTOULL): Likewise.
+	(____STRTOF_INTERNAL): Use STRTOF_NAN to parse NaN payload.
+	* stdlib/strtof_l.c (____strtoull_l_internal): Remove declaration.
+	(STRTOF_NAN): Define macro.
+	(SET_MANTISSA): Remove macro.
+	* sysdeps/ieee754/ldbl-128/strtold_l.c (STRTOF_NAN): Define macro.
+	(SET_MANTISSA): Remove macro.
+	* sysdeps/ieee754/ldbl-128ibm/strtold_l.c (STRTOF_NAN): Define
+	macro.
+	(SET_MANTISSA): Remove macro.
+	* sysdeps/ieee754/ldbl-64-128/strtold_l.c (STRTOF_NAN): Define
+	macro.
+	(SET_MANTISSA): Remove macro.
+	* sysdeps/ieee754/ldbl-96/strtold_l.c (STRTOF_NAN): Define macro.
+	(SET_MANTISSA): Remove macro.
+	* wcsmbs/wcstod_l.c (____wcstoull_l_internal): Remove declaration.
+	* wcsmbs/wcstof_l.c (____wcstoull_l_internal): Likewise.
+	* wcsmbs/wcstold_l.c (____wcstoull_l_internal): Likewise.
+
+Upstream-Status: Backport
+CVE: CVE-2015-9761 patch #1
+[Yocto # 8980]
+
+https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=e02cabecf0d025ec4f4ddee290bdf7aadb873bb3
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog                                        | 49 ++++++++++++++++++
+ include/stdlib.h                                 | 18 +++++++
+ include/wchar.h                                  |  3 ++
+ stdlib/Makefile                                  |  1 +
+ stdlib/strtod_l.c                                | 48 ++++--------------
+ stdlib/strtod_nan.c                              | 24 +++++++++
+ stdlib/strtod_nan_double.h                       | 30 +++++++++++
+ stdlib/strtod_nan_float.h                        | 29 +++++++++++
+ stdlib/strtod_nan_main.c                         | 63 ++++++++++++++++++++++++
+ stdlib/strtod_nan_narrow.h                       | 22 +++++++++
+ stdlib/strtod_nan_wide.h                         | 22 +++++++++
+ stdlib/strtof_l.c                                | 11 +----
+ stdlib/strtof_nan.c                              | 24 +++++++++
+ stdlib/strtold_nan.c                             | 30 +++++++++++
+ sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h    | 33 +++++++++++++
+ sysdeps/ieee754/ldbl-128/strtold_l.c             | 13 +----
+ sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h | 30 +++++++++++
+ sysdeps/ieee754/ldbl-128ibm/strtold_l.c          | 10 +---
+ sysdeps/ieee754/ldbl-64-128/strtold_l.c          | 13 +----
+ sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h     | 30 +++++++++++
+ sysdeps/ieee754/ldbl-96/strtold_l.c              | 10 +---
+ wcsmbs/Makefile                                  |  1 +
+ wcsmbs/wcstod_l.c                                |  3 --
+ wcsmbs/wcstod_nan.c                              | 23 +++++++++
+ wcsmbs/wcstof_l.c                                |  3 --
+ wcsmbs/wcstof_nan.c                              | 23 +++++++++
+ wcsmbs/wcstold_l.c                               |  3 --
+ wcsmbs/wcstold_nan.c                             | 30 +++++++++++
+ 28 files changed, 504 insertions(+), 95 deletions(-)
+ create mode 100644 stdlib/strtod_nan.c
+ create mode 100644 stdlib/strtod_nan_double.h
+ create mode 100644 stdlib/strtod_nan_float.h
+ create mode 100644 stdlib/strtod_nan_main.c
+ create mode 100644 stdlib/strtod_nan_narrow.h
+ create mode 100644 stdlib/strtod_nan_wide.h
+ create mode 100644 stdlib/strtof_nan.c
+ create mode 100644 stdlib/strtold_nan.c
+ create mode 100644 sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h
+ create mode 100644 sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h
+ create mode 100644 sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h
+ create mode 100644 wcsmbs/wcstod_nan.c
+ create mode 100644 wcsmbs/wcstof_nan.c
+ create mode 100644 wcsmbs/wcstold_nan.c
+
+Index: git/include/stdlib.h
+===================================================================
+--- git.orig/include/stdlib.h
++++ git/include/stdlib.h
+@@ -203,6 +203,24 @@ libc_hidden_proto (strtoll)
+ libc_hidden_proto (strtoul)
+ libc_hidden_proto (strtoull)
+ 
++extern float __strtof_nan (const char *, char **, char) internal_function;
++extern double __strtod_nan (const char *, char **, char) internal_function;
++extern long double __strtold_nan (const char *, char **, char)
++     internal_function;
++extern float __wcstof_nan (const wchar_t *, wchar_t **, wchar_t)
++     internal_function;
++extern double __wcstod_nan (const wchar_t *, wchar_t **, wchar_t)
++     internal_function;
++extern long double __wcstold_nan (const wchar_t *, wchar_t **, wchar_t)
++     internal_function;
++
++libc_hidden_proto (__strtof_nan)
++libc_hidden_proto (__strtod_nan)
++libc_hidden_proto (__strtold_nan)
++libc_hidden_proto (__wcstof_nan)
++libc_hidden_proto (__wcstod_nan)
++libc_hidden_proto (__wcstold_nan)
++
+ extern char *__ecvt (double __value, int __ndigit, int *__restrict __decpt,
+ 		     int *__restrict __sign);
+ extern char *__fcvt (double __value, int __ndigit, int *__restrict __decpt,
+Index: git/include/wchar.h
+===================================================================
+--- git.orig/include/wchar.h
++++ git/include/wchar.h
+@@ -52,6 +52,9 @@ extern unsigned long long int __wcstoull
+ 						   __restrict __endptr,
+ 						   int __base,
+ 						   int __group) __THROW;
++extern unsigned long long int ____wcstoull_l_internal (const wchar_t *,
++						       wchar_t **, int, int,
++						       __locale_t);
+ libc_hidden_proto (__wcstof_internal)
+ libc_hidden_proto (__wcstod_internal)
+ libc_hidden_proto (__wcstold_internal)
+Index: git/stdlib/Makefile
+===================================================================
+--- git.orig/stdlib/Makefile
++++ git/stdlib/Makefile
+@@ -51,6 +51,7 @@ routines-y	:=							      \
+ 	strtol_l strtoul_l strtoll_l strtoull_l				      \
+ 	strtof strtod strtold						      \
+ 	strtof_l strtod_l strtold_l					      \
++	strtof_nan strtod_nan strtold_nan				      \
+ 	system canonicalize						      \
+ 	a64l l64a							      \
+ 	getsubopt xpg_basename						      \
+Index: git/stdlib/strtod_l.c
+===================================================================
+--- git.orig/stdlib/strtod_l.c
++++ git/stdlib/strtod_l.c
+@@ -21,8 +21,6 @@
+ #include <xlocale.h>
+ 
+ extern double ____strtod_l_internal (const char *, char **, int, __locale_t);
+-extern unsigned long long int ____strtoull_l_internal (const char *, char **,
+-						       int, int, __locale_t);
+ 
+ /* Configuration part.  These macros are defined by `strtold.c',
+    `strtof.c', `wcstod.c', `wcstold.c', and `wcstof.c' to produce the
+@@ -34,27 +32,20 @@ extern unsigned long long int ____strtou
+ # ifdef USE_WIDE_CHAR
+ #  define STRTOF	wcstod_l
+ #  define __STRTOF	__wcstod_l
++#  define STRTOF_NAN	__wcstod_nan
+ # else
+ #  define STRTOF	strtod_l
+ #  define __STRTOF	__strtod_l
++#  define STRTOF_NAN	__strtod_nan
+ # endif
+ # define MPN2FLOAT	__mpn_construct_double
+ # define FLOAT_HUGE_VAL	HUGE_VAL
+-# define SET_MANTISSA(flt, mant) \
+-  do { union ieee754_double u;						      \
+-       u.d = (flt);							      \
+-       u.ieee_nan.mantissa0 = (mant) >> 32;				      \
+-       u.ieee_nan.mantissa1 = (mant);					      \
+-       if ((u.ieee.mantissa0 | u.ieee.mantissa1) != 0)			      \
+-	 (flt) = u.d;							      \
+-  } while (0)
+ #endif
+ /* End of configuration part.  */
+ 
+ #include <ctype.h>
+ #include <errno.h>
+ #include <float.h>
+-#include <ieee754.h>
+ #include "../locale/localeinfo.h"
+ #include <locale.h>
+ #include <math.h>
+@@ -105,7 +96,6 @@ extern unsigned long long int ____strtou
+ # define TOLOWER_C(Ch) __towlower_l ((Ch), _nl_C_locobj_ptr)
+ # define STRNCASECMP(S1, S2, N) \
+   __wcsncasecmp_l ((S1), (S2), (N), _nl_C_locobj_ptr)
+-# define STRTOULL(S, E, B) ____wcstoull_l_internal ((S), (E), (B), 0, loc)
+ #else
+ # define STRING_TYPE char
+ # define CHAR_TYPE char
+@@ -117,7 +107,6 @@ extern unsigned long long int ____strtou
+ # define TOLOWER_C(Ch) __tolower_l ((Ch), _nl_C_locobj_ptr)
+ # define STRNCASECMP(S1, S2, N) \
+   __strncasecmp_l ((S1), (S2), (N), _nl_C_locobj_ptr)
+-# define STRTOULL(S, E, B) ____strtoull_l_internal ((S), (E), (B), 0, loc)
+ #endif
+ 
+ 
+@@ -668,33 +657,14 @@ ____STRTOF_INTERNAL (nptr, endptr, group
+ 	  if (*cp == L_('('))
+ 	    {
+ 	      const STRING_TYPE *startp = cp;
+-	      do
+-		++cp;
+-	      while ((*cp >= L_('0') && *cp <= L_('9'))
+-		     || ({ CHAR_TYPE lo = TOLOWER (*cp);
+-			   lo >= L_('a') && lo <= L_('z'); })
+-		     || *cp == L_('_'));
+-
+-	      if (*cp != L_(')'))
+-		/* The closing brace is missing.  Only match the NAN
+-		   part.  */
+-		cp = startp;
++          STRING_TYPE *endp;
++          retval = STRTOF_NAN (cp + 1, &endp, L_(')'));
++          if (*endp == L_(')'))
++            /* Consume the closing parenthesis.  */
++            cp = endp + 1;
+ 	      else
+-		{
+-		  /* This is a system-dependent way to specify the
+-		     bitmask used for the NaN.  We expect it to be
+-		     a number which is put in the mantissa of the
+-		     number.  */
+-		  STRING_TYPE *endp;
+-		  unsigned long long int mant;
+-
+-		  mant = STRTOULL (startp + 1, &endp, 0);
+-		  if (endp == cp)
+-		    SET_MANTISSA (retval, mant);
+-
+-		  /* Consume the closing brace.  */
+-		  ++cp;
+-		}
++               /* Only match the NAN part.  */
++               cp = startp;
+ 	    }
+ 
+ 	  if (endptr != NULL)
+Index: git/stdlib/strtod_nan.c
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan.c
+@@ -0,0 +1,24 @@
++/* Convert string for NaN payload to corresponding NaN.  Narrow
++   strings, double.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <strtod_nan_narrow.h>
++#include <strtod_nan_double.h>
++
++#define STRTOD_NAN __strtod_nan
++#include <strtod_nan_main.c>
+Index: git/stdlib/strtod_nan_double.h
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan_double.h
+@@ -0,0 +1,30 @@
++/* Convert string for NaN payload to corresponding NaN.  For double.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define FLOAT		double
++#define SET_MANTISSA(flt, mant)				\
++  do							\
++    {							\
++      union ieee754_double u;				\
++      u.d = (flt);					\
++      u.ieee_nan.mantissa0 = (mant) >> 32;		\
++      u.ieee_nan.mantissa1 = (mant);			\
++      if ((u.ieee.mantissa0 | u.ieee.mantissa1) != 0)	\
++	(flt) = u.d;					\
++    }							\
++  while (0)
+Index: git/stdlib/strtod_nan_float.h
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan_float.h
+@@ -0,0 +1,29 @@
++/* Convert string for NaN payload to corresponding NaN.  For float.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define	FLOAT		float
++#define SET_MANTISSA(flt, mant)			\
++  do						\
++    {						\
++      union ieee754_float u;			\
++      u.f = (flt);				\
++      u.ieee_nan.mantissa = (mant);		\
++      if (u.ieee.mantissa != 0)			\
++	(flt) = u.f;				\
++    }						\
++  while (0)
+Index: git/stdlib/strtod_nan_main.c
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan_main.c
+@@ -0,0 +1,63 @@
++/* Convert string for NaN payload to corresponding NaN.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <ieee754.h>
++#include <locale.h>
++#include <math.h>
++#include <stdlib.h>
++#include <wchar.h>
++
++
++/* If STR starts with an optional n-char-sequence as defined by ISO C
++   (a sequence of ASCII letters, digits and underscores), followed by
++   ENDC, return a NaN whose payload is set based on STR.  Otherwise,
++   return a default NAN.  If ENDPTR is not NULL, set *ENDPTR to point
++   to the character after the initial n-char-sequence.  */
++
++internal_function
++FLOAT
++STRTOD_NAN (const STRING_TYPE *str, STRING_TYPE **endptr, STRING_TYPE endc)
++{
++  const STRING_TYPE *cp = str;
++
++  while ((*cp >= L_('0') && *cp <= L_('9'))
++	 || (*cp >= L_('A') && *cp <= L_('Z'))
++	 || (*cp >= L_('a') && *cp <= L_('z'))
++	 || *cp == L_('_'))
++    ++cp;
++
++  FLOAT retval = NAN;
++  if (*cp != endc)
++    goto out;
++
++  /* This is a system-dependent way to specify the bitmask used for
++     the NaN.  We expect it to be a number which is put in the
++     mantissa of the number.  */
++  STRING_TYPE *endp;
++  unsigned long long int mant;
++
++  mant = STRTOULL (str, &endp, 0);
++  if (endp == cp)
++    SET_MANTISSA (retval, mant);
++
++ out:
++  if (endptr != NULL)
++    *endptr = (STRING_TYPE *) cp;
++  return retval;
++}
++libc_hidden_def (STRTOD_NAN)
+Index: git/stdlib/strtod_nan_narrow.h
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan_narrow.h
+@@ -0,0 +1,22 @@
++/* Convert string for NaN payload to corresponding NaN.  Narrow strings.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define STRING_TYPE char
++#define L_(Ch) Ch
++#define STRTOULL(S, E, B) ____strtoull_l_internal ((S), (E), (B), 0,	\
++						   _nl_C_locobj_ptr)
+Index: git/stdlib/strtod_nan_wide.h
+===================================================================
+--- /dev/null
++++ git/stdlib/strtod_nan_wide.h
+@@ -0,0 +1,22 @@
++/* Convert string for NaN payload to corresponding NaN.  Wide strings.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define STRING_TYPE wchar_t
++#define L_(Ch) L##Ch
++#define STRTOULL(S, E, B) ____wcstoull_l_internal ((S), (E), (B), 0,	\
++						   _nl_C_locobj_ptr)
+Index: git/stdlib/strtof_l.c
+===================================================================
+--- git.orig/stdlib/strtof_l.c
++++ git/stdlib/strtof_l.c
+@@ -20,26 +20,19 @@
+ #include <xlocale.h>
+ 
+ extern float ____strtof_l_internal (const char *, char **, int, __locale_t);
+-extern unsigned long long int ____strtoull_l_internal (const char *, char **,
+-						       int, int, __locale_t);
+ 
+ #define	FLOAT		float
+ #define	FLT		FLT
+ #ifdef USE_WIDE_CHAR
+ # define STRTOF		wcstof_l
+ # define __STRTOF	__wcstof_l
++# define STRTOF_NAN	__wcstof_nan
+ #else
+ # define STRTOF		strtof_l
+ # define __STRTOF	__strtof_l
++# define STRTOF_NAN	__strtof_nan
+ #endif
+ #define	MPN2FLOAT	__mpn_construct_float
+ #define	FLOAT_HUGE_VAL	HUGE_VALF
+-#define SET_MANTISSA(flt, mant) \
+-  do { union ieee754_float u;						      \
+-       u.f = (flt);							      \
+-       u.ieee_nan.mantissa = (mant);					      \
+-       if (u.ieee.mantissa != 0)					      \
+-	 (flt) = u.f;							      \
+-  } while (0)
+ 
+ #include "strtod_l.c"
+Index: git/stdlib/strtof_nan.c
+===================================================================
+--- /dev/null
++++ git/stdlib/strtof_nan.c
+@@ -0,0 +1,24 @@
++/* Convert string for NaN payload to corresponding NaN.  Narrow
++   strings, float.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <strtod_nan_narrow.h>
++#include <strtod_nan_float.h>
++
++#define STRTOD_NAN __strtof_nan
++#include <strtod_nan_main.c>
+Index: git/stdlib/strtold_nan.c
+===================================================================
+--- /dev/null
++++ git/stdlib/strtold_nan.c
+@@ -0,0 +1,30 @@
++/* Convert string for NaN payload to corresponding NaN.  Narrow
++   strings, long double.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <math.h>
++
++/* This function is unused if long double and double have the same
++   representation.  */
++#ifndef __NO_LONG_DOUBLE_MATH
++# include <strtod_nan_narrow.h>
++# include <strtod_nan_ldouble.h>
++
++# define STRTOD_NAN __strtold_nan
++# include <strtod_nan_main.c>
++#endif
+Index: git/sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h
+===================================================================
+--- /dev/null
++++ git/sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h
+@@ -0,0 +1,33 @@
++/* Convert string for NaN payload to corresponding NaN.  For ldbl-128.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define FLOAT		long double
++#define SET_MANTISSA(flt, mant)				\
++  do							\
++    {							\
++      union ieee854_long_double u;			\
++      u.d = (flt);					\
++      u.ieee_nan.mantissa0 = 0;				\
++      u.ieee_nan.mantissa1 = 0;				\
++      u.ieee_nan.mantissa2 = (mant) >> 32;		\
++      u.ieee_nan.mantissa3 = (mant);			\
++      if ((u.ieee.mantissa0 | u.ieee.mantissa1		\
++	   | u.ieee.mantissa2 | u.ieee.mantissa3) != 0)	\
++	(flt) = u.d;					\
++    }							\
++  while (0)
+Index: git/sysdeps/ieee754/ldbl-128/strtold_l.c
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-128/strtold_l.c
++++ git/sysdeps/ieee754/ldbl-128/strtold_l.c
+@@ -25,22 +25,13 @@
+ #ifdef USE_WIDE_CHAR
+ # define STRTOF		wcstold_l
+ # define __STRTOF	__wcstold_l
++# define STRTOF_NAN	__wcstold_nan
+ #else
+ # define STRTOF		strtold_l
+ # define __STRTOF	__strtold_l
++# define STRTOF_NAN	__strtold_nan
+ #endif
+ #define MPN2FLOAT	__mpn_construct_long_double
+ #define FLOAT_HUGE_VAL	HUGE_VALL
+-#define SET_MANTISSA(flt, mant) \
+-  do { union ieee854_long_double u;					      \
+-       u.d = (flt);							      \
+-       u.ieee_nan.mantissa0 = 0;					      \
+-       u.ieee_nan.mantissa1 = 0;					      \
+-       u.ieee_nan.mantissa2 = (mant) >> 32;				      \
+-       u.ieee_nan.mantissa3 = (mant);					      \
+-       if ((u.ieee.mantissa0 | u.ieee.mantissa1				      \
+-	    | u.ieee.mantissa2 | u.ieee.mantissa3) != 0)		      \
+-	 (flt) = u.d;							      \
+-  } while (0)
+ 
+ #include <strtod_l.c>
+Index: git/sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h
+===================================================================
+--- /dev/null
++++ git/sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h
+@@ -0,0 +1,30 @@
++/* Convert string for NaN payload to corresponding NaN.  For ldbl-128ibm.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define FLOAT		long double
++#define SET_MANTISSA(flt, mant)					\
++  do								\
++    {								\
++      union ibm_extended_long_double u;				\
++      u.ld = (flt);						\
++      u.d[0].ieee_nan.mantissa0 = (mant) >> 32;			\
++      u.d[0].ieee_nan.mantissa1 = (mant);			\
++      if ((u.d[0].ieee.mantissa0 | u.d[0].ieee.mantissa1) != 0)	\
++	(flt) = u.ld;						\
++    }								\
++  while (0)
+Index: git/sysdeps/ieee754/ldbl-128ibm/strtold_l.c
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-128ibm/strtold_l.c
++++ git/sysdeps/ieee754/ldbl-128ibm/strtold_l.c
+@@ -30,25 +30,19 @@ extern long double ____new_wcstold_l (co
+ # define STRTOF		__new_wcstold_l
+ # define __STRTOF	____new_wcstold_l
+ # define ____STRTOF_INTERNAL ____wcstold_l_internal
++# define STRTOF_NAN	__wcstold_nan
+ #else
+ extern long double ____new_strtold_l (const char *, char **, __locale_t);
+ # define STRTOF		__new_strtold_l
+ # define __STRTOF	____new_strtold_l
+ # define ____STRTOF_INTERNAL ____strtold_l_internal
++# define STRTOF_NAN	__strtold_nan
+ #endif
+ extern __typeof (__STRTOF) STRTOF;
+ libc_hidden_proto (__STRTOF)
+ libc_hidden_proto (STRTOF)
+ #define MPN2FLOAT	__mpn_construct_long_double
+ #define FLOAT_HUGE_VAL	HUGE_VALL
+-# define SET_MANTISSA(flt, mant) \
+-  do { union ibm_extended_long_double u;				      \
+-       u.ld = (flt);							      \
+-       u.d[0].ieee_nan.mantissa0 = (mant) >> 32;			      \
+-       u.d[0].ieee_nan.mantissa1 = (mant);				      \
+-       if ((u.d[0].ieee.mantissa0 | u.d[0].ieee.mantissa1) != 0)	      \
+-	 (flt) = u.ld;							      \
+-  } while (0)
+ 
+ #include <strtod_l.c>
+ 
+Index: git/sysdeps/ieee754/ldbl-64-128/strtold_l.c
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-64-128/strtold_l.c
++++ git/sysdeps/ieee754/ldbl-64-128/strtold_l.c
+@@ -30,28 +30,19 @@ extern long double ____new_wcstold_l (co
+ # define STRTOF		__new_wcstold_l
+ # define __STRTOF	____new_wcstold_l
+ # define ____STRTOF_INTERNAL ____wcstold_l_internal
++# define STRTOF_NAN	__wcstold_nan
+ #else
+ extern long double ____new_strtold_l (const char *, char **, __locale_t);
+ # define STRTOF		__new_strtold_l
+ # define __STRTOF	____new_strtold_l
+ # define ____STRTOF_INTERNAL ____strtold_l_internal
++# define STRTOF_NAN	__strtold_nan
+ #endif
+ extern __typeof (__STRTOF) STRTOF;
+ libc_hidden_proto (__STRTOF)
+ libc_hidden_proto (STRTOF)
+ #define MPN2FLOAT	__mpn_construct_long_double
+ #define FLOAT_HUGE_VAL	HUGE_VALL
+-#define SET_MANTISSA(flt, mant) \
+-  do { union ieee854_long_double u;					      \
+-       u.d = (flt);							      \
+-       u.ieee_nan.mantissa0 = 0;					      \
+-       u.ieee_nan.mantissa1 = 0;					      \
+-       u.ieee_nan.mantissa2 = (mant) >> 32;				      \
+-       u.ieee_nan.mantissa3 = (mant);					      \
+-       if ((u.ieee.mantissa0 | u.ieee.mantissa1				      \
+-	    | u.ieee.mantissa2 | u.ieee.mantissa3) != 0)		      \
+-	 (flt) = u.d;							      \
+-  } while (0)
+ 
+ #include <strtod_l.c>
+ 
+Index: git/sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h
+===================================================================
+--- /dev/null
++++ git/sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h
+@@ -0,0 +1,30 @@
++/* Convert string for NaN payload to corresponding NaN.  For ldbl-96.
++   Copyright (C) 1997-2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#define FLOAT		long double
++#define SET_MANTISSA(flt, mant)				\
++  do							\
++    {							\
++      union ieee854_long_double u;			\
++      u.d = (flt);					\
++      u.ieee_nan.mantissa0 = (mant) >> 32;		\
++      u.ieee_nan.mantissa1 = (mant);			\
++      if ((u.ieee.mantissa0 | u.ieee.mantissa1) != 0)	\
++	(flt) = u.d;					\
++    }							\
++  while (0)
+Index: git/sysdeps/ieee754/ldbl-96/strtold_l.c
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-96/strtold_l.c
++++ git/sysdeps/ieee754/ldbl-96/strtold_l.c
+@@ -25,19 +25,13 @@
+ #ifdef USE_WIDE_CHAR
+ # define STRTOF		wcstold_l
+ # define __STRTOF	__wcstold_l
++# define STRTOF_NAN	__wcstold_nan
+ #else
+ # define STRTOF		strtold_l
+ # define __STRTOF	__strtold_l
++# define STRTOF_NAN	__strtold_nan
+ #endif
+ #define MPN2FLOAT	__mpn_construct_long_double
+ #define FLOAT_HUGE_VAL	HUGE_VALL
+-#define SET_MANTISSA(flt, mant) \
+-  do { union ieee854_long_double u;					      \
+-       u.d = (flt);							      \
+-       u.ieee_nan.mantissa0 = (mant) >> 32;				      \
+-       u.ieee_nan.mantissa1 = (mant);					      \
+-       if ((u.ieee.mantissa0 | u.ieee.mantissa1) != 0)			      \
+-	 (flt) = u.d;							      \
+-  } while (0)
+ 
+ #include <stdlib/strtod_l.c>
+Index: git/wcsmbs/Makefile
+===================================================================
+--- git.orig/wcsmbs/Makefile
++++ git/wcsmbs/Makefile
+@@ -39,6 +39,7 @@ routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR
+ 	    wcstol wcstoul wcstoll wcstoull wcstod wcstold wcstof \
+ 	    wcstol_l wcstoul_l wcstoll_l wcstoull_l \
+ 	    wcstod_l wcstold_l wcstof_l \
++	    wcstod_nan wcstold_nan wcstof_nan \
+ 	    wcscoll wcsxfrm \
+ 	    wcwidth wcswidth \
+ 	    wcscoll_l wcsxfrm_l \
+Index: git/wcsmbs/wcstod_l.c
+===================================================================
+--- git.orig/wcsmbs/wcstod_l.c
++++ git/wcsmbs/wcstod_l.c
+@@ -23,9 +23,6 @@
+ 
+ extern double ____wcstod_l_internal (const wchar_t *, wchar_t **, int,
+ 				     __locale_t);
+-extern unsigned long long int ____wcstoull_l_internal (const wchar_t *,
+-						       wchar_t **, int, int,
+-						       __locale_t);
+ 
+ #define	USE_WIDE_CHAR	1
+ 
+Index: git/wcsmbs/wcstod_nan.c
+===================================================================
+--- /dev/null
++++ git/wcsmbs/wcstod_nan.c
+@@ -0,0 +1,23 @@
++/* Convert string for NaN payload to corresponding NaN.  Wide strings, double.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include "../stdlib/strtod_nan_wide.h"
++#include "../stdlib/strtod_nan_double.h"
++
++#define STRTOD_NAN __wcstod_nan
++#include "../stdlib/strtod_nan_main.c"
+Index: git/wcsmbs/wcstof_l.c
+===================================================================
+--- git.orig/wcsmbs/wcstof_l.c
++++ git/wcsmbs/wcstof_l.c
+@@ -25,8 +25,5 @@
+ 
+ extern float ____wcstof_l_internal (const wchar_t *, wchar_t **, int,
+ 				    __locale_t);
+-extern unsigned long long int ____wcstoull_l_internal (const wchar_t *,
+-						       wchar_t **, int, int,
+-						       __locale_t);
+ 
+ #include <stdlib/strtof_l.c>
+Index: git/wcsmbs/wcstof_nan.c
+===================================================================
+--- /dev/null
++++ git/wcsmbs/wcstof_nan.c
+@@ -0,0 +1,23 @@
++/* Convert string for NaN payload to corresponding NaN.  Wide strings, float.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include "../stdlib/strtod_nan_wide.h"
++#include "../stdlib/strtod_nan_float.h"
++
++#define STRTOD_NAN __wcstof_nan
++#include "../stdlib/strtod_nan_main.c"
+Index: git/wcsmbs/wcstold_l.c
+===================================================================
+--- git.orig/wcsmbs/wcstold_l.c
++++ git/wcsmbs/wcstold_l.c
+@@ -24,8 +24,5 @@
+ 
+ extern long double ____wcstold_l_internal (const wchar_t *, wchar_t **, int,
+ 					   __locale_t);
+-extern unsigned long long int ____wcstoull_l_internal (const wchar_t *,
+-						       wchar_t **, int, int,
+-						       __locale_t);
+ 
+ #include <strtold_l.c>
+Index: git/wcsmbs/wcstold_nan.c
+===================================================================
+--- /dev/null
++++ git/wcsmbs/wcstold_nan.c
+@@ -0,0 +1,30 @@
++/* Convert string for NaN payload to corresponding NaN.  Wide strings,
++   long double.
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <math.h>
++
++/* This function is unused if long double and double have the same
++   representation.  */
++#ifndef __NO_LONG_DOUBLE_MATH
++# include "../stdlib/strtod_nan_wide.h"
++# include <strtod_nan_ldouble.h>
++
++# define STRTOD_NAN __wcstold_nan
++# include "../stdlib/strtod_nan_main.c"
++#endif
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,57 @@
++2015-11-24  Joseph Myers  <joseph@codesourcery.com>
++ 
++	* stdlib/strtod_nan.c: New file.
++	* stdlib/strtod_nan_double.h: Likewise.
++	* stdlib/strtod_nan_float.h: Likewise.
++	* stdlib/strtod_nan_main.c: Likewise.
++	* stdlib/strtod_nan_narrow.h: Likewise.
++	* stdlib/strtod_nan_wide.h: Likewise.
++	* stdlib/strtof_nan.c: Likewise.
++	* stdlib/strtold_nan.c: Likewise.
++	* sysdeps/ieee754/ldbl-128/strtod_nan_ldouble.h: Likewise.
++	* sysdeps/ieee754/ldbl-128ibm/strtod_nan_ldouble.h: Likewise.
++	* sysdeps/ieee754/ldbl-96/strtod_nan_ldouble.h: Likewise.
++	* wcsmbs/wcstod_nan.c: Likewise.
++	* wcsmbs/wcstof_nan.c: Likewise.
++	* wcsmbs/wcstold_nan.c: Likewise.
++	* stdlib/Makefile (routines): Add strtof_nan, strtod_nan and
++	strtold_nan.
++	* wcsmbs/Makefile (routines): Add wcstod_nan, wcstold_nan and
++	wcstof_nan.
++	* include/stdlib.h (__strtof_nan): Declare and use
++	libc_hidden_proto.
++	(__strtod_nan): Likewise.
++	(__strtold_nan): Likewise.
++	(__wcstof_nan): Likewise.
++	(__wcstod_nan): Likewise.
++	(__wcstold_nan): Likewise.
++	* include/wchar.h (____wcstoull_l_internal): Declare.
++	* stdlib/strtod_l.c: Do not include <ieee754.h>.
++	(____strtoull_l_internal): Remove declaration.
++	(STRTOF_NAN): Define macro.
++	(SET_MANTISSA): Remove macro.
++	(STRTOULL): Likewise.
++	(____STRTOF_INTERNAL): Use STRTOF_NAN to parse NaN payload.
++	* stdlib/strtof_l.c (____strtoull_l_internal): Remove declaration.
++	(STRTOF_NAN): Define macro.
++	(SET_MANTISSA): Remove macro.
++	* sysdeps/ieee754/ldbl-128/strtold_l.c (STRTOF_NAN): Define macro.
++	(SET_MANTISSA): Remove macro.
++	* sysdeps/ieee754/ldbl-128ibm/strtold_l.c (STRTOF_NAN): Define
++	macro.
++	(SET_MANTISSA): Remove macro.
++	* sysdeps/ieee754/ldbl-64-128/strtold_l.c (STRTOF_NAN): Define
++	macro.
++	(SET_MANTISSA): Remove macro.
++	* sysdeps/ieee754/ldbl-96/strtold_l.c (STRTOF_NAN): Define macro.
++	(SET_MANTISSA): Remove macro.
++	* wcsmbs/wcstod_l.c (____wcstoull_l_internal): Remove declaration.
++	* wcsmbs/wcstof_l.c (____wcstoull_l_internal): Likewise.
++	* wcsmbs/wcstold_l.c (____wcstoull_l_internal): Likewise.
++
++ 	[BZ #19266]
++ 	* stdlib/strtod_l.c (____STRTOF_INTERNAL): Check directly for
++ 	upper case and lower case letters inside NAN(), not using TOLOWER.
+ 2015-08-08  Paul Pluzhnikov  <ppluzhnikov@google.com>
+ 
+    [BZ #17905]
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_2.patch b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_2.patch
new file mode 100644
index 0000000..e30307f
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2015-9761_2.patch
@@ -0,0 +1,385 @@
+From 8f5e8b01a1da2a207228f2072c934fa5918554b8 Mon Sep 17 00:00:00 2001
+From: Joseph Myers <joseph@codesourcery.com>
+Date: Fri, 4 Dec 2015 20:36:28 +0000
+Subject: [PATCH] Fix nan functions handling of payload strings (bug 16961, bug
+ 16962).
+
+The nan, nanf and nanl functions handle payload strings by doing e.g.:
+
+  if (tagp[0] != '\0')
+    {
+      char buf[6 + strlen (tagp)];
+      sprintf (buf, "NAN(%s)", tagp);
+      return strtod (buf, NULL);
+    }
+
+This is an unbounded stack allocation based on the length of the
+argument.  Furthermore, if the argument starts with an n-char-sequence
+followed by ')', that n-char-sequence is wrongly treated as
+significant for determining the payload of the resulting NaN, when ISO
+C says the call should be equivalent to strtod ("NAN", NULL), without
+being affected by that initial n-char-sequence.  This patch fixes both
+those problems by using the __strtod_nan etc. functions recently
+factored out of strtod etc. for that purpose, with those functions
+being exported from libc at version GLIBC_PRIVATE.
+
+Tested for x86_64, x86, mips64 and powerpc.
+
+	[BZ #16961]
+	[BZ #16962]
+	* math/s_nan.c (__nan): Use __strtod_nan instead of constructing a
+	string on the stack for strtod.
+	* math/s_nanf.c (__nanf): Use __strtof_nan instead of constructing
+	a string on the stack for strtof.
+	* math/s_nanl.c (__nanl): Use __strtold_nan instead of
+	constructing a string on the stack for strtold.
+	* stdlib/Versions (libc): Add __strtof_nan, __strtod_nan and
+	__strtold_nan to GLIBC_PRIVATE.
+	* math/test-nan-overflow.c: New file.
+	* math/test-nan-payload.c: Likewise.
+	* math/Makefile (tests): Add test-nan-overflow and
+	test-nan-payload.
+
+Upstream-Status: Backport
+CVE: CVE-2015-9761 patch #2
+[Yocto # 8980]
+
+https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=8f5e8b01a1da2a207228f2072c934fa5918554b8
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog                |  17 +++++++
+ NEWS                     |   6 +++
+ math/Makefile            |   3 +-
+ math/s_nan.c             |   9 +---
+ math/s_nanf.c            |   9 +---
+ math/s_nanl.c            |   9 +---
+ math/test-nan-overflow.c |  66 +++++++++++++++++++++++++
+ math/test-nan-payload.c  | 122 +++++++++++++++++++++++++++++++++++++++++++++++
+ stdlib/Versions          |   1 +
+ 9 files changed, 217 insertions(+), 25 deletions(-)
+ create mode 100644 math/test-nan-overflow.c
+ create mode 100644 math/test-nan-payload.c
+
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,20 @@
++2015-12-04  Joseph Myers  <joseph@codesourcery.com>
++
++	[BZ #16961]
++	[BZ #16962]
++	* math/s_nan.c (__nan): Use __strtod_nan instead of constructing a
++	string on the stack for strtod.
++	* math/s_nanf.c (__nanf): Use __strtof_nan instead of constructing
++	a string on the stack for strtof.
++	* math/s_nanl.c (__nanl): Use __strtold_nan instead of
++	constructing a string on the stack for strtold.
++	* stdlib/Versions (libc): Add __strtof_nan, __strtod_nan and
++	__strtold_nan to GLIBC_PRIVATE.
++	* math/test-nan-overflow.c: New file.
++	* math/test-nan-payload.c: Likewise.
++	* math/Makefile (tests): Add test-nan-overflow and
++	test-nan-payload.
++
+ 2015-11-24  Joseph Myers  <joseph@codesourcery.com>
+  
+ 	* stdlib/strtod_nan.c: New file.
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -99,6 +99,12 @@ Version 2.22
+ 
+ Version 2.21
+ 
++Security related changes:
++
++* The nan, nanf and nanl functions no longer have unbounded stack usage
++  depending on the length of the string passed as an argument to the
++  functions.  Reported by Joseph Myers.
++
+ * The following bugs are resolved with this release:
+ 
+   6652, 10672, 12674, 12847, 12926, 13862, 14132, 14138, 14171, 14498,
+Index: git/math/Makefile
+===================================================================
+--- git.orig/math/Makefile
++++ git/math/Makefile
+@@ -110,6 +110,7 @@ tests = test-matherr test-fenv atest-exp
+ 	test-tgmath-ret bug-nextafter bug-nexttoward bug-tgmath1 \
+ 	test-tgmath-int test-tgmath2 test-powl tst-CMPLX tst-CMPLX2 test-snan \
+ 	test-fenv-tls test-fenv-preserve test-fenv-return test-fenvinline \
++    test-nan-overflow test-nan-payload \
+ 	$(tests-static)
+ tests-static = test-fpucw-static test-fpucw-ieee-static
+ # We do the `long double' tests only if this data type is available and
+Index: git/math/s_nan.c
+===================================================================
+--- git.orig/math/s_nan.c
++++ git/math/s_nan.c
+@@ -28,14 +28,7 @@
+ double
+ __nan (const char *tagp)
+ {
+-  if (tagp[0] != '\0')
+-    {
+-      char buf[6 + strlen (tagp)];
+-      sprintf (buf, "NAN(%s)", tagp);
+-      return strtod (buf, NULL);
+-    }
+-
+-  return NAN;
++  return __strtod_nan (tagp, NULL, 0);
+ }
+ weak_alias (__nan, nan)
+ #ifdef NO_LONG_DOUBLE
+Index: git/math/s_nanf.c
+===================================================================
+--- git.orig/math/s_nanf.c
++++ git/math/s_nanf.c
+@@ -28,13 +28,6 @@
+ float
+ __nanf (const char *tagp)
+ {
+-  if (tagp[0] != '\0')
+-    {
+-      char buf[6 + strlen (tagp)];
+-      sprintf (buf, "NAN(%s)", tagp);
+-      return strtof (buf, NULL);
+-    }
+-
+-  return NAN;
++  return __strtof_nan (tagp, NULL, 0);
+ }
+ weak_alias (__nanf, nanf)
+Index: git/math/s_nanl.c
+===================================================================
+--- git.orig/math/s_nanl.c
++++ git/math/s_nanl.c
+@@ -28,13 +28,6 @@
+ long double
+ __nanl (const char *tagp)
+ {
+-  if (tagp[0] != '\0')
+-    {
+-      char buf[6 + strlen (tagp)];
+-      sprintf (buf, "NAN(%s)", tagp);
+-      return strtold (buf, NULL);
+-    }
+-
+-  return NAN;
++  return __strtold_nan (tagp, NULL, 0);
+ }
+ weak_alias (__nanl, nanl)
+Index: git/math/test-nan-overflow.c
+===================================================================
+--- /dev/null
++++ git/math/test-nan-overflow.c
+@@ -0,0 +1,66 @@
++/* Test nan functions stack overflow (bug 16962).
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <math.h>
++#include <stdio.h>
++#include <string.h>
++#include <sys/resource.h>
++
++#define STACK_LIM 1048576
++#define STRING_SIZE (2 * STACK_LIM)
++
++static int
++do_test (void)
++{
++  int result = 0;
++  struct rlimit lim;
++  getrlimit (RLIMIT_STACK, &lim);
++  lim.rlim_cur = STACK_LIM;
++  setrlimit (RLIMIT_STACK, &lim);
++  char *nanstr = malloc (STRING_SIZE);
++  if (nanstr == NULL)
++    {
++      puts ("malloc failed, cannot test");
++      return 77;
++    }
++  memset (nanstr, '0', STRING_SIZE - 1);
++  nanstr[STRING_SIZE - 1] = 0;
++#define NAN_TEST(TYPE, FUNC)			\
++  do						\
++    {						\
++      char *volatile p = nanstr;		\
++      volatile TYPE v = FUNC (p);		\
++      if (isnan (v))				\
++	puts ("PASS: " #FUNC);			\
++      else					\
++	{					\
++	  puts ("FAIL: " #FUNC);		\
++	  result = 1;				\
++	}					\
++    }						\
++  while (0)
++  NAN_TEST (float, nanf);
++  NAN_TEST (double, nan);
++#ifndef NO_LONG_DOUBLE
++  NAN_TEST (long double, nanl);
++#endif
++  return result;
++}
++
++#define TEST_FUNCTION do_test ()
++#include "../test-skeleton.c"
+Index: git/math/test-nan-payload.c
+===================================================================
+--- /dev/null
++++ git/math/test-nan-payload.c
+@@ -0,0 +1,122 @@
++/* Test nan functions payload handling (bug 16961).
++   Copyright (C) 2015 Free Software Foundation, Inc.
++   This file is part of the GNU C Library.
++
++   The GNU C Library is free software; you can redistribute it and/or
++   modify it under the terms of the GNU Lesser General Public
++   License as published by the Free Software Foundation; either
++   version 2.1 of the License, or (at your option) any later version.
++
++   The GNU C Library is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   Lesser General Public License for more details.
++
++   You should have received a copy of the GNU Lesser General Public
++   License along with the GNU C Library; if not, see
++   <http://www.gnu.org/licenses/>.  */
++
++#include <float.h>
++#include <math.h>
++#include <stdio.h>
++#include <stdlib.h>
++#include <string.h>
++
++/* Avoid built-in functions.  */
++#define WRAP_NAN(FUNC, STR) \
++  ({ const char *volatile wns = (STR); FUNC (wns); })
++#define WRAP_STRTO(FUNC, STR) \
++  ({ const char *volatile wss = (STR); FUNC (wss, NULL); })
++
++#define CHECK_IS_NAN(TYPE, A)			\
++  do						\
++    {						\
++      if (isnan (A))				\
++	puts ("PASS: " #TYPE " " #A);		\
++      else					\
++	{					\
++	  puts ("FAIL: " #TYPE " " #A);		\
++	  result = 1;				\
++	}					\
++    }						\
++  while (0)
++
++#define CHECK_SAME_NAN(TYPE, A, B)			\
++  do							\
++    {							\
++      if (memcmp (&(A), &(B), sizeof (A)) == 0)		\
++	puts ("PASS: " #TYPE " " #A " = " #B);		\
++      else						\
++	{						\
++	  puts ("FAIL: " #TYPE " " #A " = " #B);	\
++	  result = 1;					\
++	}						\
++    }							\
++  while (0)
++
++#define CHECK_DIFF_NAN(TYPE, A, B)			\
++  do							\
++    {							\
++      if (memcmp (&(A), &(B), sizeof (A)) != 0)		\
++	puts ("PASS: " #TYPE " " #A " != " #B);		\
++      else						\
++	{						\
++	  puts ("FAIL: " #TYPE " " #A " != " #B);	\
++	  result = 1;					\
++	}						\
++    }							\
++  while (0)
++
++/* Cannot test payloads by memcmp for formats where NaNs have padding
++   bits.  */
++#define CAN_TEST_EQ(MANT_DIG) ((MANT_DIG) != 64 && (MANT_DIG) != 106)
++
++#define RUN_TESTS(TYPE, SFUNC, FUNC, MANT_DIG)		\
++  do							\
++    {							\
++     TYPE n123 = WRAP_NAN (FUNC, "123");		\
++     CHECK_IS_NAN (TYPE, n123);				\
++     TYPE s123 = WRAP_STRTO (SFUNC, "NAN(123)");	\
++     CHECK_IS_NAN (TYPE, s123);				\
++     TYPE n456 = WRAP_NAN (FUNC, "456");		\
++     CHECK_IS_NAN (TYPE, n456);				\
++     TYPE s456 = WRAP_STRTO (SFUNC, "NAN(456)");	\
++     CHECK_IS_NAN (TYPE, s456);				\
++     TYPE n123x = WRAP_NAN (FUNC, "123)");		\
++     CHECK_IS_NAN (TYPE, n123x);			\
++     TYPE nemp = WRAP_NAN (FUNC, "");			\
++     CHECK_IS_NAN (TYPE, nemp);				\
++     TYPE semp = WRAP_STRTO (SFUNC, "NAN()");		\
++     CHECK_IS_NAN (TYPE, semp);				\
++     TYPE sx = WRAP_STRTO (SFUNC, "NAN");		\
++     CHECK_IS_NAN (TYPE, sx);				\
++     if (CAN_TEST_EQ (MANT_DIG))			\
++       CHECK_SAME_NAN (TYPE, n123, s123);		\
++     if (CAN_TEST_EQ (MANT_DIG))			\
++       CHECK_SAME_NAN (TYPE, n456, s456);		\
++     if (CAN_TEST_EQ (MANT_DIG))			\
++       CHECK_SAME_NAN (TYPE, nemp, semp);		\
++     if (CAN_TEST_EQ (MANT_DIG))			\
++       CHECK_SAME_NAN (TYPE, n123x, sx);		\
++     CHECK_DIFF_NAN (TYPE, n123, n456);			\
++     CHECK_DIFF_NAN (TYPE, n123, nemp);			\
++     CHECK_DIFF_NAN (TYPE, n123, n123x);		\
++     CHECK_DIFF_NAN (TYPE, n456, nemp);			\
++     CHECK_DIFF_NAN (TYPE, n456, n123x);		\
++    }							\
++  while (0)
++
++static int
++do_test (void)
++{
++  int result = 0;
++  RUN_TESTS (float, strtof, nanf, FLT_MANT_DIG);
++  RUN_TESTS (double, strtod, nan, DBL_MANT_DIG);
++#ifndef NO_LONG_DOUBLE
++  RUN_TESTS (long double, strtold, nanl, LDBL_MANT_DIG);
++#endif
++  return result;
++}
++
++#define TEST_FUNCTION do_test ()
++#include "../test-skeleton.c"
+Index: git/stdlib/Versions
+===================================================================
+--- git.orig/stdlib/Versions
++++ git/stdlib/Versions
+@@ -118,5 +118,6 @@ libc {
+     # Used from other libraries
+     __libc_secure_getenv;
+     __call_tls_dtors;
++    __strtof_nan; __strtod_nan; __strtold_nan;
+   }
+ }
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/strcoll-Remove-incorrect-STRDIFF-based-optimization-.patch b/yocto-poky/meta/recipes-core/glibc/glibc/strcoll-Remove-incorrect-STRDIFF-based-optimization-.patch
new file mode 100644
index 0000000..8ce255f
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/strcoll-Remove-incorrect-STRDIFF-based-optimization-.patch
@@ -0,0 +1,323 @@
+Upstream-Status: Backport
+
+Signed-off-by: Li Xin <lixin.fnst@cn.fujitsu.com>
+
+From https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=6c84109cfa26f35c3dfed3acb97d347361bd5849
+Author: Carlos O'Donell <carlos@systemhalted.org>
+Date:   Thu Oct 8 16:34:53 2015 -0400
+
+    strcoll: Remove incorrect STRDIFF-based optimization (Bug 18589).
+
+    The optimization introduced in commit
+    f13c2a8dff2329c6692a80176262ceaaf8a6f74e, causes regressions in
+    sorting for languages that have digraphs that change sort order, like
+    cs_CZ which sorts ch between h and i.
+
+    My analysis shows the fast-forwarding optimization in STRCOLL advances
+    through a digraph while possibly stopping in the middle which results
+    in a subsequent skipping of the digraph and incorrect sorting. The
+    optimization is incorrect as implemented and because of that I'm
+    removing it for 2.23, and I will also commit this fix for 2.22 where
+    it was originally introduced.
+
+    This patch reverts the optimization, introduces a new bug-strcoll2.c
+    regression test that tests both cs_CZ.UTF-8 and da_DK.ISO-8859-1 and
+    ensures they sort one digraph each correctly. The optimization can't be
+    applied without regressing this test.
+
+    Checked on x86_64, bug-strcoll2.c fails without this patch and passes
+    after. This will also get a fix on 2.22 which has the same bug.
+
+    (cherry picked from commit 87701a58e291bd7ac3b407d10a829dac52c9c16e)
+---
+ locale/C-collate.c           |  4 +-
+ locale/categories.def        |  1 -
+ locale/langinfo.h            |  1 -
+ locale/localeinfo.h          |  7 ----
+ locale/programs/ld-collate.c |  9 -----
+ string/bug-strcoll2.c        | 95 ++++++++++++++++++++++++++++++++++++++++++++
+ string/strcoll_l.c           | 39 +-----------------
+ wcsmbs/wcscoll_l.c           |  1 -
+ 8 files changed, 98 insertions(+), 59 deletions(-)
+ create mode 100644 string/bug-strcoll2.c
+
+diff --git a/locale/C-collate.c b/locale/C-collate.c
+index d7f3c55..06dfdfa 100644
+--- a/locale/C-collate.c
++++ b/locale/C-collate.c
+@@ -144,8 +144,6 @@ const struct __locale_data _nl_C_LC_COLLATE attribute_hidden =
+     /* _NL_COLLATE_COLLSEQWC */
+     { .string = (const char *) collseqwc },
+     /* _NL_COLLATE_CODESET */
+-    { .string = _nl_C_codeset },
+-    /* _NL_COLLATE_ENCODING_TYPE */
+-    { .word = __cet_8bit }
++    { .string = _nl_C_codeset }
+   }
+ };
+diff --git a/locale/categories.def b/locale/categories.def
+index 045489d..a8dda53 100644
+--- a/locale/categories.def
++++ b/locale/categories.def
+@@ -58,7 +58,6 @@ DEFINE_CATEGORY
+   DEFINE_ELEMENT (_NL_COLLATE_COLLSEQMB,        "collate-collseqmb",        std, wstring)
+   DEFINE_ELEMENT (_NL_COLLATE_COLLSEQWC,        "collate-collseqwc",        std, wstring)
+   DEFINE_ELEMENT (_NL_COLLATE_CODESET,		"collate-codeset",	    std, string)
+-  DEFINE_ELEMENT (_NL_COLLATE_ENCODING_TYPE,   "collate-encoding-type",    std, word)
+   ), NO_POSTLOAD)
+ 
+ 
+diff --git a/locale/langinfo.h b/locale/langinfo.h
+index ffc5c7f..a565d9d 100644
+--- a/locale/langinfo.h
++++ b/locale/langinfo.h
+@@ -255,7 +255,6 @@ enum
+   _NL_COLLATE_COLLSEQMB,
+   _NL_COLLATE_COLLSEQWC,
+   _NL_COLLATE_CODESET,
+-  _NL_COLLATE_ENCODING_TYPE,
+   _NL_NUM_LC_COLLATE,
+ 
+   /* LC_CTYPE category: character classification.
+diff --git a/locale/localeinfo.h b/locale/localeinfo.h
+index a7516c0..c076d8e 100644
+--- a/locale/localeinfo.h
++++ b/locale/localeinfo.h
+@@ -110,13 +110,6 @@ enum coll_sort_rule
+   sort_mask
+ };
+ 
+-/* Collation encoding type.  */
+-enum collation_encoding_type
+-{
+-  __cet_other,
+-  __cet_8bit,
+-  __cet_utf8
+-};
+ 
+ /* We can map the types of the entries into a few categories.  */
+ enum value_type
+diff --git a/locale/programs/ld-collate.c b/locale/programs/ld-collate.c
+index 16e9039..3c88c6d 100644
+--- a/locale/programs/ld-collate.c
++++ b/locale/programs/ld-collate.c
+@@ -32,7 +32,6 @@
+ #include "linereader.h"
+ #include "locfile.h"
+ #include "elem-hash.h"
+-#include "../localeinfo.h"
+ 
+ /* Uncomment the following line in the production version.  */
+ /* #define NDEBUG 1 */
+@@ -2130,8 +2129,6 @@ collate_output (struct localedef_t *locale, const struct charmap_t *charmap,
+ 	  /* The words have to be handled specially.  */
+ 	  if (idx == _NL_ITEM_INDEX (_NL_COLLATE_SYMB_HASH_SIZEMB))
+ 	    add_locale_uint32 (&file, 0);
+-	  else if (idx == _NL_ITEM_INDEX (_NL_COLLATE_ENCODING_TYPE))
+-	    add_locale_uint32 (&file, __cet_other);
+ 	  else
+ 	    add_locale_empty (&file);
+ 	}
+@@ -2495,12 +2492,6 @@ collate_output (struct localedef_t *locale, const struct charmap_t *charmap,
+   add_locale_raw_data (&file, collate->mbseqorder, 256);
+   add_locale_collseq_table (&file, &collate->wcseqorder);
+   add_locale_string (&file, charmap->code_set_name);
+-  if (strcmp (charmap->code_set_name, "UTF-8") == 0)
+-    add_locale_uint32 (&file, __cet_utf8);
+-  else if (charmap->mb_cur_max == 1)
+-    add_locale_uint32 (&file, __cet_8bit);
+-  else
+-    add_locale_uint32 (&file, __cet_other);
+   write_locale_data (output_path, LC_COLLATE, "LC_COLLATE", &file);
+ 
+   obstack_free (&weightpool, NULL);
+diff --git a/string/bug-strcoll2.c b/string/bug-strcoll2.c
+new file mode 100644
+index 0000000..950b090
+--- /dev/null
++++ b/string/bug-strcoll2.c
+@@ -0,0 +1,95 @@
++/* Bug 18589: sort-test.sh fails at random.
++ * Copyright (C) 1998-2015 Free Software Foundation, Inc.
++ * This file is part of the GNU C Library.
++ * Contributed by Ulrich Drepper <drepper@cygnus.com>, 1998.
++ *
++ * The GNU C Library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Lesser General Public
++ * License as published by the Free Software Foundation; either
++ * version 2.1 of the License, or (at your option) any later version.
++ *
++ * The GNU C Library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++ * Lesser General Public License for more details.
++ *
++ * You should have received a copy of the GNU Lesser General Public
++ * License along with the GNU C Library; if not, see
++ * <http://www.gnu.org/licenses/>.  */
++
++#include <stdio.h>
++#include <string.h>
++#include <locale.h>
++
++/* An incorrect strcoll optimization resulted in incorrect
++ *    results from strcoll for cs_CZ and da_DK.  */
++
++int
++test_cs_CZ (void)
++{
++  const char t1[] = "config";
++  const char t2[] = "choose";
++  if (setlocale (LC_ALL, "cs_CZ.UTF-8") == NULL)
++    {
++      perror ("setlocale");
++      return 1;
++    }
++  /* In Czech the digraph ch sorts after c, therefore we expect
++ *      config to sort before choose.  */
++  int a = strcoll (t1, t2);
++  int b = strcoll (t2, t1);
++  printf ("strcoll (\"%s\", \"%s\") = %d\n", t1, t2, a);
++  printf ("strcoll (\"%s\", \"%s\") = %d\n", t2, t1, b);
++  if (a < 0 && b > 0)
++    {
++      puts ("PASS: config < choose");
++      return 0;
++    }
++  else
++    {
++      puts ("FAIL: Wrong sorting in cz_CZ.UTF-8.");
++      return 1;
++    }
++}
++
++int
++test_da_DK (void)
++{
++  const char t1[] = "AS";
++  const char t2[] = "AA";
++  if (setlocale (LC_ALL, "da_DK.ISO-8859-1") == NULL)
++    {
++      perror ("setlocale");
++      return 1;
++    }
++  /* AA should be treated as the last letter of the Danish alphabet,
++ *      hence sorting after AS.  */
++  int a = strcoll (t1, t2);
++  int b = strcoll (t2, t1);
++  printf ("strcoll (\"%s\", \"%s\") = %d\n", t1, t2, a);
++  printf ("strcoll (\"%s\", \"%s\") = %d\n", t2, t1, b);
++  if (a < 0 && b > 0)
++    {
++      puts ("PASS: AS < AA");
++      return 0;
++    }
++  else
++    {
++      puts ("FAIL: Wrong sorting in da_DK.ISO-8859-1");
++      return 1;
++    }
++}
++
++static int
++do_test (void)
++{
++  int err = 0;
++  err |= test_cs_CZ ();
++  err |= test_da_DK ();
++  return err;
++}
++
++#define TEST_FUNCTION do_test ()
++#include "../test-skeleton.c"
++
++
+diff --git a/string/strcoll_l.c b/string/strcoll_l.c
+index b36b18c..a18b65e 100644
+--- a/string/strcoll_l.c
++++ b/string/strcoll_l.c
+@@ -30,7 +30,6 @@
+ # define STRING_TYPE char
+ # define USTRING_TYPE unsigned char
+ # define STRCOLL __strcoll_l
+-# define STRDIFF __strdiff
+ # define STRCMP strcmp
+ # define WEIGHT_H "../locale/weight.h"
+ # define SUFFIX	MB
+@@ -43,19 +42,6 @@
+ #include "../locale/localeinfo.h"
+ #include WEIGHT_H
+ 
+-#define MASK_UTF8_7BIT  (1 << 7)
+-#define MASK_UTF8_START (3 << 6)
+-
+-size_t
+-STRDIFF (const STRING_TYPE *s, const STRING_TYPE *t)
+-{
+-  size_t n;
+-
+-  for (n = 0; *s != '\0' && *s++ == *t++; ++n)
+-    continue;
+-
+-  return n;
+-}
+ 
+ /* Track status while looking for sequences in a string.  */
+ typedef struct
+@@ -274,29 +260,9 @@ STRCOLL (const STRING_TYPE *s1, const STRING_TYPE *s2, __locale_t l)
+   const USTRING_TYPE *extra;
+   const int32_t *indirect;
+ 
+-  /* In case there is no locale specific sort order (C / POSIX).  */
+   if (nrules == 0)
+     return STRCMP (s1, s2);
+ 
+-  /* Fast forward to the position of the first difference.  Needs to be
+-     encoding aware as the byte-by-byte comparison can stop in the middle
+-     of a char sequence for multibyte encodings like UTF-8.  */
+-  uint_fast32_t encoding =
+-    current->values[_NL_ITEM_INDEX (_NL_COLLATE_ENCODING_TYPE)].word;
+-  if (encoding != __cet_other)
+-    {
+-      size_t diff = STRDIFF (s1, s2);
+-      if (diff > 0)
+-	{
+-	  if (encoding == __cet_utf8 && (*(s1 + diff) & MASK_UTF8_7BIT) != 0)
+-	    do
+-	      diff--;
+-	    while (diff > 0 && (*(s1 + diff) & MASK_UTF8_START) != MASK_UTF8_START);
+-	  s1 += diff;
+-	  s2 += diff;
+-	}
+-    }
+-
+   /* Catch empty strings.  */
+   if (__glibc_unlikely (*s1 == '\0') || __glibc_unlikely (*s2 == '\0'))
+     return (*s1 != '\0') - (*s2 != '\0');
+@@ -363,9 +329,8 @@ STRCOLL (const STRING_TYPE *s1, const STRING_TYPE *s2, __locale_t l)
+ 		     byte-level comparison to ensure that we don't waste time
+ 		     going through multiple passes for totally equal strings
+ 		     before proceeding to subsequent passes.  */
+-		  if (pass == 0 && encoding == __cet_other &&
+-		      STRCMP (s1, s2) == 0)
+-		    return result;
++		  if (pass == 0 && STRCMP (s1, s2) == 0)
++                    return result;
+ 		  else
+ 		    break;
+ 	        }
+diff --git a/wcsmbs/wcscoll_l.c b/wcsmbs/wcscoll_l.c
+index 6d9384a..87f240d 100644
+--- a/wcsmbs/wcscoll_l.c
++++ b/wcsmbs/wcscoll_l.c
+@@ -23,7 +23,6 @@
+ #define STRING_TYPE wchar_t
+ #define USTRING_TYPE wint_t
+ #define STRCOLL __wcscoll_l
+-#define STRDIFF __wcsdiff
+ #define STRCMP __wcscmp
+ #define WEIGHT_H "../locale/weightwc.h"
+ #define SUFFIX	WC
+-- 
+1.8.4.2
+
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc/use_64bit_atomics.patch b/yocto-poky/meta/recipes-core/glibc/glibc/use_64bit_atomics.patch
new file mode 100644
index 0000000..eb7f2b2
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/glibc/glibc/use_64bit_atomics.patch
@@ -0,0 +1,24 @@
+This patch alows using 64 bit atomic instructions on a
+32 bit platform. This is safe, providing x86 is Pentium or
+later (would not work on i386, i486). Using 64 bit atomic 
+instructions bypasses code containing a bug as documented in
+https://bugzilla.yoctoproject.org/show_bug.cgi?id=8140
+
+Upstream-Status: TBD
+
+Signed-off-by: Juro Bystricky <juro.bystricky@intel.com>
+
+
+Index: libc/sysdeps/i386/i486/bits/atomic.h
+===================================================================
+--- libc.orig/sysdeps/i386/i486/bits/atomic.h
++++ libc/sysdeps/i386/i486/bits/atomic.h
+@@ -54,7 +54,7 @@ typedef uintmax_t uatomic_max_t;
+ # endif
+ #endif
+ 
+-#define __HAVE_64B_ATOMICS 0
++#define __HAVE_64B_ATOMICS 1
+ #define USE_ATOMIC_COMPILER_BUILTINS 0
+ 
+ 
diff --git a/yocto-poky/meta/recipes-core/glibc/glibc_2.22.bb b/yocto-poky/meta/recipes-core/glibc/glibc_2.22.bb
index 09f0428..a13b7f9 100644
--- a/yocto-poky/meta/recipes-core/glibc/glibc_2.22.bb
+++ b/yocto-poky/meta/recipes-core/glibc/glibc_2.22.bb
@@ -9,11 +9,11 @@
 
 SRCREV ?= "a34d1c6afc86521d6ad17662a3b5362d8481514c"
 
-BRANCH ?= "release/${PV}/master"
+SRCBRANCH ?= "release/${PV}/master"
 
 GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git"
 
-SRC_URI = "${GLIBC_GIT_URI};branch=${BRANCH};name=glibc \
+SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \
            file://0004-Backport-https-sourceware.org-ml-libc-ports-2007-12-.patch \
            file://0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch \
            file://0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch \
@@ -39,6 +39,14 @@
            file://0026-eglibc-dl_debug_mask-is-controlled-by-__OPTION_EGLIB.patch \
            file://0027-eglibc-use-option-groups-Conditionally-exclude-c-tes.patch \
            file://nscd-no-bash.patch \
+           file://strcoll-Remove-incorrect-STRDIFF-based-optimization-.patch \
+           file://0028-Clear-ELF_RTYPE_CLASS_EXTERN_PROTECTED_DATA-for-prel.patch \
+           file://CVE-2015-8777.patch \
+           file://CVE-2015-8779.patch \
+           file://CVE-2015-9761_1.patch \
+           file://CVE-2015-9761_2.patch \
+           file://CVE-2015-8776.patch \
+           file://CVE-2015-7547.patch \
 "
 
 SRC_URI += "\
@@ -50,6 +58,7 @@
            file://0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch \
            file://0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch \
            file://0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \
+           file://use_64bit_atomics.patch \
 "
 
 S = "${WORKDIR}/git"
diff --git a/yocto-poky/meta/recipes-core/images/build-appliance-image_12.0.1.bb b/yocto-poky/meta/recipes-core/images/build-appliance-image_12.0.1.bb
index 0a86ba4..fdeadb6 100644
--- a/yocto-poky/meta/recipes-core/images/build-appliance-image_12.0.1.bb
+++ b/yocto-poky/meta/recipes-core/images/build-appliance-image_12.0.1.bb
@@ -21,8 +21,8 @@
 
 inherit core-image
 
-SRCREV ?= "d01cd53429b1c20f01dac97f1b9b659cb9dc9812"
-SRC_URI = "git://git.yoctoproject.org/poky \
+SRCREV ?= "7fe17a2942ff03e2ec47d566fd5393f52b2eb736"
+SRC_URI = "git://git.yoctoproject.org/poky;branch=jethro \
            file://Yocto_Build_Appliance.vmx \
            file://Yocto_Build_Appliance.vmxf \
           "
diff --git a/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh b/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh
index fc4908e..0443a9d 100644
--- a/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh
+++ b/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh
@@ -134,7 +134,7 @@
 # 2) they are detected asynchronously (need rootwait)
 rootwait=""
 part_prefix=""
-if [ ! "${device#mmcblk}" = "${device}" ]; then
+if [ ! "${device#/dev/mmcblk}" = "${device}" ]; then
     part_prefix="p"
     rootwait="rootwait"
 fi
@@ -184,8 +184,8 @@
 echo "Copying rootfs files..."
 cp -a /src_root/* /tgt_root
 if [ -d /tgt_root/etc/ ] ; then
-    boot_uuid=$(blkid -o value -s UUID ${device}1)
-    swap_part_uuid=$(blkid -o value -s PARTUUID ${device}3)
+    boot_uuid=$(blkid -o value -s UUID ${bootfs})
+    swap_part_uuid=$(blkid -o value -s PARTUUID ${swap})
     echo "/dev/disk/by-partuuid/$swap_part_uuid                swap             swap       defaults              0  0" >> /tgt_root/etc/fstab
     echo "UUID=$boot_uuid              /boot            vfat       defaults              1  2" >> /tgt_root/etc/fstab
     # We dont want udev to mount our root device while we're booting...
@@ -206,7 +206,7 @@
 cp /run/media/$1/EFI/BOOT/*.efi $EFIDIR
 
 if [ -f /run/media/$1/EFI/BOOT/grub.cfg ]; then
-    root_part_uuid=$(blkid -o value -s PARTUUID ${device}2)
+    root_part_uuid=$(blkid -o value -s PARTUUID ${rootfs})
     GRUBCFG="$EFIDIR/grub.cfg"
     cp /run/media/$1/EFI/BOOT/grub.cfg $GRUBCFG
     # Update grub config for the installed image
@@ -223,6 +223,7 @@
 fi
 
 if [ -d /run/media/$1/loader ]; then
+    rootuuid=$(blkid -o value -s PARTUUID ${rootfs})
     GUMMIBOOT_CFGS="/boot/loader/entries/*.conf"
     # copy config files for gummiboot
     cp -dr /run/media/$1/loader /boot
diff --git a/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/finish b/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/finish
index e712ff0..d09bbb8 100755
--- a/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/finish
+++ b/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/finish
@@ -37,7 +37,7 @@
 				fi
 				mount $flags $bootparam_root $ROOTFS_DIR
 			else
-				debug "root '$bootparam_root' doesn't exist."
+				msg "root '$bootparam_root' doesn't exist."
 			fi
 		fi
 
diff --git a/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/init b/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/init
index 9291ad5..204f237 100755
--- a/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/init
+++ b/yocto-poky/meta/recipes-core/initrdscripts/initramfs-framework/init
@@ -58,7 +58,7 @@
     echo $1 >/dev/console
     echo >/dev/console
 
-    if [ -n "bootparam_init_fatal_sh" ]; then
+    if [ -n "$bootparam_init_fatal_sh" ]; then
         sh
     else
 	while [ "true" ]; do
diff --git a/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh b/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh
index 0cfe76e..0a52c90 100644
--- a/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh
+++ b/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh
@@ -21,3 +21,7 @@
 if [ -e /sys/kernel/debug ] && grep -q debugfs /proc/filesystems; then
   mount -t debugfs debugfs /sys/kernel/debug
 fi
+
+if ! [ -e /dev/zero ] && [ -e /dev ] && grep -q devtmpfs /proc/filesystems; then
+  mount -n -t devtmpfs devtmpfs /dev
+fi
diff --git a/yocto-poky/meta/recipes-core/kbd/kbd_2.0.2.bb b/yocto-poky/meta/recipes-core/kbd/kbd_2.0.2.bb
index 136dc7a..49bb6c9 100644
--- a/yocto-poky/meta/recipes-core/kbd/kbd_2.0.2.bb
+++ b/yocto-poky/meta/recipes-core/kbd/kbd_2.0.2.bb
@@ -34,3 +34,4 @@
 ALTERNATIVE_PRIORITY = "100"
 
 BBCLASSEXTEND = "native"
+PARALLEL_MAKEINST = ""
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2.inc b/yocto-poky/meta/recipes-core/libxml/libxml2.inc
index 1c3c37d..310d5bb 100644
--- a/yocto-poky/meta/recipes-core/libxml/libxml2.inc
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2.inc
@@ -21,6 +21,22 @@
            file://libxml-m4-use-pkgconfig.patch \
            file://configure.ac-fix-cross-compiling-warning.patch \
            file://0001-CVE-2015-1819-Enforce-the-reader-to-run-in-constant-.patch \
+           file://CVE-2015-7941-1-Stop-parsing-on-entities-boundaries-errors.patch \
+           file://CVE-2015-7941-2-Cleanup-conditional-section-error-handling.patch \
+           file://CVE-2015-8317-Fail-parsing-early-on-if-encoding-conversion-failed.patch \
+           file://CVE-2015-7942-Another-variation-of-overflow-in-Conditional-section.patch \
+           file://CVE-2015-7942-2-Fix-an-error-in-previous-Conditional-section-patch.patch \
+           file://0001-CVE-2015-8035-Fix-XZ-compression-support-loop.patch \
+           file://CVE-2015-7498-Avoid-processing-entities-after-encoding-conversion-.patch \
+           file://0001-CVE-2015-7497-Avoid-an-heap-buffer-overflow-in-xmlDi.patch \
+           file://CVE-2015-7499-1-Add-xmlHaltParser-to-stop-the-parser.patch \
+           file://CVE-2015-7499-2-Detect-incoherency-on-GROW.patch \
+           file://0001-Fix-a-bug-on-name-parsing-at-the-end-of-current-inpu.patch \
+           file://0001-CVE-2015-7500-Fix-memory-access-error-due-to-incorre.patch \
+           file://0001-CVE-2015-8242-Buffer-overead-with-HTML-parser-in-pus.patch \
+           file://0001-CVE-2015-5312-Another-entity-expansion-issue.patch \
+           file://CVE-2015-8241.patch \
+           file://CVE-2015-8710.patch \
           "
 
 BINCONFIG = "${bindir}/xml2-config"
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-5312-Another-entity-expansion-issue.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-5312-Another-entity-expansion-issue.patch
new file mode 100644
index 0000000..979618d
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-5312-Another-entity-expansion-issue.patch
@@ -0,0 +1,39 @@
+From 69030714cde66d525a8884bda01b9e8f0abf8e1e Mon Sep 17 00:00:00 2001
+From: David Drysdale <drysdale@google.com>
+Date: Fri, 20 Nov 2015 11:13:45 +0800
+Subject: [PATCH] CVE-2015-5312 Another entity expansion issue
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756733
+It is one case where the code in place to detect entities expansions
+failed to exit when the situation was detected, leading to DoS
+Problem reported by Kostya Serebryany @ Google
+Patch provided by David Drysdale @ Google
+
+Upstream-Status: Backport
+
+CVE-2015-5312
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/parser.c b/parser.c
+index b7b6668..da6e729 100644
+--- a/parser.c
++++ b/parser.c
+@@ -2806,6 +2806,10 @@ xmlStringLenDecodeEntities(xmlParserCtxtPtr ctxt, const xmlChar *str, int len,
+ 			                      0, 0, 0);
+ 		ctxt->depth--;
+ 
++		if ((ctxt->lastError.code == XML_ERR_ENTITY_LOOP) ||
++		    (ctxt->lastError.code == XML_ERR_INTERNAL_ERROR))
++		    goto int_error;
++
+ 		if (rep != NULL) {
+ 		    current = rep;
+ 		    while (*current != 0) { /* non input consuming loop */
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7497-Avoid-an-heap-buffer-overflow-in-xmlDi.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7497-Avoid-an-heap-buffer-overflow-in-xmlDi.patch
new file mode 100644
index 0000000..955c961
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7497-Avoid-an-heap-buffer-overflow-in-xmlDi.patch
@@ -0,0 +1,40 @@
+From 6360a31a84efe69d155ed96306b9a931a40beab9 Mon Sep 17 00:00:00 2001
+From: David Drysdale <drysdale@google.com>
+Date: Fri, 20 Nov 2015 10:47:12 +0800
+Subject: [PATCH] CVE-2015-7497 Avoid an heap buffer overflow in
+ xmlDictComputeFastQKey
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756528
+It was possible to hit a negative offset in the name indexing
+used to randomize the dictionary key generation
+Reported and fix provided by David Drysdale @ Google
+
+Upstream-Status: Backport
+
+CVE-2015-7497
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ dict.c | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/dict.c b/dict.c
+index 5f71d55..8c8f931 100644
+--- a/dict.c
++++ b/dict.c
+@@ -486,7 +486,10 @@ xmlDictComputeFastQKey(const xmlChar *prefix, int plen,
+ 	value += 30 * (*prefix);
+ 
+     if (len > 10) {
+-        value += name[len - (plen + 1 + 1)];
++        int offset = len - (plen + 1 + 1);
++	if (offset < 0)
++	    offset = len - (10 + 1);
++	value += name[offset];
+         len = 10;
+ 	if (plen > 10)
+ 	    plen = 10;
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7500-Fix-memory-access-error-due-to-incorre.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7500-Fix-memory-access-error-due-to-incorre.patch
new file mode 100644
index 0000000..b486079
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-7500-Fix-memory-access-error-due-to-incorre.patch
@@ -0,0 +1,131 @@
+From f1063fdbe7fa66332bbb76874101c2a7b51b519f Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Fri, 20 Nov 2015 16:06:59 +0800
+Subject: [PATCH] CVE-2015-7500 Fix memory access error due to incorrect
+ entities boundaries
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756525
+handle properly the case where we popped out of the current entity
+while processing a start tag
+Reported by Kostya Serebryany @ Google
+
+This slightly modifies the output of 754946 in regression tests
+
+Upstream-Status: Backport
+
+CVE-2015-7500
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c                     | 28 ++++++++++++++++++++++------
+ result/errors/754946.xml.err |  7 +++++--
+ 2 files changed, 27 insertions(+), 8 deletions(-)
+
+diff --git a/parser.c b/parser.c
+index c7e4574..c5741e3 100644
+--- a/parser.c
++++ b/parser.c
+@@ -9348,7 +9348,7 @@ xmlParseStartTag2(xmlParserCtxtPtr ctxt, const xmlChar **pref,
+     const xmlChar **atts = ctxt->atts;
+     int maxatts = ctxt->maxatts;
+     int nratts, nbatts, nbdef;
+-    int i, j, nbNs, attval, oldline, oldcol;
++    int i, j, nbNs, attval, oldline, oldcol, inputNr;
+     const xmlChar *base;
+     unsigned long cur;
+     int nsNr = ctxt->nsNr;
+@@ -9367,6 +9367,7 @@ reparse:
+     SHRINK;
+     base = ctxt->input->base;
+     cur = ctxt->input->cur - ctxt->input->base;
++    inputNr = ctxt->inputNr;
+     oldline = ctxt->input->line;
+     oldcol = ctxt->input->col;
+     nbatts = 0;
+@@ -9392,7 +9393,8 @@ reparse:
+      */
+     SKIP_BLANKS;
+     GROW;
+-    if (ctxt->input->base != base) goto base_changed;
++    if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr))
++        goto base_changed;
+ 
+     while (((RAW != '>') &&
+ 	   ((RAW != '/') || (NXT(1) != '>')) &&
+@@ -9403,7 +9405,7 @@ reparse:
+ 
+ 	attname = xmlParseAttribute2(ctxt, prefix, localname,
+ 	                             &aprefix, &attvalue, &len, &alloc);
+-	if (ctxt->input->base != base) {
++	if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) {
+ 	    if ((attvalue != NULL) && (alloc != 0))
+ 	        xmlFree(attvalue);
+ 	    attvalue = NULL;
+@@ -9552,7 +9554,8 @@ skip_ns:
+ 		    break;
+ 		}
+ 		SKIP_BLANKS;
+-		if (ctxt->input->base != base) goto base_changed;
++		if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr))
++		    goto base_changed;
+ 		continue;
+ 	    }
+ 
+@@ -9589,7 +9592,8 @@ failed:
+ 	GROW
+         if (ctxt->instate == XML_PARSER_EOF)
+             break;
+-	if (ctxt->input->base != base) goto base_changed;
++	if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr))
++	    goto base_changed;
+ 	if ((RAW == '>') || (((RAW == '/') && (NXT(1) == '>'))))
+ 	    break;
+ 	if (!IS_BLANK_CH(RAW)) {
+@@ -9605,7 +9609,8 @@ failed:
+ 	    break;
+ 	}
+         GROW;
+-	if (ctxt->input->base != base) goto base_changed;
++	if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr))
++	    goto base_changed;
+     }
+ 
+     /*
+@@ -9772,6 +9777,17 @@ base_changed:
+ 	    if ((ctxt->attallocs[j] != 0) && (atts[i] != NULL))
+ 	        xmlFree((xmlChar *) atts[i]);
+     }
++
++    /*
++     * We can't switch from one entity to another in the middle
++     * of a start tag
++     */
++    if (inputNr != ctxt->inputNr) {
++        xmlFatalErrMsg(ctxt, XML_ERR_ENTITY_BOUNDARY,
++		    "Start tag doesn't start and stop in the same entity\n");
++	return(NULL);
++    }
++
+     ctxt->input->cur = ctxt->input->base + cur;
+     ctxt->input->line = oldline;
+     ctxt->input->col = oldcol;
+diff --git a/result/errors/754946.xml.err b/result/errors/754946.xml.err
+index 423dff5..a75088b 100644
+--- a/result/errors/754946.xml.err
++++ b/result/errors/754946.xml.err
+@@ -11,6 +11,9 @@ Entity: line 1: parser error : DOCTYPE improperly terminated
+ Entity: line 1: 
+ A<lbbbbbbbbbbbbbbbbbbb_
+ ^
++./test/errors/754946.xml:1: parser error : Start tag doesn't start and stop in the same entity
++>%SYSTEM;<![
++         ^
+ ./test/errors/754946.xml:1: parser error : Extra content at the end of the document
+-<!DOCTYPEA[<!ENTITY %
+-  ^
++>%SYSTEM;<![
++         ^
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8035-Fix-XZ-compression-support-loop.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8035-Fix-XZ-compression-support-loop.patch
new file mode 100644
index 0000000..7107355
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8035-Fix-XZ-compression-support-loop.patch
@@ -0,0 +1,38 @@
+From f0709e3ca8f8947f2d91ed34e92e38a4c23eae63 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Tue, 3 Nov 2015 15:31:25 +0800
+Subject: [PATCH] CVE-2015-8035 Fix XZ compression support loop
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=757466
+DoS when parsing specially crafted XML document if XZ support
+is compiled in (which wasn't the case for 2.9.2 and master since
+Nov 2013, fixed in next commit !)
+
+Upstream-Status: Backport
+
+CVE-2015-8035
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ xzlib.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/xzlib.c b/xzlib.c
+index 0dcb9f4..1fab546 100644
+--- a/xzlib.c
++++ b/xzlib.c
+@@ -581,6 +581,10 @@ xz_decomp(xz_statep state)
+             xz_error(state, LZMA_DATA_ERROR, "compressed data error");
+             return -1;
+         }
++        if (ret == LZMA_PROG_ERROR) {
++            xz_error(state, LZMA_PROG_ERROR, "compression error");
++            return -1;
++        }
+     } while (strm->avail_out && ret != LZMA_STREAM_END);
+ 
+     /* update available output and crc check value */
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8242-Buffer-overead-with-HTML-parser-in-pus.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8242-Buffer-overead-with-HTML-parser-in-pus.patch
new file mode 100644
index 0000000..73531b3
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-CVE-2015-8242-Buffer-overead-with-HTML-parser-in-pus.patch
@@ -0,0 +1,49 @@
+From 8fb4a770075628d6441fb17a1e435100e2f3b1a2 Mon Sep 17 00:00:00 2001
+From: Hugh Davenport <hugh@allthethings.co.nz>
+Date: Fri, 20 Nov 2015 17:16:06 +0800
+Subject: [PATCH] CVE-2015-8242 Buffer overead with HTML parser in push mode
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756372
+Error in the code pointing to the codepoint in the stack for the
+current char value instead of the pointer in the input that the SAX
+callback expects
+Reported and fixed by Hugh Davenport
+
+Upstream-Status: Backport
+
+CVE-2015-8242
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ HTMLparser.c | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/HTMLparser.c b/HTMLparser.c
+index bdf7807..b729197 100644
+--- a/HTMLparser.c
++++ b/HTMLparser.c
+@@ -5735,17 +5735,17 @@ htmlParseTryOrFinish(htmlParserCtxtPtr ctxt, int terminate) {
+ 				if (ctxt->keepBlanks) {
+ 				    if (ctxt->sax->characters != NULL)
+ 					ctxt->sax->characters(
+-						ctxt->userData, &cur, 1);
++						ctxt->userData, &in->cur[0], 1);
+ 				} else {
+ 				    if (ctxt->sax->ignorableWhitespace != NULL)
+ 					ctxt->sax->ignorableWhitespace(
+-						ctxt->userData, &cur, 1);
++						ctxt->userData, &in->cur[0], 1);
+ 				}
+ 			    } else {
+ 				htmlCheckParagraph(ctxt);
+ 				if (ctxt->sax->characters != NULL)
+ 				    ctxt->sax->characters(
+-					    ctxt->userData, &cur, 1);
++					    ctxt->userData, &in->cur[0], 1);
+ 			    }
+ 			}
+ 			ctxt->token = 0;
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Fix-a-bug-on-name-parsing-at-the-end-of-current-inpu.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Fix-a-bug-on-name-parsing-at-the-end-of-current-inpu.patch
new file mode 100644
index 0000000..a86b9ee
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Fix-a-bug-on-name-parsing-at-the-end-of-current-inpu.patch
Binary files differ
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7498-Avoid-processing-entities-after-encoding-conversion-.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7498-Avoid-processing-entities-after-encoding-conversion-.patch
new file mode 100644
index 0000000..47ba897
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7498-Avoid-processing-entities-after-encoding-conversion-.patch
@@ -0,0 +1,89 @@
+From afd27c21f6b36e22682b7da20d726bce2dcb2f43 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Mon, 9 Nov 2015 18:07:18 +0800
+Subject: [PATCH] Avoid processing entities after encoding conversion failures
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756527
+and was also raised by Chromium team in the past
+
+When we hit a convwersion failure when switching encoding
+it is bestter to stop parsing there, this was treated as a
+fatal error but the parser was continuing to process to extract
+more errors, unfortunately that makes little sense as the data
+is obviously corrupt and can potentially lead to unexpected behaviour.
+
+Upstream-Status: Backport
+
+CVE-2015-7498
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c          |  7 +++++--
+ parserInternals.c | 11 ++++++++++-
+ 2 files changed, 15 insertions(+), 3 deletions(-)
+
+diff --git a/parser.c b/parser.c
+index 134afe7..c79b4e8 100644
+--- a/parser.c
++++ b/parser.c
+@@ -10665,7 +10665,8 @@ xmlParseXMLDecl(xmlParserCtxtPtr ctxt) {
+ 	xmlFatalErrMsg(ctxt, XML_ERR_SPACE_REQUIRED, "Blank needed here\n");
+     }
+     xmlParseEncodingDecl(ctxt);
+-    if (ctxt->errNo == XML_ERR_UNSUPPORTED_ENCODING) {
++    if ((ctxt->errNo == XML_ERR_UNSUPPORTED_ENCODING) ||
++         (ctxt->instate == XML_PARSER_EOF)) {
+ 	/*
+ 	 * The XML REC instructs us to stop parsing right here
+ 	 */
+@@ -10789,6 +10790,7 @@ xmlParseDocument(xmlParserCtxtPtr ctxt) {
+ 
+     if (CUR == 0) {
+ 	xmlFatalErr(ctxt, XML_ERR_DOCUMENT_EMPTY, NULL);
++	return(-1);
+     }
+ 
+     /*
+@@ -10806,7 +10808,8 @@ xmlParseDocument(xmlParserCtxtPtr ctxt) {
+ 	 * Note that we will switch encoding on the fly.
+ 	 */
+ 	xmlParseXMLDecl(ctxt);
+-	if (ctxt->errNo == XML_ERR_UNSUPPORTED_ENCODING) {
++	if ((ctxt->errNo == XML_ERR_UNSUPPORTED_ENCODING) ||
++	    (ctxt->instate == XML_PARSER_EOF)) {
+ 	    /*
+ 	     * The XML REC instructs us to stop parsing right here
+ 	     */
+diff --git a/parserInternals.c b/parserInternals.c
+index df204fd..c8230c1 100644
+--- a/parserInternals.c
++++ b/parserInternals.c
+@@ -937,6 +937,7 @@ xmlSwitchEncoding(xmlParserCtxtPtr ctxt, xmlCharEncoding enc)
+ {
+     xmlCharEncodingHandlerPtr handler;
+     int len = -1;
++    int ret;
+ 
+     if (ctxt == NULL) return(-1);
+     switch (enc) {
+@@ -1097,7 +1098,15 @@ xmlSwitchEncoding(xmlParserCtxtPtr ctxt, xmlCharEncoding enc)
+     if (handler == NULL)
+ 	return(-1);
+     ctxt->charset = XML_CHAR_ENCODING_UTF8;
+-    return(xmlSwitchToEncodingInt(ctxt, handler, len));
++    ret = xmlSwitchToEncodingInt(ctxt, handler, len);
++    if ((ret < 0) || (ctxt->errNo == XML_I18N_CONV_FAILED)) {
++        /*
++	 * on encoding conversion errors, stop the parser
++	 */
++        xmlStopParser(ctxt);
++	ctxt->errNo = XML_I18N_CONV_FAILED;
++    }
++    return(ret);
+ }
+ 
+ /**
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-1-Add-xmlHaltParser-to-stop-the-parser.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-1-Add-xmlHaltParser-to-stop-the-parser.patch
new file mode 100644
index 0000000..e39ec65
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-1-Add-xmlHaltParser-to-stop-the-parser.patch
@@ -0,0 +1,88 @@
+From 28cd9cb747a94483f4aea7f0968d202c20bb4cfc Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Fri, 20 Nov 2015 14:55:30 +0800
+Subject: [PATCH] Add xmlHaltParser() to stop the parser
+
+The problem is doing it in a consistent and safe fashion
+It's more complex than just setting ctxt->instate = XML_PARSER_EOF
+Update the public function to reuse that new internal routine
+
+Upstream-Status: Backport
+
+CVE-2015-7499-1
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 34 +++++++++++++++++++++++++++++-----
+ 1 file changed, 29 insertions(+), 5 deletions(-)
+
+diff --git a/parser.c b/parser.c
+index da6e729..b6e99b1 100644
+--- a/parser.c
++++ b/parser.c
+@@ -94,6 +94,8 @@ static xmlParserCtxtPtr
+ xmlCreateEntityParserCtxtInternal(const xmlChar *URL, const xmlChar *ID,
+ 	                  const xmlChar *base, xmlParserCtxtPtr pctx);
+ 
++static void xmlHaltParser(xmlParserCtxtPtr ctxt);
++
+ /************************************************************************
+  *									*
+  *	Arbitrary limits set in the parser. See XML_PARSE_HUGE		*
+@@ -12625,25 +12627,47 @@ xmlCreatePushParserCtxt(xmlSAXHandlerPtr sax, void *user_data,
+ #endif /* LIBXML_PUSH_ENABLED */
+ 
+ /**
+- * xmlStopParser:
++ * xmlHaltParser:
+  * @ctxt:  an XML parser context
+  *
+- * Blocks further parser processing
++ * Blocks further parser processing don't override error
++ * for internal use
+  */
+-void
+-xmlStopParser(xmlParserCtxtPtr ctxt) {
++static void
++xmlHaltParser(xmlParserCtxtPtr ctxt) {
+     if (ctxt == NULL)
+         return;
+     ctxt->instate = XML_PARSER_EOF;
+-    ctxt->errNo = XML_ERR_USER_STOP;
+     ctxt->disableSAX = 1;
+     if (ctxt->input != NULL) {
++        /*
++	 * in case there was a specific allocation deallocate before
++	 * overriding base
++	 */
++        if (ctxt->input->free != NULL) {
++	    ctxt->input->free((xmlChar *) ctxt->input->base);
++	    ctxt->input->free = NULL;
++	}
+ 	ctxt->input->cur = BAD_CAST"";
+ 	ctxt->input->base = ctxt->input->cur;
+     }
+ }
+ 
+ /**
++ * xmlStopParser:
++ * @ctxt:  an XML parser context
++ *
++ * Blocks further parser processing
++ */
++void
++xmlStopParser(xmlParserCtxtPtr ctxt) {
++    if (ctxt == NULL)
++        return;
++    xmlHaltParser(ctxt);
++    ctxt->errNo = XML_ERR_USER_STOP;
++}
++
++/**
+  * xmlCreateIOParserCtxt:
+  * @sax:  a SAX handler
+  * @user_data:  The user data returned on SAX callbacks
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-2-Detect-incoherency-on-GROW.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-2-Detect-incoherency-on-GROW.patch
new file mode 100644
index 0000000..aff3920
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7499-2-Detect-incoherency-on-GROW.patch
@@ -0,0 +1,43 @@
+From 35bcb1d758ed70aa7b257c9c3b3ff55e54e3d0da Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Fri, 20 Nov 2015 15:04:09 +0800
+Subject: [PATCH] Detect incoherency on GROW
+
+the current pointer to the input has to be between the base and end
+if not stop everything we have an internal state error.
+
+Upstream-Status: Backport
+
+CVE-2015-7499-2
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/parser.c b/parser.c
+index 1810f99..ab007aa 100644
+--- a/parser.c
++++ b/parser.c
+@@ -2075,9 +2075,16 @@ static void xmlGROW (xmlParserCtxtPtr ctxt) {
+          ((ctxt->input->buf) && (ctxt->input->buf->readcallback != (xmlInputReadCallback) xmlNop)) &&
+         ((ctxt->options & XML_PARSE_HUGE) == 0)) {
+         xmlFatalErr(ctxt, XML_ERR_INTERNAL_ERROR, "Huge input lookup");
+-        ctxt->instate = XML_PARSER_EOF;
++        xmlHaltParser(ctxt);
++	return;
+     }
+     xmlParserInputGrow(ctxt->input, INPUT_CHUNK);
++    if ((ctxt->input->cur > ctxt->input->end) ||
++        (ctxt->input->cur < ctxt->input->base)) {
++        xmlHaltParser(ctxt);
++        xmlFatalErr(ctxt, XML_ERR_INTERNAL_ERROR, "cur index out of bound");
++	return;
++    }
+     if ((ctxt->input->cur != NULL) && (*ctxt->input->cur == 0) &&
+         (xmlParserInputGrow(ctxt->input, INPUT_CHUNK) <= 0))
+ 	    xmlPopInput(ctxt);
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-1-Stop-parsing-on-entities-boundaries-errors.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-1-Stop-parsing-on-entities-boundaries-errors.patch
new file mode 100644
index 0000000..11da9f9
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-1-Stop-parsing-on-entities-boundaries-errors.patch
@@ -0,0 +1,39 @@
+From a7dfab7411cbf545f359dd3157e5df1eb0e7ce31 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Mon, 23 Feb 2015 11:17:35 +0800
+Subject: [PATCH] Stop parsing on entities boundaries errors
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=744980
+
+There are times, like on unterminated entities that it's preferable to
+stop parsing, even if that means less error reporting. Entities are
+feeding the parser on further processing, and if they are ill defined
+then it's possible to get the parser to bug. Also do the same on
+Conditional Sections if the input is broken, as the structure of
+the document can't be guessed.
+
+Upstream-Status: Backport
+
+CVE-2015-7941-1
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/parser.c b/parser.c
+index a8d1b67..bbe97eb 100644
+--- a/parser.c
++++ b/parser.c
+@@ -5658,6 +5658,7 @@ xmlParseEntityDecl(xmlParserCtxtPtr ctxt) {
+ 	if (RAW != '>') {
+ 	    xmlFatalErrMsgStr(ctxt, XML_ERR_ENTITY_NOT_FINISHED,
+ 	            "xmlParseEntityDecl: entity %s not terminated\n", name);
++	    xmlStopParser(ctxt);
+ 	} else {
+ 	    if (input != ctxt->input) {
+ 		xmlFatalErrMsg(ctxt, XML_ERR_ENTITY_BOUNDARY,
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-2-Cleanup-conditional-section-error-handling.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-2-Cleanup-conditional-section-error-handling.patch
new file mode 100644
index 0000000..b7bd960
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7941-2-Cleanup-conditional-section-error-handling.patch
@@ -0,0 +1,56 @@
+From 9b8512337d14c8ddf662fcb98b0135f225a1c489 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Mon, 23 Feb 2015 11:29:20 +0800
+Subject: [PATCH] Cleanup conditional section error handling
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=744980
+
+The error handling of Conditional Section also need to be
+straightened as the structure of the document can't be
+guessed on a failure there and it's better to stop parsing
+as further errors are likely to be irrelevant.
+
+Upstream-Status: Backport
+
+CVE-2015-7941-2
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
+
+diff --git a/parser.c b/parser.c
+index bbe97eb..fe603ac 100644
+--- a/parser.c
++++ b/parser.c
+@@ -6770,6 +6770,8 @@ xmlParseConditionalSections(xmlParserCtxtPtr ctxt) {
+ 	SKIP_BLANKS;
+ 	if (RAW != '[') {
+ 	    xmlFatalErr(ctxt, XML_ERR_CONDSEC_INVALID, NULL);
++	    xmlStopParser(ctxt);
++	    return;
+ 	} else {
+ 	    if (ctxt->input->id != id) {
+ 		xmlValidityError(ctxt, XML_ERR_ENTITY_BOUNDARY,
+@@ -6830,6 +6832,8 @@ xmlParseConditionalSections(xmlParserCtxtPtr ctxt) {
+ 	SKIP_BLANKS;
+ 	if (RAW != '[') {
+ 	    xmlFatalErr(ctxt, XML_ERR_CONDSEC_INVALID, NULL);
++	    xmlStopParser(ctxt);
++	    return;
+ 	} else {
+ 	    if (ctxt->input->id != id) {
+ 		xmlValidityError(ctxt, XML_ERR_ENTITY_BOUNDARY,
+@@ -6885,6 +6889,8 @@ xmlParseConditionalSections(xmlParserCtxtPtr ctxt) {
+ 
+     } else {
+ 	xmlFatalErr(ctxt, XML_ERR_CONDSEC_INVALID_KEYWORD, NULL);
++	xmlStopParser(ctxt);
++	return;
+     }
+ 
+     if (RAW == 0)
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-2-Fix-an-error-in-previous-Conditional-section-patch.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-2-Fix-an-error-in-previous-Conditional-section-patch.patch
new file mode 100644
index 0000000..34b6036
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-2-Fix-an-error-in-previous-Conditional-section-patch.patch
@@ -0,0 +1,35 @@
+From 41ac9049a27f52e7a1f3b341f8714149fc88d450 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Tue, 27 Oct 2015 10:53:44 +0800
+Subject: [PATCH] Fix an error in previous Conditional section patch
+
+an off by one mistake in the change, led to error on correct
+document where the end of the included entity was exactly
+the end of the conditional section, leading to regtest failure
+
+Upstream-Status: Backport
+
+CVE-2015-7942-2
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/parser.c b/parser.c
+index b9217ff..d67b300 100644
+--- a/parser.c
++++ b/parser.c
+@@ -6916,7 +6916,7 @@ xmlParseConditionalSections(xmlParserCtxtPtr ctxt) {
+ 				 NULL, NULL);
+ 	}
+ 	if ((ctxt-> instate != XML_PARSER_EOF) &&
+-	    ((ctxt->input->cur + 3) < ctxt->input->end))
++	    ((ctxt->input->cur + 3) <= ctxt->input->end))
+ 	    SKIP(3);
+     }
+ }
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-Another-variation-of-overflow-in-Conditional-section.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-Another-variation-of-overflow-in-Conditional-section.patch
new file mode 100644
index 0000000..40082ec
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-7942-Another-variation-of-overflow-in-Conditional-section.patch
@@ -0,0 +1,39 @@
+From bd0526e66a56e75a18da8c15c4750db8f801c52d Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Fri, 23 Oct 2015 19:02:28 +0800
+Subject: [PATCH] Another variation of overflow in Conditional sections
+
+Which happen after the previous fix to
+https://bugzilla.gnome.org/show_bug.cgi?id=756456
+
+But stopping the parser and exiting we didn't pop the intermediary entities
+and doing the SKIP there applies on an input which may be too small
+
+Upstream-Status: Backport
+
+CVE-2015-7942
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/parser.c b/parser.c
+index a65e4cc..b9217ff 100644
+--- a/parser.c
++++ b/parser.c
+@@ -6915,7 +6915,9 @@ xmlParseConditionalSections(xmlParserCtxtPtr ctxt) {
+ 	"All markup of the conditional section is not in the same entity\n",
+ 				 NULL, NULL);
+ 	}
+-        SKIP(3);
++	if ((ctxt-> instate != XML_PARSER_EOF) &&
++	    ((ctxt->input->cur + 3) < ctxt->input->end))
++	    SKIP(3);
+     }
+ }
+ 
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8241.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8241.patch
new file mode 100644
index 0000000..89a46ad
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8241.patch
@@ -0,0 +1,40 @@
+From ab2b9a93ff19cedde7befbf2fcc48c6e352b6cbe Mon Sep 17 00:00:00 2001
+From: Hugh Davenport <hugh@allthethings.co.nz>
+Date: Tue, 3 Nov 2015 20:40:49 +0800
+Subject: [PATCH] Avoid extra processing of MarkupDecl when EOF
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=756263
+
+One place where ctxt->instate == XML_PARSER_EOF whic was set up
+by entity detection issues doesn't get noticed, and even overrided
+
+Upstream-status: Backport
+
+https://git.gnome.org/browse/libxml2/commit/?id=ab2b9a93ff19cedde7befbf2fcc48c6e352b6cbe
+
+CVE: CVE-2015-8241
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 8 ++++++++
+ 1 file changed, 8 insertions(+)
+
+Index: libxml2-2.9.2/parser.c
+===================================================================
+--- libxml2-2.9.2.orig/parser.c
++++ libxml2-2.9.2/parser.c
+@@ -6999,6 +6999,14 @@ xmlParseMarkupDecl(xmlParserCtxtPtr ctxt
+ 	    xmlParsePI(ctxt);
+ 	}
+     }
++
++    /*
++     * detect requirement to exit there and act accordingly
++     * and avoid having instate overriden later on
++     */
++    if (ctxt->instate == XML_PARSER_EOF)
++        return;
++
+     /*
+      * This is only for internal subset. On external entities,
+      * the replacement is done before parsing stage
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8317-Fail-parsing-early-on-if-encoding-conversion-failed.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8317-Fail-parsing-early-on-if-encoding-conversion-failed.patch
new file mode 100644
index 0000000..59425cb
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8317-Fail-parsing-early-on-if-encoding-conversion-failed.patch
@@ -0,0 +1,42 @@
+From 709a952110e98621c9b78c4f26462a9d8333102e Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Mon, 29 Jun 2015 16:10:26 +0800
+Subject: [PATCH] Fail parsing early on if encoding conversion failed
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=751631
+
+If we fail conversing the current input stream while
+processing the encoding declaration of the XMLDecl
+then it's safer to just abort there and not try to
+report further errors.
+
+Upstream-Status: Backport
+
+CVE-2015-8317
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ parser.c | 6 +++++-
+ 1 file changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/parser.c b/parser.c
+index a3a9568..0edd53b 100644
+--- a/parser.c
++++ b/parser.c
+@@ -10471,7 +10471,11 @@ xmlParseEncodingDecl(xmlParserCtxtPtr ctxt) {
+ 
+             handler = xmlFindCharEncodingHandler((const char *) encoding);
+ 	    if (handler != NULL) {
+-		xmlSwitchToEncoding(ctxt, handler);
++		if (xmlSwitchToEncoding(ctxt, handler) < 0) {
++		    /* failed to convert */
++		    ctxt->errNo = XML_ERR_UNSUPPORTED_ENCODING;
++		    return(NULL);
++		}
+ 	    } else {
+ 		xmlFatalErrMsgStr(ctxt, XML_ERR_UNSUPPORTED_ENCODING,
+ 			"Unsupported encoding %s\n", encoding);
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8710.patch b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8710.patch
new file mode 100644
index 0000000..be06cc2
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2015-8710.patch
@@ -0,0 +1,71 @@
+From e724879d964d774df9b7969fc846605aa1bac54c Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Fri, 30 Oct 2015 21:14:55 +0800
+Subject: [PATCH] Fix parsing short unclosed comment uninitialized access
+
+For https://bugzilla.gnome.org/show_bug.cgi?id=746048
+The HTML parser was too optimistic when processing comments and
+didn't check for the end of the stream on the first 2 characters
+
+Upstream-Status: Backport
+
+https://git.gnome.org/browse/libxml2/commit/?id=e724879d964d774df9b7969fc846605aa1bac54c
+
+CVE: CVE-2015-8710
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ HTMLparser.c | 21 ++++++++++++++-------
+ 1 file changed, 14 insertions(+), 7 deletions(-)
+
+Index: libxml2-2.9.2/HTMLparser.c
+===================================================================
+--- libxml2-2.9.2.orig/HTMLparser.c
++++ libxml2-2.9.2/HTMLparser.c
+@@ -3245,12 +3245,17 @@ htmlParseComment(htmlParserCtxtPtr ctxt)
+ 	ctxt->instate = state;
+ 	return;
+     }
++    len = 0;
++    buf[len] = 0;
+     q = CUR_CHAR(ql);
++    if (!IS_CHAR(q))
++        goto unfinished;
+     NEXTL(ql);
+     r = CUR_CHAR(rl);
++    if (!IS_CHAR(r))
++        goto unfinished;
+     NEXTL(rl);
+     cur = CUR_CHAR(l);
+-    len = 0;
+     while (IS_CHAR(cur) &&
+            ((cur != '>') ||
+ 	    (r != '-') || (q != '-'))) {
+@@ -3281,18 +3286,20 @@ htmlParseComment(htmlParserCtxtPtr ctxt)
+ 	}
+     }
+     buf[len] = 0;
+-    if (!IS_CHAR(cur)) {
+-	htmlParseErr(ctxt, XML_ERR_COMMENT_NOT_FINISHED,
+-	             "Comment not terminated \n<!--%.50s\n", buf, NULL);
+-	xmlFree(buf);
+-    } else {
++    if (IS_CHAR(cur)) {
+         NEXT;
+ 	if ((ctxt->sax != NULL) && (ctxt->sax->comment != NULL) &&
+ 	    (!ctxt->disableSAX))
+ 	    ctxt->sax->comment(ctxt->userData, buf);
+ 	xmlFree(buf);
++	ctxt->instate = state;
++	return;
+     }
+-    ctxt->instate = state;
++
++unfinished:
++    htmlParseErr(ctxt, XML_ERR_COMMENT_NOT_FINISHED,
++		 "Comment not terminated \n<!--%.50s\n", buf, NULL);
++    xmlFree(buf);
+ }
+ 
+ /**
diff --git a/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb b/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb
index 2f92912..86c57cd 100644
--- a/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb
+++ b/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb
@@ -13,5 +13,4 @@
   toolchain_create_tree_env_script
 }
 
-do_populate_ide_support[nostamp] = "1"
 addtask populate_ide_support before do_build after do_install
diff --git a/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb b/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
index d971c3c..9041734 100644
--- a/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
+++ b/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
@@ -2,10 +2,17 @@
 LICENSE = "MIT"
 LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
 
-inherit nativesdk
+inherit allarch
 
-# Put it somewhere separate to ensure it's never used except when we want it
-PACKAGE_ARCH = "buildtools-dummy-${SDKPKGSUFFIX}"
+PR = "r1"
+
+python() {
+    # Put the package somewhere separate to ensure it's never used except
+    # when we want it
+    # (note that we have to do this in anonymous python here to avoid
+    # allarch.bbclass disabling itself)
+    d.setVar('PACKAGE_ARCH', 'buildtools-dummy-${SDKPKGSUFFIX}')
+}
 
 PERLPACKAGES = "nativesdk-perl \
                 nativesdk-perl-module-file-path"
diff --git a/yocto-poky/meta/recipes-core/meta/signing-keys.bb b/yocto-poky/meta/recipes-core/meta/signing-keys.bb
new file mode 100644
index 0000000..cc401f3
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/meta/signing-keys.bb
@@ -0,0 +1,45 @@
+# Copyright (C) 2015 Intel Corporation
+# Released under the MIT license (see COPYING.MIT for the terms)
+
+DESCRIPTION = "Make public keys of the signing keys available"
+LICENSE = "MIT"
+PACKAGES = ""
+
+do_fetch[noexec] = "1"
+do_unpack[noexec] = "1"
+do_patch[noexec] = "1"
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+do_install[noexec] = "1"
+do_package[noexec] = "1"
+do_packagedata[noexec] = "1"
+do_package_write_ipk[noexec] = "1"
+do_package_write_rpm[noexec] = "1"
+do_package_write_deb[noexec] = "1"
+do_populate_sysroot[noexec] = "1"
+
+EXCLUDE_FROM_WORLD = "1"
+
+def export_gpg_pubkey(d, keyid, path):
+    import bb
+    gpg_bin = d.getVar('GPG_BIN', True) or \
+              bb.utils.which(os.getenv('PATH'), "gpg")
+    cmd = '%s --batch --yes --export --armor -o %s %s' % \
+          (gpg_bin, path, keyid)
+    status, output = oe.utils.getstatusoutput(cmd)
+    if status:
+        raise bb.build.FuncFailed('Failed to export gpg public key (%s): %s' %
+                                  (keyid, output))
+
+python do_export_public_keys () {
+    if d.getVar("RPM_SIGN_PACKAGES", True):
+        # Export public key of the rpm signing key
+        export_gpg_pubkey(d, d.getVar("RPM_GPG_NAME", True),
+                          d.getVar('RPM_GPG_PUBKEY', True))
+
+    if d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+        # Export public key of the feed signing key
+        export_gpg_pubkey(d, d.getVar("PACKAGE_FEED_GPG_NAME", True),
+                          d.getVar('PACKAGE_FEED_GPG_PUBKEY', True))
+}
+addtask do_export_public_keys before do_build
diff --git a/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb b/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb
index 41f7927..21f3bd9 100644
--- a/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb
+++ b/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb
@@ -7,6 +7,7 @@
 
 TOOLCHAIN_HOST_TASK = "\
     nativesdk-glibc \
+    nativesdk-glibc-gconv-ibm850 \
     nativesdk-patchelf \
     "
 
diff --git a/yocto-poky/meta/recipes-core/os-release/os-release.bb b/yocto-poky/meta/recipes-core/os-release/os-release.bb
index cc431d2..c690b82 100644
--- a/yocto-poky/meta/recipes-core/os-release/os-release.bb
+++ b/yocto-poky/meta/recipes-core/os-release/os-release.bb
@@ -32,11 +32,12 @@
                 f.write('{0}={1}\n'.format(field, value))
     if d.getVar('RPM_SIGN_PACKAGES', True) == '1':
         rpm_gpg_pubkey = d.getVar('RPM_GPG_PUBKEY', True)
-        os.mkdir('${B}/rpm-gpg')
-        distro_version = self.d.getVar('DISTRO_VERSION', True) or "oe.0"
+        bb.utils.mkdirhier('${B}/rpm-gpg')
+        distro_version = d.getVar('DISTRO_VERSION', True) or "oe.0"
         shutil.copy2(rpm_gpg_pubkey, d.expand('${B}/rpm-gpg/RPM-GPG-KEY-%s' % distro_version))
 }
 do_compile[vardeps] += "${OS_RELEASE_FIELDS}"
+do_compile[depends] += "signing-keys:do_export_public_keys"
 
 do_install () {
     install -d ${D}${sysconfdir}
diff --git a/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb b/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
index 37f5e43..6997f39 100644
--- a/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
+++ b/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
@@ -7,6 +7,8 @@
 RDEPENDS_${PN} = "\
     libgcc \
     libgcc-dev \
+    libatomic \
+    libatomic-dev \
     libstdc++ \
     libstdc++-dev \
     ${LIBC_DEPENDENCIES} \
diff --git a/yocto-poky/meta/recipes-core/readline/readline-6.3/readline63-003 b/yocto-poky/meta/recipes-core/readline/readline-6.3/readline-cve-2014-2524.patch
similarity index 100%
rename from yocto-poky/meta/recipes-core/readline/readline-6.3/readline63-003
rename to yocto-poky/meta/recipes-core/readline/readline-6.3/readline-cve-2014-2524.patch
diff --git a/yocto-poky/meta/recipes-core/readline/readline_6.3.bb b/yocto-poky/meta/recipes-core/readline/readline_6.3.bb
index 55964a6..fc362ae 100644
--- a/yocto-poky/meta/recipes-core/readline/readline_6.3.bb
+++ b/yocto-poky/meta/recipes-core/readline/readline_6.3.bb
@@ -1,6 +1,6 @@
 require readline.inc
 
-SRC_URI += "file://readline63-003 \
+SRC_URI += "file://readline-cve-2014-2524.patch;striplevel=0 \
             file://readline-dispatch-multikey.patch"
 
 SRC_URI[archive.md5sum] = "33c8fb279e981274f485fd91da77e94a"
diff --git a/yocto-poky/meta/recipes-core/systemd/systemd/0001-fix-build-on-uClibc-exp10.patch b/yocto-poky/meta/recipes-core/systemd/systemd/0001-fix-build-on-uClibc-exp10.patch
new file mode 100644
index 0000000..76ce4b7
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/systemd/systemd/0001-fix-build-on-uClibc-exp10.patch
@@ -0,0 +1,22 @@
+Inspired by: http://peter.korsgaard.com/patches/alsa-utils/alsamixer-fix-build-on-uClibc-exp10.patch
+
+exp10 extension is not part of uClibc, so compute it.
+
+
+Signed-off-by: Samuel Martin <s.martin49@gmail.com>
+
+Upstream-Status: Pending
+
+Index: git/src/basic/missing.h
+===================================================================
+--- git.orig/src/basic/missing.h
++++ git/src/basic/missing.h
+@@ -1036,3 +1036,8 @@ static inline int kcmp(pid_t pid1, pid_t
+ #ifndef INPUT_PROP_ACCELEROMETER
+ #define INPUT_PROP_ACCELEROMETER  0x06
+ #endif
++
++#ifdef __UCLIBC__
++/* 10^x = 10^(log e^x) = (e^x)^log10 = e^(x * log 10) */
++#define exp10(x) (exp((x) * log(10)))
++#endif /* __UCLIBC__ */
diff --git a/yocto-poky/meta/recipes-core/systemd/systemd/0022-Use-getenv-when-secure-versions-are-not-available.patch b/yocto-poky/meta/recipes-core/systemd/systemd/0022-Use-getenv-when-secure-versions-are-not-available.patch
new file mode 100644
index 0000000..30e3817
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/systemd/systemd/0022-Use-getenv-when-secure-versions-are-not-available.patch
@@ -0,0 +1,39 @@
+From cb71e4beea3b3b11e5951f95c829cd2eee9fcf7b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 12 Sep 2015 19:10:04 +0000
+Subject: [PATCH 22/31] Use getenv when secure versions are not available
+
+musl doesnt implement secure version, so we default
+to it if configure does not detect a secure imeplementation
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Rejected
+
+ src/basic/missing.h | 5 +++--
+ 1 file changed, 3 insertions(+), 2 deletions(-)
+
+diff --git a/src/basic/missing.h b/src/basic/missing.h
+index bf9b490..d6dbc7d 100644
+--- a/src/basic/missing.h
++++ b/src/basic/missing.h
+@@ -584,13 +584,14 @@ static inline int name_to_handle_at(int fd, const char *name, struct file_handle
+         return syscall(__NR_name_to_handle_at, fd, name, handle, mnt_id, flags);
+ }
+ #endif
+-
+-#ifndef HAVE_SECURE_GETENV
++#ifdef HAVE_SECURE_GETENV
+ #  ifdef HAVE___SECURE_GETENV
+ #    define secure_getenv __secure_getenv
+ #  else
+ #    error "neither secure_getenv nor __secure_getenv are available"
+ #  endif
++#else
++#  define secure_getenv getenv
+ #endif
+ 
+ #ifndef CIFS_MAGIC_NUMBER
+-- 
+2.5.2
+
diff --git a/yocto-poky/meta/recipes-core/systemd/systemd/rules-whitelist-hd-devices.patch b/yocto-poky/meta/recipes-core/systemd/systemd/rules-whitelist-hd-devices.patch
new file mode 100644
index 0000000..8975b05
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/systemd/systemd/rules-whitelist-hd-devices.patch
@@ -0,0 +1,32 @@
+From f77b7e5626e70c3a775e993816a33af5a61dea42 Mon Sep 17 00:00:00 2001
+From: Patrick Ohly <patrick.ohly@intel.com>
+Date: Wed, 16 Sep 2015 13:55:58 +0200
+Subject: [PATCH] rules: whitelist hd* devices
+
+qemu by default emulates IDE and the linux-yocto kernel(s) use
+CONFIG_IDE instead of the more modern libsata, so disks appear as
+/dev/hd*. Patch rejected upstream because CONFIG_IDE is deprecated.
+
+Upstream-Status: Denied [https://github.com/systemd/systemd/pull/1276]
+
+Signed-off-by: Patrick Ohly <patrick.ohly@intel.com>
+---
+ rules/60-persistent-storage.rules | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/rules/60-persistent-storage.rules b/rules/60-persistent-storage.rules
+index 0b14bb4..1c4d97a 100644
+--- a/rules/60-persistent-storage.rules
++++ b/rules/60-persistent-storage.rules
+@@ -6,7 +6,7 @@
+ ACTION=="remove", GOTO="persistent_storage_end"
+ 
+ SUBSYSTEM!="block", GOTO="persistent_storage_end"
+-KERNEL!="loop*|mmcblk*[0-9]|msblk*[0-9]|mspblk*[0-9]|nvme*|sd*|sr*|vd*|xvd*|bcache*|cciss*|dasd*|ubd*", GOTO="persistent_storage_end"
++KERNEL!="loop*|mmcblk*[0-9]|msblk*[0-9]|mspblk*[0-9]|nvme*|hd*|sd*|sr*|vd*|xvd*|bcache*|cciss*|dasd*|ubd*", GOTO="persistent_storage_end"
+ 
+ # ignore partitions that span the entire disk
+ TEST=="whole_disk", GOTO="persistent_storage_end"
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-core/systemd/systemd_225.bb b/yocto-poky/meta/recipes-core/systemd/systemd_225.bb
index f7d4c7d..18c2448 100644
--- a/yocto-poky/meta/recipes-core/systemd/systemd_225.bb
+++ b/yocto-poky/meta/recipes-core/systemd/systemd_225.bb
@@ -18,7 +18,7 @@
 
 PE = "1"
 
-DEPENDS = "kmod docbook-sgml-dtd-4.1-native intltool-native gperf-native acl readline dbus libcap libcgroup glib-2.0 qemu-native util-linux"
+DEPENDS = "kmod docbook-sgml-dtd-4.1-native intltool-native gperf-native acl readline dbus libcap libcgroup qemu-native util-linux"
 
 SECTION = "base/shell"
 
@@ -45,6 +45,7 @@
            file://00-create-volatile.conf \
            file://init \
            file://run-ptest \
+           file://rules-whitelist-hd-devices.patch \
           "
 SRC_URI_append_qemuall = " file://qemuall_io_latency-core-device.c-Change-the-default-device-timeout-to-2.patch"
 
@@ -52,6 +53,8 @@
 
 SRC_URI_append_libc-uclibc = "\
             file://0001-units-Prefer-getty-to-agetty-in-console-setup-system.patch \
+            file://0022-Use-getenv-when-secure-versions-are-not-available.patch \
+            file://0001-fix-build-on-uClibc-exp10.patch \
            "
 LDFLAGS_append_libc-uclibc = " -lrt"
 
@@ -87,6 +90,7 @@
 PACKAGECONFIG[ldconfig] = "--enable-ldconfig,--disable-ldconfig,,"
 PACKAGECONFIG[selinux] = "--enable-selinux,--disable-selinux,libselinux"
 PACKAGECONFIG[valgrind] = "ac_cv_header_valgrind_memcheck_h=yes ac_cv_header_valgrind_valgrind_h=yes ,ac_cv_header_valgrind_memcheck_h=no ac_cv_header_valgrind_valgrind_h=no ,valgrind"
+PACKAGECONFIG[qrencode] = "--enable-qrencode,--disable-qrencode,qrencode"
 
 CACHED_CONFIGUREVARS += "ac_cv_path_KILL=${base_bindir}/kill"
 CACHED_CONFIGUREVARS += "ac_cv_path_KMOD=${base_bindir}/kmod"
@@ -123,6 +127,9 @@
 # uclibc does not have NSS
 EXTRA_OECONF_append_libc-uclibc = " --disable-myhostname "
 
+# disable problematic GCC 5.2 optimizations [YOCTO #8291]
+FULL_OPTIMIZATION_append_arm = " -fno-schedule-insns -fno-schedule-insns2"
+
 do_configure_prepend() {
 	export NM="${HOST_PREFIX}gcc-nm"
 	export AR="${HOST_PREFIX}gcc-ar"
@@ -186,8 +193,8 @@
 	sed -i -e 's/.*ForwardToSyslog.*/ForwardToSyslog=yes/' ${D}${sysconfdir}/systemd/journald.conf
 	# this file is needed to exist if networkd is disabled but timesyncd is still in use since timesyncd checks it
 	# for existence else it fails
-	if [ -s ${D}${libdir}/tmpfiles.d/systemd.conf ]; then
-		${@bb.utils.contains('PACKAGECONFIG', 'networkd', ':', 'sed -i -e "\$ad /run/systemd/netif/links 0755 root root -" ${D}${libdir}/tmpfiles.d/systemd.conf', d)}
+	if [ -s ${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf ]; then
+		${@bb.utils.contains('PACKAGECONFIG', 'networkd', ':', 'sed -i -e "\$ad /run/systemd/netif/links 0755 root root -" ${D}${exec_prefix}/lib/tmpfiles.d/systemd.conf', d)}
 	fi
 	install -Dm 0755 ${S}/src/systemctl/systemd-sysv-install.SKELETON ${D}${systemd_unitdir}/systemd-sysv-install
 }
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git.inc b/yocto-poky/meta/recipes-core/uclibc/uclibc-git.inc
index 14a577f..b718479 100644
--- a/yocto-poky/meta/recipes-core/uclibc/uclibc-git.inc
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git.inc
@@ -16,5 +16,10 @@
         file://0005-Always-use-O2-for-compiling-fork.c.patch \
         file://0006-ldso-limited-support-for-ORIGIN-in-rpath.patch \
         file://0007-nptl-atfork-Hide-pthread_atfork-in-shared-versions.patch \
+        file://0001-gcc5-optimizes-away-the-write-only-static-functions-.patch \
+        file://0001-fcntl-Add-AT_EMPTY_PATH-for-all-and-O_PATH-for-arm.patch \
+        file://0001-wire-in-syncfs.patch \
+        file://CVE-2016-2224.patch \
+        file://CVE-2016-2225.patch \
 "
 S = "${WORKDIR}/git"
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-fcntl-Add-AT_EMPTY_PATH-for-all-and-O_PATH-for-arm.patch b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-fcntl-Add-AT_EMPTY_PATH-for-all-and-O_PATH-for-arm.patch
new file mode 100644
index 0000000..6942db4
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-fcntl-Add-AT_EMPTY_PATH-for-all-and-O_PATH-for-arm.patch
@@ -0,0 +1,42 @@
+From 4c8f5fe7d41493e8e181941ae5a01713155f44d1 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 15 Oct 2015 15:34:39 +0000
+Subject: [PATCH] fcntl: Add AT_EMPTY_PATH for all and O_PATH for arm
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ include/fcntl.h                     | 3 +++
+ libc/sysdeps/linux/arm/bits/fcntl.h | 1 +
+ 2 files changed, 4 insertions(+)
+
+diff --git a/include/fcntl.h b/include/fcntl.h
+index 11000dd..8a7ad9b 100644
+--- a/include/fcntl.h
++++ b/include/fcntl.h
+@@ -65,6 +65,9 @@ __BEGIN_DECLS
+ # define AT_SYMLINK_FOLLOW	0x400	/* Follow symbolic links.  */
+ # define AT_EACCESS		0x200	/* Test access permitted for
+ 					   effective IDs, not real IDs.  */
++# ifdef __USE_GNU
++#  define AT_EMPTY_PATH         0x1000  /* Allow empty relative pathname.  */
++# endif
+ #endif
+ 
+ /* Do the file control operation described by CMD on FD.
+diff --git a/libc/sysdeps/linux/arm/bits/fcntl.h b/libc/sysdeps/linux/arm/bits/fcntl.h
+index aedc154..c6ba958 100644
+--- a/libc/sysdeps/linux/arm/bits/fcntl.h
++++ b/libc/sysdeps/linux/arm/bits/fcntl.h
+@@ -50,6 +50,7 @@
+ # define O_DIRECT	0200000	/* Direct disk access.	*/
+ # define O_NOATIME     01000000 /* Do not set atime.  */
+ # define O_CLOEXEC     02000000 /* Set close_on_exec.  */
++# define O_PATH       010000000 /* Resolve pathname but do not open file.  */
+ #endif
+ 
+ /* For now Linux has synchronisity options for data and read operations.
+-- 
+2.6.1
+
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-gcc5-optimizes-away-the-write-only-static-functions-.patch b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-gcc5-optimizes-away-the-write-only-static-functions-.patch
new file mode 100644
index 0000000..e622f87
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-gcc5-optimizes-away-the-write-only-static-functions-.patch
@@ -0,0 +1,51 @@
+From 2659fb25d32f4b29c1c96aa5730fe40e19d53ab0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 14 Oct 2015 17:38:37 -0700
+Subject: [PATCH] gcc5 optimizes away the write only static functions and we
+ end up with
+
+  librt/librt_so.a(rt-unwind-resume.oS): In function `_Unwind_Resume':
+  rt-unwind-resume.c:(.text+0x3c): undefined reference to `libgcc_s_resume'
+  collect2: error: ld returned 1 exit status
+  make[2]: *** [lib/librt.so] Error 1
+
+marking these functions explicitly used with __attribute_used__ avoids
+that optimization.
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c | 2 +-
+ libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-resume.c       | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c b/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c
+index f4d6f41..0c2edd7 100644
+--- a/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c
++++ b/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c
+@@ -27,7 +27,7 @@
+ #define __libc_fatal(x)         {/*write(STDERR_FILENO, x, strlen(x));*/ abort();}
+ 
+ static void *libgcc_s_handle;
+-static void (*libgcc_s_resume) (struct _Unwind_Exception *exc);
++static void (*libgcc_s_resume) (struct _Unwind_Exception *exc) __attribute_used__;
+ static _Unwind_Reason_Code (*libgcc_s_personality)
+   (_Unwind_State, struct _Unwind_Exception *, struct _Unwind_Context *);
+ static _Unwind_Reason_Code (*libgcc_s_forcedunwind)
+diff --git a/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-resume.c b/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-resume.c
+index f9a4ffb..f0c3047 100644
+--- a/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-resume.c
++++ b/libpthread/nptl/sysdeps/unix/sysv/linux/arm/unwind-resume.c
+@@ -25,7 +25,7 @@
+ #define __libc_dlclose          dlclose
+ #define __libc_fatal(x)         {/*write(STDERR_FILENO, x, strlen(x));*/ abort();}
+ 
+-static void (*libgcc_s_resume) (struct _Unwind_Exception *exc);
++static void (*libgcc_s_resume) (struct _Unwind_Exception *exc) __attribute_used__;
+ static _Unwind_Reason_Code (*libgcc_s_personality)
+   (_Unwind_State, struct _Unwind_Exception *, struct _Unwind_Context *);
+ 
+-- 
+2.6.1
+
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-wire-in-syncfs.patch b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-wire-in-syncfs.patch
new file mode 100644
index 0000000..079ad6b
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/0001-wire-in-syncfs.patch
@@ -0,0 +1,49 @@
+From 4f2db1b46bda5e376245ec36198b137709f069e8 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 15 Oct 2015 17:03:37 +0000
+Subject: [PATCH] wire in syncfs
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ include/unistd.h                   |  2 +-
+ libc/sysdeps/linux/common/syncfs.c | 13 +++++++++++++
+ 2 files changed, 14 insertions(+), 1 deletion(-)
+ create mode 100644 libc/sysdeps/linux/common/syncfs.c
+
+diff --git a/include/unistd.h b/include/unistd.h
+index 3793d2d..d01bb08 100644
+--- a/include/unistd.h
++++ b/include/unistd.h
+@@ -1073,7 +1073,7 @@ extern char *getpass (const char *__prompt) __nonnull ((1));
+ extern int fsync (int __fd);
+ #endif /* Use BSD || X/Open || Unix98.  */
+ 
+-#if 0 /*def __USE_GNU */
++#ifdef __USE_GNU
+ /* Make all changes done to all files on the file system associated
+  *    with FD actually appear on disk.  */
+ extern int syncfs (int __fd) __THROW;
+diff --git a/libc/sysdeps/linux/common/syncfs.c b/libc/sysdeps/linux/common/syncfs.c
+new file mode 100644
+index 0000000..d2eed05
+--- /dev/null
++++ b/libc/sysdeps/linux/common/syncfs.c
+@@ -0,0 +1,13 @@
++/* vi: set sw=4 ts=4: */
++/*
++ * fsync() for uClibc
++ *
++ * Copyright (C) 2000-2006 Erik Andersen <andersen@uclibc.org>
++ *
++ * Licensed under the LGPL v2.1, see the file COPYING.LIB in this tarball.
++ */
++
++#include <sys/syscall.h>
++#include <unistd.h>
++
++_syscall1(int, syncfs, int, fd)
+-- 
+2.6.1
+
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2224.patch b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2224.patch
new file mode 100644
index 0000000..218b60a
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2224.patch
@@ -0,0 +1,49 @@
+From 16719c1a7078421928e6d31dd1dec574825ef515 Mon Sep 17 00:00:00 2001
+From: Waldemar Brodkorb <wbx@openadk.org>
+Date: Sun, 17 Jan 2016 15:47:22 +0100
+Subject: [PATCH] Do not follow compressed items forever.
+
+It is possible to get stuck in an infinite loop when receiving a
+specially crafted DNS reply. Exit the loop after a number of iteration
+and consider the packet invalid.
+
+Signed-off-by: Daniel Fahlgren <daniel@fahlgren.se>
+Signed-off-by: Waldemar Brodkorb <wbx@uclibc-ng.org>
+
+Upstream-status: Backport
+http://repo.or.cz/uclibc-ng.git/commit/16719c1a7078421928e6d31dd1dec574825ef515
+
+CVE: CVE-2016-2224
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ libc/inet/resolv.c | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+Index: git/libc/inet/resolv.c
+===================================================================
+--- git.orig/libc/inet/resolv.c
++++ git/libc/inet/resolv.c
+@@ -666,11 +666,12 @@ int __decode_dotted(const unsigned char
+ 	bool measure = 1;
+ 	unsigned total = 0;
+ 	unsigned used = 0;
++	unsigned maxiter = 256;
+ 
+ 	if (!packet)
+ 		return -1;
+ 
+-	while (1) {
++	while (--maxiter) {
+ 		if (offset >= packet_len)
+ 			return -1;
+ 		b = packet[offset++];
+@@ -707,6 +708,8 @@ int __decode_dotted(const unsigned char
+ 		else
+ 			dest[used++] = '\0';
+ 	}
++	if (!maxiter)
++		return -1;
+ 
+ 	/* The null byte must be counted too */
+ 	if (measure)
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2225.patch b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2225.patch
new file mode 100644
index 0000000..0217e4b
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/CVE-2016-2225.patch
@@ -0,0 +1,32 @@
+From bb01edff0377f2585ce304ecbadcb7b6cde372ac Mon Sep 17 00:00:00 2001
+From: Waldemar Brodkorb <wbx@openadk.org>
+Date: Mon, 25 Jan 2016 21:11:34 +0100
+Subject: [PATCH] Make sure to always terminate decoded string
+
+Write a terminating '\0' to dest when the first byte of the encoded data
+is 0. This corner case was previously missed.
+
+Signed-off-by: Daniel Fahlgren <daniel@fahlgren.se>
+Signed-off-by: Waldemar Brodkorb <wbx@uclibc-ng.org>
+
+Upstream-Status: Backport
+http://repo.or.cz/uclibc-ng.git/commit/bb01edff0377f2585ce304ecbadcb7b6cde372ac
+CVE: CVE-2016-2225
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ libc/inet/resolv.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+Index: git/libc/inet/resolv.c
+===================================================================
+--- git.orig/libc/inet/resolv.c
++++ git/libc/inet/resolv.c
+@@ -671,6 +671,7 @@ int __decode_dotted(const unsigned char
+ 	if (!packet)
+ 		return -1;
+ 
++	dest[0] = '\0';
+ 	while (--maxiter) {
+ 		if (offset >= packet_len)
+ 			return -1;
diff --git a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/uClibc.distro b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/uClibc.distro
index 6575482..3827b66 100644
--- a/yocto-poky/meta/recipes-core/uclibc/uclibc-git/uClibc.distro
+++ b/yocto-poky/meta/recipes-core/uclibc/uclibc-git/uClibc.distro
@@ -182,6 +182,8 @@
 
 # COMPILE_IN_THUMB_MODE is not set
 
+# needed by shadow
+UCLIBC_HAS_UTMP=y
 # needed by systemd
 UCLIBC_HAS_UTMPX=y
 UCLIBC_LINUX_MODULE_26=y
diff --git a/yocto-poky/meta/recipes-core/udev/udev.inc b/yocto-poky/meta/recipes-core/udev/udev.inc
index a00dad5..c378ae3 100644
--- a/yocto-poky/meta/recipes-core/udev/udev.inc
+++ b/yocto-poky/meta/recipes-core/udev/udev.inc
@@ -15,6 +15,8 @@
 DEPENDS = "glib-2.0 libusb usbutils pciutils glib-2.0-native gperf-native libxslt-native util-linux"
 RPROVIDES_${PN} = "hotplug"
 
+PROVIDES = "libgudev"
+
 SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/hotplug/udev-${PV}.tar.gz \
            file://0001-Fixing-keyboard_force_release.sh-shell-script-path.patch \
            file://avoid-mouse-autosuspend.patch \
diff --git a/yocto-poky/meta/recipes-core/util-linux/util-linux.inc b/yocto-poky/meta/recipes-core/util-linux/util-linux.inc
index a4072bc..594108f 100644
--- a/yocto-poky/meta/recipes-core/util-linux/util-linux.inc
+++ b/yocto-poky/meta/recipes-core/util-linux/util-linux.inc
@@ -163,6 +163,12 @@
 	echo 'MOUNTALL="-t nonfs,nosmbfs,noncpfs"' > ${D}${sysconfdir}/default/mountall
 
 	rm -f ${D}${bindir}/chkdupexe
+
+	if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then
+		install -d ${D}${sysconfdir}/pam.d
+		install -m 0644 ${WORKDIR}/runuser.pamd ${D}${sysconfdir}/pam.d/runuser
+		install -m 0644 ${WORKDIR}/runuser-l.pamd ${D}${sysconfdir}/pam.d/runuser-l
+	fi
 }
 
 # reset and nologin causes a conflict with ncurses-native and shadow-native
diff --git a/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser-l.pamd b/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser-l.pamd
new file mode 100644
index 0000000..4b368cc
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser-l.pamd
@@ -0,0 +1,3 @@
+auth	include		runuser
+session	optional	pam_keyinit.so force revoke
+session include		runuser
diff --git a/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser.pamd b/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser.pamd
new file mode 100644
index 0000000..48d133b
--- /dev/null
+++ b/yocto-poky/meta/recipes-core/util-linux/util-linux/runuser.pamd
@@ -0,0 +1,4 @@
+auth	sufficient	pam_rootok.so
+session	optional	pam_keyinit.so revoke
+session	required	pam_limits.so
+session	required	pam_unix.so
diff --git a/yocto-poky/meta/recipes-core/util-linux/util-linux_2.26.2.bb b/yocto-poky/meta/recipes-core/util-linux/util-linux_2.26.2.bb
index fc7dc6e..e09fdfa 100644
--- a/yocto-poky/meta/recipes-core/util-linux/util-linux_2.26.2.bb
+++ b/yocto-poky/meta/recipes-core/util-linux/util-linux_2.26.2.bb
@@ -13,6 +13,8 @@
             file://uclibc-__progname-conflict.patch \
             file://configure-sbindir.patch \
             file://fix-parallel-build.patch \
+            file://runuser.pamd \
+            file://runuser-l.pamd \
             ${OLDHOST} \
 "
 SRC_URI[md5sum] = "9bdf368c395f1b70325d0eb22c7f48fb"
diff --git a/yocto-poky/meta/recipes-devtools/binutils/binutils/binutils-octeon3.patch b/yocto-poky/meta/recipes-devtools/binutils/binutils/binutils-octeon3.patch
index 6108c0d..4e8c69f 100644
--- a/yocto-poky/meta/recipes-devtools/binutils/binutils/binutils-octeon3.patch
+++ b/yocto-poky/meta/recipes-devtools/binutils/binutils/binutils-octeon3.patch
@@ -229,7 +229,7 @@
 +  { "octeon3",   1, bfd_mach_mips_octeon3, CPU_OCTEON3,
 +    ISA_MIPS64R2 | INSN_OCTEON3, ASE_VIRT | ASE_VIRT64,
 +    mips_cp0_names_numeric,
-+    NULL, 0, mips_hwr_names_numeric },
++    NULL, 0, mips_cp1_names_mips3264, mips_hwr_names_numeric },
 +
    { "xlr", 1, bfd_mach_mips_xlr, CPU_XLR,
      ISA_MIPS64 | INSN_XLR, 0,
diff --git a/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb b/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb
index 7ac3784..676f11d 100644
--- a/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb
+++ b/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb
@@ -10,7 +10,6 @@
            file://Ignore-DWARF-sections.patch;striplevel=1 \
            file://0001-Add-support-for-deb-and-ipk-packaging.patch \
            "
-PATCHTOOL = "git"
 
 SRCREV = "c5352c054c6ef15735da31b76d6d88620f4aff0a"
 
diff --git a/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.3.bb b/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.3.bb
index 357df75..97f557a 100644
--- a/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.3.bb
+++ b/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.3.bb
@@ -5,4 +5,6 @@
 
 SRCREV = "4cad46e8ee0053144bb00919f0dadd20c1f87013"
 
-SRC_URI += "file://0001-Fix-regression-in-recent-change-related-to-zlib-in-n.patch"
+SRC_URI += "file://0001-Fix-regression-in-recent-change-related-to-zlib-in-n.patch \
+            file://0002-dev.mk.in-fix-file-name-too-long.patch \
+"
diff --git a/yocto-poky/meta/recipes-devtools/ccache/files/0002-dev.mk.in-fix-file-name-too-long.patch b/yocto-poky/meta/recipes-devtools/ccache/files/0002-dev.mk.in-fix-file-name-too-long.patch
new file mode 100644
index 0000000..837cfad
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/ccache/files/0002-dev.mk.in-fix-file-name-too-long.patch
@@ -0,0 +1,32 @@
+From 71bd0082c6edcf73f054a8a4fa34bd8dd4de7cd7 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Wed, 16 Sep 2015 19:45:40 -0700
+Subject: [PATCH] dev.mk.in: fix file name too long
+
+The all_cppflags change paths to filename which cause file name too long
+error when the path is longer than NAME_MAX (usually 255). Strip srcdir
+to fix the problem.
+
+Upstream-Status: Pending
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ dev.mk.in |    2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/dev.mk.in b/dev.mk.in
+index 1261ad3..ec55ac4 100644
+--- a/dev.mk.in
++++ b/dev.mk.in
+@@ -1,7 +1,7 @@
+ # GNU make syntax reigns in this file.
+ 
+ all_cflags += -Werror
+-all_cppflags += -MD -MP -MF .deps/$(subst .._,,$(subst /,_,$<)).d
++all_cppflags += -MD -MP -MF .deps/$(subst .._,,$(subst /,_,$(subst $(srcdir)/,,$<))).d
+ 
+ ASCIIDOC = asciidoc
+ GPERF = gperf
+-- 
+1.7.9.5
+
diff --git a/yocto-poky/meta/recipes-devtools/dpkg/dpkg/CVE-2015-0860.patch b/yocto-poky/meta/recipes-devtools/dpkg/dpkg/CVE-2015-0860.patch
new file mode 100644
index 0000000..1f259d3
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/dpkg/dpkg/CVE-2015-0860.patch
@@ -0,0 +1,52 @@
+From f1aac7d933819569bf6f347c3c0d5a64a90bbce0 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Hanno=20B=C3=B6ck?= <hanno@hboeck.de>
+Date: Thu, 19 Nov 2015 20:03:10 +0100
+Subject: [PATCH] dpkg-deb: Fix off-by-one write access on ctrllenbuf variable
+
+This affects old format .deb packages.
+
+Fixes: CVE-2015-0860
+Warned-by: afl
+Signed-off-by: Guillem Jover <guillem@debian.org>
+
+Upstream-Status: Backport
+
+https://anonscm.debian.org/cgit/dpkg/dpkg.git/commit/?h=wheezy&id=f1aac7d933819569bf6f347c3c0d5a64a90bbce0
+
+CVE: CVE-2015-0860
+
+hand merge Changelog
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ debian/changelog   | 3 +++
+ dpkg-deb/extract.c | 2 +-
+ 2 files changed, 4 insertions(+), 1 deletion(-)
+
+Index: dpkg-1.18.2/dpkg-deb/extract.c
+===================================================================
+--- dpkg-1.18.2.orig/dpkg-deb/extract.c
++++ dpkg-1.18.2/dpkg-deb/extract.c
+@@ -247,7 +247,7 @@ extracthalf(const char *debar, const cha
+     if (errstr)
+       ohshit(_("archive has invalid format version: %s"), errstr);
+ 
+-    r = read_line(arfd, ctrllenbuf, 1, sizeof(ctrllenbuf));
++    r = read_line(arfd, ctrllenbuf, 1, sizeof(ctrllenbuf) - 1);
+     if (r < 0)
+       read_fail(r, debar, _("archive control member size"));
+     if (sscanf(ctrllenbuf, "%jd%c%d", &ctrllennum, &nlc, &dummy) != 2 ||
+Index: dpkg-1.18.2/ChangeLog
+===================================================================
+--- dpkg-1.18.2.orig/ChangeLog
++++ dpkg-1.18.2/ChangeLog
+@@ -1,3 +1,8 @@
++[ Guillem Jover ]
++  * Fix an off-by-one write access in dpkg-deb when parsing the old format
++    .deb control member size. Thanks to Hanno Böck <hanno@hboeck.de>.
++    Fixes CVE-2015-0860.
++
+ commit 5459d330c73cdcfd1327bc93c0ebddc2da4a3a3a (HEAD -> master, tag: 1.18.2)
+ Author: Guillem Jover <guillem@debian.org>
+ Date:   Mon Aug 3 15:41:05 2015 +0200
diff --git a/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.2.bb b/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.2.bb
index 4c3fa4f..2fc096d 100644
--- a/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.2.bb
+++ b/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.2.bb
@@ -12,6 +12,7 @@
 	    file://0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch \
 	    file://0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch \
 	    file://0005-dpkg-compiler.m4-remove-Wvla.patch \
+        file://CVE-2015-0860.patch \
            "
 
 SRC_URI[md5sum] = "63b9d869081ec49adeef6c5ff62d6576"
diff --git a/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/copy-in-create-hardlinks-with-the-correct-directory-.patch b/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/copy-in-create-hardlinks-with-the-correct-directory-.patch
new file mode 100644
index 0000000..f549693
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/copy-in-create-hardlinks-with-the-correct-directory-.patch
@@ -0,0 +1,81 @@
+From 2dcf8e92bc39e05b3c799f53fe911c024aee4375 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Fri, 23 Oct 2015 03:21:05 -0700
+Subject: [PATCH] copy-in: create hardlinks with the correct directory
+ filetype
+
+When we're creating hard links via ext2fs_link, the (misnamed?) flags
+argument specifies the filetype for the directory entry.  This is
+*derived* from i_mode, so provide a translator.  Otherwise, fsck will
+complain about unset file types.
+
+Signed-off-by: Darrick J. Wong <darrick.wong@oracle.com>
+Signed-off-by: Theodore Ts'o <tytso@mit.edu>
+
+Upstream-Status: Backport
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ misc/create_inode.c |   33 +++++++++++++++++++++++++++++++--
+ 1 file changed, 31 insertions(+), 2 deletions(-)
+
+diff --git a/misc/create_inode.c b/misc/create_inode.c
+index fcec5aa..b8565da 100644
+--- a/misc/create_inode.c
++++ b/misc/create_inode.c
+@@ -22,6 +22,33 @@
+ /* For saving the hard links */
+ int hdlink_cnt = HDLINK_CNT;
+ 
++static int ext2_file_type(unsigned int mode)
++{
++	if (LINUX_S_ISREG(mode))
++		return EXT2_FT_REG_FILE;
++
++	if (LINUX_S_ISDIR(mode))
++		return EXT2_FT_DIR;
++
++	if (LINUX_S_ISCHR(mode))
++		return EXT2_FT_CHRDEV;
++
++	if (LINUX_S_ISBLK(mode))
++		return EXT2_FT_BLKDEV;
++
++	if (LINUX_S_ISLNK(mode))
++		return EXT2_FT_SYMLINK;
++
++	if (LINUX_S_ISFIFO(mode))
++		return EXT2_FT_FIFO;
++
++	if (LINUX_S_ISSOCK(mode))
++		return EXT2_FT_SOCK;
++
++	return 0;
++}
++
++
+ /* Link an inode number to a directory */
+ static errcode_t add_link(ext2_ino_t parent_ino, ext2_ino_t ino, const char *name)
+ {
+@@ -34,14 +61,16 @@ static errcode_t add_link(ext2_ino_t parent_ino, ext2_ino_t ino, const char *nam
+ 		return retval;
+ 	}
+ 
+-	retval = ext2fs_link(current_fs, parent_ino, name, ino, inode.i_flags);
++	retval = ext2fs_link(current_fs, parent_ino, name, ino,
++	             ext2_file_type(inode.i_mode));
+ 	if (retval == EXT2_ET_DIR_NO_SPACE) {
+ 		retval = ext2fs_expand_dir(current_fs, parent_ino);
+ 		if (retval) {
+ 			com_err(__func__, retval, "while expanding directory");
+ 			return retval;
+ 		}
+-		retval = ext2fs_link(current_fs, parent_ino, name, ino, inode.i_flags);
++		retval = ext2fs_link(current_fs, parent_ino, name, ino,
++	                     ext2_file_type(inode.i_mode));
+ 	}
+ 	if (retval) {
+ 		com_err(__func__, retval, "while linking %s", name);
+-- 
+1.7.9.5
+
diff --git a/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb b/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
index 97e29c8..a8edeef 100644
--- a/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
+++ b/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
@@ -23,6 +23,7 @@
             file://cache_inode.patch \
             file://CVE-2015-0247.patch \
             file://0001-libext2fs-fix-potential-buffer-overflow-in-closefs.patch \
+            file://copy-in-create-hardlinks-with-the-correct-directory-.patch \
 "
 
 SRC_URI[md5sum] = "3f8e41e63b432ba114b33f58674563f7"
@@ -60,12 +61,27 @@
 	install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/
 }
 
+# Need to find the right mke2fs.conf file
+e2fsprogs_conf_fixup () {
+	for i in mke2fs mkfs.ext2 mkfs.ext3 mkfs.ext4 mkfs.ext4dev; do
+		create_wrapper ${D}${base_sbindir}/$i MKE2FS_CONFIG=${sysconfdir}/mke2fs.conf
+	done
+}
+
 do_install_append_class-target() {
 	# Clean host path in compile_et, mk_cmds
 	sed -i -e "s,ET_DIR=\"${S}/lib/et\",ET_DIR=\"${datadir}/et\",g" ${D}${bindir}/compile_et
 	sed -i -e "s,SS_DIR=\"${S}/lib/ss\",SS_DIR=\"${datadir}/ss\",g" ${D}${bindir}/mk_cmds
 }
 
+do_install_append_class-native() {
+	e2fsprogs_conf_fixup
+}
+
+do_install_append_class-nativesdk() {
+	e2fsprogs_conf_fixup
+}
+
 RDEPENDS_e2fsprogs = "e2fsprogs-badblocks"
 RRECOMMENDS_e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck"
 
diff --git a/yocto-poky/meta/recipes-devtools/file/file/host-file.patch b/yocto-poky/meta/recipes-devtools/file/file/host-file.patch
new file mode 100644
index 0000000..a7efbdc
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/file/file/host-file.patch
@@ -0,0 +1,32 @@
+Upstream-Status: Submitted (http://bugs.gw.com/view.php?id=485)
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+From 3cde199d03b39632361c275cd30fa0612a03138b Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Mon, 19 Oct 2015 10:30:57 +0100
+Subject: [PATCH 2/2] When using the host file, respect FILE_COMPILE
+
+If we're cross-compiling and not using the file binary that was just built,
+execute the binary that we've been told to use (via FILE_COMPILE) when checking
+the version instead of assuming that "file" is correct as the actual compile
+uses FILE_COMPILE so different binaries may be used.
+---
+ magic/Makefile.am | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/magic/Makefile.am b/magic/Makefile.am
+index 89ac844..67067fe 100644
+--- a/magic/Makefile.am
++++ b/magic/Makefile.am
+@@ -293,7 +293,7 @@ ${MAGIC}: $(EXTRA_DIST) $(FILE_COMPILE_DEP)
+ 	@(if expr "${FILE_COMPILE}" : '.*/.*' > /dev/null; then \
+ 	    echo "Using ${FILE_COMPILE} to generate ${MAGIC}" > /dev/null; \
+ 	  else \
+-	    v=$$(file --version | sed -e s/file-// -e q); \
++	    v=$$(${FILE_COMPILE} --version | sed -e s/file-// -e q); \
+ 	    if [ "$$v" != "${PACKAGE_VERSION}" ]; then \
+ 		echo "Cannot use the installed version of file ($$v) to"; \
+ 		echo "cross-compile file ${PACKAGE_VERSION}"; \
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-devtools/file/file_5.24.bb b/yocto-poky/meta/recipes-devtools/file/file_5.24.bb
index 08b95d7..d04f121 100644
--- a/yocto-poky/meta/recipes-devtools/file/file_5.24.bb
+++ b/yocto-poky/meta/recipes-devtools/file/file_5.24.bb
@@ -8,12 +8,13 @@
 LICENSE = "BSD"
 LIC_FILES_CHKSUM = "file://COPYING;beginline=2;md5=6a7382872edb68d33e1a9398b6e03188"
 
-DEPENDS = "zlib file-native"
+DEPENDS = "zlib file-replacement-native"
 DEPENDS_class-native = "zlib-native"
 
 SRC_URI = "git://github.com/file/file.git \
         file://debian-742262.patch \
         file://0001-Add-P-prompt-into-Usage-info.patch \
+        file://host-file.patch \
         "
 
 SRCREV = "3c0874be4d3232d672b20f513451a39cfd7c585a"
@@ -21,6 +22,9 @@
 
 inherit autotools
 
+EXTRA_OEMAKE_append_class-target = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
+EXTRA_OEMAKE_append_class-nativesdk = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file"
+
 FILES_${PN} += "${datadir}/misc/*.mgc"
 
 do_install_append_class-native() {
@@ -34,3 +38,7 @@
 }
 
 BBCLASSEXTEND = "native nativesdk"
+PROVIDES_append_class-native = " file-replacement-native"
+# Don't use NATIVE_PACKAGE_PATH_SUFFIX as that hides libmagic from anyone who
+# depends on file-replacement-native.
+bindir_append_class-native = "/file-native"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8.inc
index 6a2454d..b3e1c33 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8.inc
@@ -69,6 +69,7 @@
     file://0047-repomembug.patch \
     file://0049-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch \
     file://target-gcc-includedir.patch \
+    file://0051-gcc-483-universal-initializer-no-warning.patch \
 "
 SRC_URI[md5sum] = "5a84a30839b2aca22a2d723de2a626ec"
 SRC_URI[sha256sum] = "4a80aa23798b8e9b5793494b8c976b39b8d9aa2e53cd5ed5534aff662a7f8695"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8/0051-gcc-483-universal-initializer-no-warning.patch b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8/0051-gcc-483-universal-initializer-no-warning.patch
new file mode 100644
index 0000000..fde227b
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.8/0051-gcc-483-universal-initializer-no-warning.patch
@@ -0,0 +1,107 @@
+Upstream-Status: Backport
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+Fix for https://gcc.gnu.org/bugzilla/show_bug.cgi?id=53119
+wrong warning when using the universal zero initializer {0}
+
+Backported to GCC 4.8.3
+
+Subject: 2014-06-05  S. Gilles  <sgilles@terpmail.umd.edu>
+X-Git-Url: http://repo.or.cz/w/official-gcc.git/commitdiff_plain/95cdf3fdf2d440eb7775def8e35ab970651c33d9?hp=14a3093e9943937cbc63dfbf4d51ca60f8325b29
+git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@211289 138bc75d-0d04-0410-961f-82ee72b054a4
+
+--- gcc-4.8.3.org/gcc/c/c-typeck.c	2014-08-03 20:52:09.257042137 +0200
++++ gcc-4.8.3/gcc/c/c-typeck.c	2014-08-03 20:57:10.645042614 +0200
+@@ -62,9 +62,9 @@
+    if expr.original_code == SIZEOF_EXPR.  */
+ tree c_last_sizeof_arg;
+ 
+-/* Nonzero if we've already printed a "missing braces around initializer"
+-   message within this initializer.  */
+-static int missing_braces_mentioned;
++/* Nonzero if we might need to print a "missing braces around
++   initializer" message within this initializer.  */
++static int found_missing_braces;
+ 
+ static int require_constant_value;
+ static int require_constant_elements;
+@@ -6379,6 +6379,9 @@
+ /* 1 if this constructor is erroneous so far.  */
+ static int constructor_erroneous;
+ 
++/* 1 if this constructor is the universal zero initializer { 0 }.  */
++static int constructor_zeroinit;
++
+ /* Structure for managing pending initializer elements, organized as an
+    AVL tree.  */
+ 
+@@ -6540,7 +6543,7 @@
+   constructor_stack = 0;
+   constructor_range_stack = 0;
+ 
+-  missing_braces_mentioned = 0;
++  found_missing_braces = 0;
+ 
+   spelling_base = 0;
+   spelling_size = 0;
+@@ -6635,6 +6638,7 @@
+   constructor_type = type;
+   constructor_incremental = 1;
+   constructor_designated = 0;
++  constructor_zeroinit = 1;
+   designator_depth = 0;
+   designator_erroneous = 0;
+ 
+@@ -6832,11 +6836,8 @@
+ 	set_nonincremental_init (braced_init_obstack);
+     }
+ 
+-  if (implicit == 1 && warn_missing_braces && !missing_braces_mentioned)
+-    {
+-      missing_braces_mentioned = 1;
+-      warning_init (OPT_Wmissing_braces, "missing braces around initializer");
+-    }
++  if (implicit == 1)
++    found_missing_braces = 1;
+ 
+   if (TREE_CODE (constructor_type) == RECORD_TYPE
+ 	   || TREE_CODE (constructor_type) == UNION_TYPE)
+@@ -6969,16 +6970,23 @@
+ 	}
+     }
+ 
++  if (vec_safe_length (constructor_elements) != 1)
++    constructor_zeroinit = 0;
++
++  /* Warn when some structs are initialized with direct aggregation.  */
++  if (!implicit && found_missing_braces && warn_missing_braces
++      && !constructor_zeroinit)
++    {
++      warning_init (OPT_Wmissing_braces,
++		    "missing braces around initializer");
++    }
++
+   /* Warn when some struct elements are implicitly initialized to zero.  */
+   if (warn_missing_field_initializers
+       && constructor_type
+       && TREE_CODE (constructor_type) == RECORD_TYPE
+       && constructor_unfilled_fields)
+     {
+-	bool constructor_zeroinit =
+-	 (vec_safe_length (constructor_elements) == 1
+-	  && integer_zerop ((*constructor_elements)[0].value));
+-
+ 	/* Do not warn for flexible array members or zero-length arrays.  */
+ 	while (constructor_unfilled_fields
+ 	       && (!DECL_SIZE (constructor_unfilled_fields)
+@@ -8093,6 +8101,9 @@
+   designator_depth = 0;
+   designator_erroneous = 0;
+ 
++  if (!implicit && value.value && !integer_zerop (value.value))
++    constructor_zeroinit = 0;
++
+   /* Handle superfluous braces around string cst as in
+      char x[] = {"foo"}; */
+   if (string_flag
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9.inc
index 691ba5f..95b553c 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9.inc
@@ -80,6 +80,7 @@
     file://0062-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch \
     file://0063-nativesdk-gcc-support.patch \
     file://0064-handle-target-sysroot-multilib.patch \
+    file://0065-gcc-483-universal-initializer-no-warning.patch \
 "
 SRC_URI[md5sum] = "6f831b4d251872736e8e9cc09746f327"
 SRC_URI[sha256sum] = "2332b2a5a321b57508b9031354a8503af6fdfb868b8c1748d33028d100a8b67e"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9/0065-gcc-483-universal-initializer-no-warning.patch b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9/0065-gcc-483-universal-initializer-no-warning.patch
new file mode 100644
index 0000000..fde227b
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-4.9/0065-gcc-483-universal-initializer-no-warning.patch
@@ -0,0 +1,107 @@
+Upstream-Status: Backport
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+Fix for https://gcc.gnu.org/bugzilla/show_bug.cgi?id=53119
+wrong warning when using the universal zero initializer {0}
+
+Backported to GCC 4.8.3
+
+Subject: 2014-06-05  S. Gilles  <sgilles@terpmail.umd.edu>
+X-Git-Url: http://repo.or.cz/w/official-gcc.git/commitdiff_plain/95cdf3fdf2d440eb7775def8e35ab970651c33d9?hp=14a3093e9943937cbc63dfbf4d51ca60f8325b29
+git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@211289 138bc75d-0d04-0410-961f-82ee72b054a4
+
+--- gcc-4.8.3.org/gcc/c/c-typeck.c	2014-08-03 20:52:09.257042137 +0200
++++ gcc-4.8.3/gcc/c/c-typeck.c	2014-08-03 20:57:10.645042614 +0200
+@@ -62,9 +62,9 @@
+    if expr.original_code == SIZEOF_EXPR.  */
+ tree c_last_sizeof_arg;
+ 
+-/* Nonzero if we've already printed a "missing braces around initializer"
+-   message within this initializer.  */
+-static int missing_braces_mentioned;
++/* Nonzero if we might need to print a "missing braces around
++   initializer" message within this initializer.  */
++static int found_missing_braces;
+ 
+ static int require_constant_value;
+ static int require_constant_elements;
+@@ -6379,6 +6379,9 @@
+ /* 1 if this constructor is erroneous so far.  */
+ static int constructor_erroneous;
+ 
++/* 1 if this constructor is the universal zero initializer { 0 }.  */
++static int constructor_zeroinit;
++
+ /* Structure for managing pending initializer elements, organized as an
+    AVL tree.  */
+ 
+@@ -6540,7 +6543,7 @@
+   constructor_stack = 0;
+   constructor_range_stack = 0;
+ 
+-  missing_braces_mentioned = 0;
++  found_missing_braces = 0;
+ 
+   spelling_base = 0;
+   spelling_size = 0;
+@@ -6635,6 +6638,7 @@
+   constructor_type = type;
+   constructor_incremental = 1;
+   constructor_designated = 0;
++  constructor_zeroinit = 1;
+   designator_depth = 0;
+   designator_erroneous = 0;
+ 
+@@ -6832,11 +6836,8 @@
+ 	set_nonincremental_init (braced_init_obstack);
+     }
+ 
+-  if (implicit == 1 && warn_missing_braces && !missing_braces_mentioned)
+-    {
+-      missing_braces_mentioned = 1;
+-      warning_init (OPT_Wmissing_braces, "missing braces around initializer");
+-    }
++  if (implicit == 1)
++    found_missing_braces = 1;
+ 
+   if (TREE_CODE (constructor_type) == RECORD_TYPE
+ 	   || TREE_CODE (constructor_type) == UNION_TYPE)
+@@ -6969,16 +6970,23 @@
+ 	}
+     }
+ 
++  if (vec_safe_length (constructor_elements) != 1)
++    constructor_zeroinit = 0;
++
++  /* Warn when some structs are initialized with direct aggregation.  */
++  if (!implicit && found_missing_braces && warn_missing_braces
++      && !constructor_zeroinit)
++    {
++      warning_init (OPT_Wmissing_braces,
++		    "missing braces around initializer");
++    }
++
+   /* Warn when some struct elements are implicitly initialized to zero.  */
+   if (warn_missing_field_initializers
+       && constructor_type
+       && TREE_CODE (constructor_type) == RECORD_TYPE
+       && constructor_unfilled_fields)
+     {
+-	bool constructor_zeroinit =
+-	 (vec_safe_length (constructor_elements) == 1
+-	  && integer_zerop ((*constructor_elements)[0].value));
+-
+ 	/* Do not warn for flexible array members or zero-length arrays.  */
+ 	while (constructor_unfilled_fields
+ 	       && (!DECL_SIZE (constructor_unfilled_fields)
+@@ -8093,6 +8101,9 @@
+   designator_depth = 0;
+   designator_erroneous = 0;
+ 
++  if (!implicit && value.value && !integer_zerop (value.value))
++    constructor_zeroinit = 0;
++
+   /* Handle superfluous braces around string cst as in
+      char x[] = {"foo"}; */
+   if (string_flag
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2.inc
index f691f58..a6b385a 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2.inc
@@ -73,6 +73,7 @@
            file://0039-libcc1-fix-libcc1-s-install-path-and-rpath.patch \
            file://0040-nativesdk-gcc-support.patch \
            file://0041-handle-target-sysroot-multilib.patch \
+           file://0042-cxxflags-for-build.patch \
           "
 
 BACKPORTS = ""
@@ -98,6 +99,7 @@
     --with-cloog=no \
     --enable-checking=release \
     --enable-cheaders=c_global \
+    --without-isl \
 "
 
 EXTRA_OECONF_INITIAL = "\
@@ -109,6 +111,7 @@
     --disable-lto \
     --disable-plugin \
     --enable-decimal-float=no \
+    --without-isl \
 "
 
 EXTRA_OECONF_append_libc-uclibc = " --disable-decimal-float "
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2/0042-cxxflags-for-build.patch b/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2/0042-cxxflags-for-build.patch
new file mode 100644
index 0000000..1105e29
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-5.2/0042-cxxflags-for-build.patch
@@ -0,0 +1,123 @@
+Fix various _FOR_BUILD and related variables
+
+When doing a FOR_BUILD thing, you have to override CFLAGS with
+CFLAGS_FOR_BUILD. And if you use C++, you also have to override
+CXXFLAGS with CXXFLAGS_FOR_BUILD.
+Without this, when building for mingw, you end up trying to use
+the mingw headers for a host build.
+
+The same goes for other variables as well, such as CPPFLAGS,
+CPP, and GMPINC.
+
+Upstream-Status: Pending
+
+Signed-off-by: Peter Seebach <peter.seebach@windriver.com>
+Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
+
+diff --git a/Makefile.in b/Makefile.in
+index 9370174..011c29a 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -152,6 +152,7 @@ BUILD_EXPORTS = \
+ 	CPP="$(CC_FOR_BUILD) -E"; export CPP; \
+ 	CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \
+ 	CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \
++	CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \
+ 	CXX="$(CXX_FOR_BUILD)"; export CXX; \
+ 	CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \
+ 	GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \
+@@ -170,6 +171,9 @@ BUILD_EXPORTS = \
+ # built for the build system to override those in BASE_FLAGS_TO_PASS.
+ EXTRA_BUILD_FLAGS = \
+ 	CFLAGS="$(CFLAGS_FOR_BUILD)" \
++	CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \
++	CPP="$(CC_FOR_BUILD) -E" \
++	CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \
+ 	LDFLAGS="$(LDFLAGS_FOR_BUILD)"
+ 
+ # This is the list of directories to built for the host system.
+@@ -187,6 +191,7 @@ HOST_SUBDIR = @host_subdir@
+ HOST_EXPORTS = \
+ 	$(BASE_EXPORTS) \
+ 	CC="$(CC)"; export CC; \
++	CPP="$(CC) -E"; export CPP; \
+ 	ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \
+ 	CFLAGS="$(CFLAGS)"; export CFLAGS; \
+ 	CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \
+@@ -711,6 +715,7 @@ BASE_FLAGS_TO_PASS = \
+ 	"CC_FOR_BUILD=$(CC_FOR_BUILD)" \
+ 	"CFLAGS_FOR_BUILD=$(CFLAGS_FOR_BUILD)" \
+ 	"CXX_FOR_BUILD=$(CXX_FOR_BUILD)" \
++	"CXXFLAGS_FOR_BUILD=$(CXXFLAGS_FOR_BUILD)" \
+ 	"EXPECT=$(EXPECT)" \
+ 	"FLEX=$(FLEX)" \
+ 	"INSTALL=$(INSTALL)" \
+diff --git a/Makefile.tpl b/Makefile.tpl
+index 1ea1954..78a59c3 100644
+--- a/Makefile.tpl
++++ b/Makefile.tpl
+@@ -154,6 +154,7 @@ BUILD_EXPORTS = \
+ 	CC="$(CC_FOR_BUILD)"; export CC; \
+ 	CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \
+ 	CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \
++	CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \
+ 	CXX="$(CXX_FOR_BUILD)"; export CXX; \
+ 	CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \
+ 	GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \
+@@ -172,6 +173,9 @@ BUILD_EXPORTS = \
+ # built for the build system to override those in BASE_FLAGS_TO_PASS.
+ EXTRA_BUILD_FLAGS = \
+ 	CFLAGS="$(CFLAGS_FOR_BUILD)" \
++	CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \
++	CPP="$(CC_FOR_BUILD) -E" \
++	CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \
+ 	LDFLAGS="$(LDFLAGS_FOR_BUILD)"
+ 
+ # This is the list of directories to built for the host system.
+@@ -189,6 +193,7 @@ HOST_SUBDIR = @host_subdir@
+ HOST_EXPORTS = \
+ 	$(BASE_EXPORTS) \
+ 	CC="$(CC)"; export CC; \
++	CPP="$(CC) -E"; export CPP; \
+ 	ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \
+ 	CFLAGS="$(CFLAGS)"; export CFLAGS; \
+ 	CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \
+diff --git a/gcc/Makefile.in b/gcc/Makefile.in
+index cd5bc4a..98ae4f4 100644
+--- a/gcc/Makefile.in
++++ b/gcc/Makefile.in
+@@ -762,7 +762,7 @@ BUILD_LINKERFLAGS = $(BUILD_CXXFLAGS)
+ # Native linker and preprocessor flags.  For x-fragment overrides.
+ BUILD_LDFLAGS=@BUILD_LDFLAGS@
+ BUILD_CPPFLAGS= -I. -I$(@D) -I$(srcdir) -I$(srcdir)/$(@D) \
+-		-I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS)
++		-I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS_FOR_BUILD)
+ 
+ # Actual name to use when installing a native compiler.
+ GCC_INSTALL_NAME := $(shell echo gcc|sed '$(program_transform_name)')
+diff --git a/gcc/configure b/gcc/configure
+index c7ac14b..5ac63e4 100755
+--- a/gcc/configure
++++ b/gcc/configure
+@@ -11521,7 +11521,7 @@ else
+ 	CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \
+ 	CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \
+ 	LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \
+-	GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \
++	GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \
+ 	${realsrcdir}/configure \
+ 		--enable-languages=${enable_languages-all} \
+ 		--target=$target_alias --host=$build_alias --build=$build_alias
+diff --git a/gcc/configure.ac b/gcc/configure.ac
+index 50856e6..17a4dfd 100644
+--- a/gcc/configure.ac
++++ b/gcc/configure.ac
+@@ -1633,7 +1633,7 @@ else
+ 	CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \
+ 	CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \
+ 	LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \
+-	GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \
++	GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \
+ 	${realsrcdir}/configure \
+ 		--enable-languages=${enable_languages-all} \
+ 		--target=$target_alias --host=$build_alias --build=$build_alias
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc
index d63c07f..6f2f224 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc
@@ -25,6 +25,11 @@
         return "--with-mips-plt"
     return ""
 
+def get_gcc_ppc_plt_settings(bb, d):
+    if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc' ] and not bb.utils.contains('DISTRO_FEATURES', 'bssplt', True, False, d):
+        return "--enable-secureplt"
+    return ""
+
 def get_long_double_setting(bb, d):
     if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC', True) in [ 'uclibc', 'glibc' ]:
         return "--with-long-double-128"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc
index a14be73..cee6f4a 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc
@@ -47,6 +47,7 @@
     ${EXTRA_OECONF_GCC_FLOAT} \
     ${EXTRA_OECONF_PATHS} \
     ${@get_gcc_mips_plt_setting(bb, d)} \
+    ${@get_gcc_ppc_plt_settings(bb, d)} \
     ${@get_long_double_setting(bb, d)} \
     ${@get_gcc_multiarch_setting(bb, d)} \
 "
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc
index 7197447..c0fa139 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc
@@ -26,6 +26,7 @@
     ${EXTRA_OECONF_INITIAL} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', '--with-ld=${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX}ld.bfd', '', d)} \
     ${EXTRA_OECONF_GCC_FLOAT} \
+    ${@get_gcc_ppc_plt_settings(bb, d)} \
 "
 
 EXTRA_OECONF += "--with-native-system-header-dir=${SYSTEMHEADERS}"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc
index f7f9f55..1c0a45a 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc
@@ -206,7 +206,7 @@
         # take out '-' mcpu='s and march='s from parameters
         opts = []
         whitelist = (d.getVar("MULTILIB_OPTION_WHITELIST", True) or "").split()
-        for i in tune_parameters['ccargs'].split():
+        for i in d.expand(tune_parameters['ccargs']).split():
             if i in whitelist:
                 # Need to strip '-' from option
                 opts.append(i[1:])
@@ -223,5 +223,6 @@
 }
 
 gcc_multilib_setup[cleandirs] = "${B}/gcc/config"
+gcc_multilib_setup[vardepsexclude] = "SDK_ARCH"
 
 EXTRACONFFUNCS += "gcc_multilib_setup"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc
index 09757e6..690d780 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc
@@ -53,6 +53,9 @@
 	if [ -d ${D}${infodir} ]; then
 		rmdir --ignore-fail-on-non-empty -p ${D}${infodir}
 	fi
+	if [ "${TARGET_VENDOR_MULTILIB_ORIGINAL}" != "" -a "${TARGET_VENDOR}" != "${TARGET_VENDOR_MULTILIB_ORIGINAL}" ]; then
+		ln -s ${TARGET_SYS} ${D}${includedir}/c++/${BINV}/${TARGET_ARCH}${TARGET_VENDOR_MULTILIB_ORIGINAL}-${TARGET_OS}
+	fi
 	if [ "${TARGET_OS}" = "linux-gnuspe" ]; then
 		ln -s ${TARGET_SYS} ${D}${includedir}/c++/${BINV}/${TARGET_ARCH}${TARGET_VENDOR}-linux
 	fi
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-shared-source.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-shared-source.inc
index 9acffb1..aac4b49 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-shared-source.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-shared-source.inc
@@ -5,5 +5,7 @@
 deltask do_unpack
 deltask do_patch
 
+SRC_URI = ""
+
 do_configure[depends] += "gcc-source-${PV}:do_preconfigure"
 do_populate_lic[depends] += "gcc-source-${PV}:do_unpack"
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc b/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc
index 6e160c0..d62c15a 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc
@@ -31,7 +31,7 @@
 
 FILES_${PN} = "\
     ${bindir}/${TARGET_PREFIX}gcc* \
-    ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/collect2 \
+    ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/collect2* \
     ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc* \
     ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/lto* \
     ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/lib*${SOLIBS} \
@@ -83,20 +83,20 @@
     ${bindir}/f95"
 
 FILES_cpp = "\
-    ${bindir}/${TARGET_PREFIX}cpp \
+    ${bindir}/${TARGET_PREFIX}cpp* \
     ${base_libdir}/cpp \
     ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc1"
 FILES_cpp-symlinks = "${bindir}/cpp"
 
-FILES_gcov = "${bindir}/${TARGET_PREFIX}gcov \
-    ${bindir}/${TARGET_PREFIX}gcov-tool \
+FILES_gcov = "${bindir}/${TARGET_PREFIX}gcov* \
+    ${bindir}/${TARGET_PREFIX}gcov-tool* \
 "
 FILES_gcov-symlinks = "${bindir}/gcov \
     ${bindir}/gcov-tool \
 "
 
 FILES_g++ = "\
-    ${bindir}/${TARGET_PREFIX}g++ \
+    ${bindir}/${TARGET_PREFIX}g++* \
     ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc1plus \
 "
 FILES_g++-symlinks = "\
@@ -141,7 +141,7 @@
 	cd ${D}${bindir}
 
 	# We care about g++ not c++
-	rm -f *c++
+	rm -f *c++*
 
 	# We don't care about the gcc-<version> ones for this
 	rm -f *gcc-?.?*
diff --git a/yocto-poky/meta/recipes-devtools/gcc/gcc_4.9.bb b/yocto-poky/meta/recipes-devtools/gcc/gcc_4.9.bb
index b84baae..a9dc612 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/gcc_4.9.bb
+++ b/yocto-poky/meta/recipes-devtools/gcc/gcc_4.9.bb
@@ -1,10 +1,9 @@
 require recipes-devtools/gcc/gcc-${PV}.inc
 require gcc-target.inc
 
-# Building with thumb enabled on armv4t fails with
-# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7438:(.text.unlikely+0x2fa): relocation truncated to fit: R_ARM_THM_CALL against symbol `fancy_abort(char const*, int, char const*)' defined in .glue_7 section in linker stubs
-# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7442:(.text.unlikely+0x318): additional relocation overflows omitted from the output
+# http://errors.yoctoproject.org/Errors/Details/20497/
 ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
 
 BBCLASSEXTEND = "nativesdk"
 
diff --git a/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc b/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc
index 739adbd..95fa3f4 100644
--- a/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc
+++ b/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc
@@ -15,17 +15,10 @@
 LICENSE_${PN}-dbg = "GPL-3.0-with-GCC-exception"
 
 
-FILES_${PN} = "${base_libdir}/libgcc*.so.*"
 FILES_${PN}-dev = "\
     ${base_libdir}/libgcc*.so \
-    ${libdir}/${TARGET_SYS}/${BINV}/*crt* \
-    ${libdir}/${TARGET_SYS}/${BINV}/64 \
-    ${libdir}/${TARGET_SYS}/${BINV}/32 \
-    ${libdir}/${TARGET_SYS}/${BINV}/x32 \
-    ${libdir}/${TARGET_SYS}/${BINV}/n32 \
-    ${libdir}/${TARGET_SYS}/${BINV}/libgcc* \
     ${@base_conditional('BASETARGET_SYS', '${TARGET_SYS}', '', '${libdir}/${BASETARGET_SYS}', d)} \
-    ${libdir}/${TARGET_SYS}/${BINV}/libgcov.a \
+    ${libdir}/${TARGET_SYS}/${BINV}* \
 "
 
 FILES_${PN}-dbg += "${base_libdir}/.debug/"
diff --git a/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0008-CVE-2015-7545-1.patch b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0008-CVE-2015-7545-1.patch
new file mode 100644
index 0000000..b552c09
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0008-CVE-2015-7545-1.patch
@@ -0,0 +1,446 @@
+From a5adaced2e13c135d5d9cc65be9eb95aa3bacedf Mon Sep 17 00:00:00 2001
+From: Jeff King <peff@peff.net>
+Date: Wed, 16 Sep 2015 13:12:52 -0400
+Subject: [PATCH] transport: add a protocol-whitelist environment variable
+
+If we are cloning an untrusted remote repository into a
+sandbox, we may also want to fetch remote submodules in
+order to get the complete view as intended by the other
+side. However, that opens us up to attacks where a malicious
+user gets us to clone something they would not otherwise
+have access to (this is not necessarily a problem by itself,
+but we may then act on the cloned contents in a way that
+exposes them to the attacker).
+
+Ideally such a setup would sandbox git entirely away from
+high-value items, but this is not always practical or easy
+to set up (e.g., OS network controls may block multiple
+protocols, and we would want to enable some but not others).
+
+We can help this case by providing a way to restrict
+particular protocols. We use a whitelist in the environment.
+This is more annoying to set up than a blacklist, but
+defaults to safety if the set of protocols git supports
+grows). If no whitelist is specified, we continue to default
+to allowing all protocols (this is an "unsafe" default, but
+since the minority of users will want this sandboxing
+effect, it is the only sensible one).
+
+A note on the tests: ideally these would all be in a single
+test file, but the git-daemon and httpd test infrastructure
+is an all-or-nothing proposition rather than a test-by-test
+prerequisite. By putting them all together, we would be
+unable to test the file-local code on machines without
+apache.
+
+Signed-off-by: Jeff King <peff@peff.net>
+Signed-off-by: Junio C Hamano <gitster@pobox.com>
+
+Upstream-Status: Backport
+
+http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.5.0-1ubuntu0.1.debian.tar.xz
+
+CVE: CVE-2015-7545 #1
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ Documentation/git.txt          | 32 ++++++++++++++
+ connect.c                      |  5 +++
+ t/lib-proto-disable.sh         | 96 ++++++++++++++++++++++++++++++++++++++++++
+ t/t5810-proto-disable-local.sh | 14 ++++++
+ t/t5811-proto-disable-git.sh   | 20 +++++++++
+ t/t5812-proto-disable-http.sh  | 20 +++++++++
+ t/t5813-proto-disable-ssh.sh   | 20 +++++++++
+ t/t5814-proto-disable-ext.sh   | 18 ++++++++
+ transport-helper.c             |  2 +
+ transport.c                    | 21 ++++++++-
+ transport.h                    |  7 +++
+ 11 files changed, 254 insertions(+), 1 deletion(-)
+ create mode 100644 t/lib-proto-disable.sh
+ create mode 100755 t/t5810-proto-disable-local.sh
+ create mode 100755 t/t5811-proto-disable-git.sh
+ create mode 100755 t/t5812-proto-disable-http.sh
+ create mode 100755 t/t5813-proto-disable-ssh.sh
+ create mode 100755 t/t5814-proto-disable-ext.sh
+
+Index: git-2.5.0/Documentation/git.txt
+===================================================================
+--- git-2.5.0.orig/Documentation/git.txt	2015-12-11 12:46:48.975637719 -0500
++++ git-2.5.0/Documentation/git.txt	2015-12-11 12:46:48.967637661 -0500
+@@ -1069,6 +1069,38 @@
+ 	an operation has touched every ref (e.g., because you are
+ 	cloning a repository to make a backup).
+ 
++`GIT_ALLOW_PROTOCOL`::
++	If set, provide a colon-separated list of protocols which are
++	allowed to be used with fetch/push/clone. This is useful to
++	restrict recursive submodule initialization from an untrusted
++	repository. Any protocol not mentioned will be disallowed (i.e.,
++	this is a whitelist, not a blacklist). If the variable is not
++	set at all, all protocols are enabled.  The protocol names
++	currently used by git are:
++
++	  - `file`: any local file-based path (including `file://` URLs,
++	    or local paths)
++
++	  - `git`: the anonymous git protocol over a direct TCP
++	    connection (or proxy, if configured)
++
++	  - `ssh`: git over ssh (including `host:path` syntax,
++	    `git+ssh://`, etc).
++
++	  - `rsync`: git over rsync
++
++	  - `http`: git over http, both "smart http" and "dumb http".
++	    Note that this does _not_ include `https`; if you want both,
++	    you should specify both as `http:https`.
++
++	  - any external helpers are named by their protocol (e.g., use
++	    `hg` to allow the `git-remote-hg` helper)
+++
++Note that this controls only git's internal protocol selection.
++If libcurl is used (e.g., by the `http` transport), it may
++redirect to other protocols. There is not currently any way to
++restrict this.
++
+ 
+ Discussion[[Discussion]]
+ ------------------------
+Index: git-2.5.0/connect.c
+===================================================================
+--- git-2.5.0.orig/connect.c	2015-12-11 12:46:48.975637719 -0500
++++ git-2.5.0/connect.c	2015-12-11 12:46:48.967637661 -0500
+@@ -9,6 +9,7 @@
+ #include "url.h"
+ #include "string-list.h"
+ #include "sha1-array.h"
++#include "transport.h"
+ 
+ static char *server_capabilities;
+ static const char *parse_feature_value(const char *, const char *, int *);
+@@ -694,6 +695,8 @@
+ 		else
+ 			target_host = xstrdup(hostandport);
+ 
++		transport_check_allowed("git");
++
+ 		/* These underlying connection commands die() if they
+ 		 * cannot connect.
+ 		 */
+@@ -727,6 +730,7 @@
+ 			int putty, tortoiseplink = 0;
+ 			char *ssh_host = hostandport;
+ 			const char *port = NULL;
++			transport_check_allowed("ssh");
+ 			get_host_and_port(&ssh_host, &port);
+ 
+ 			if (!port)
+@@ -781,6 +785,7 @@
+ 			/* remove repo-local variables from the environment */
+ 			conn->env = local_repo_env;
+ 			conn->use_shell = 1;
++			transport_check_allowed("file");
+ 		}
+ 		argv_array_push(&conn->args, cmd.buf);
+ 
+Index: git-2.5.0/t/lib-proto-disable.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/lib-proto-disable.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,96 @@
++# Test routines for checking protocol disabling.
++
++# test cloning a particular protocol
++#   $1 - description of the protocol
++#   $2 - machine-readable name of the protocol
++#   $3 - the URL to try cloning
++test_proto () {
++	desc=$1
++	proto=$2
++	url=$3
++
++	test_expect_success "clone $1 (enabled)" '
++		rm -rf tmp.git &&
++		(
++			GIT_ALLOW_PROTOCOL=$proto &&
++			export GIT_ALLOW_PROTOCOL &&
++			git clone --bare "$url" tmp.git
++		)
++	'
++
++	test_expect_success "fetch $1 (enabled)" '
++		(
++			cd tmp.git &&
++			GIT_ALLOW_PROTOCOL=$proto &&
++			export GIT_ALLOW_PROTOCOL &&
++			git fetch
++		)
++	'
++
++	test_expect_success "push $1 (enabled)" '
++		(
++			cd tmp.git &&
++			GIT_ALLOW_PROTOCOL=$proto &&
++			export GIT_ALLOW_PROTOCOL &&
++			git push origin HEAD:pushed
++		)
++	'
++
++	test_expect_success "push $1 (disabled)" '
++		(
++			cd tmp.git &&
++			GIT_ALLOW_PROTOCOL=none &&
++			export GIT_ALLOW_PROTOCOL &&
++			test_must_fail git push origin HEAD:pushed
++		)
++	'
++
++	test_expect_success "fetch $1 (disabled)" '
++		(
++			cd tmp.git &&
++			GIT_ALLOW_PROTOCOL=none &&
++			export GIT_ALLOW_PROTOCOL &&
++			test_must_fail git fetch
++		)
++	'
++
++	test_expect_success "clone $1 (disabled)" '
++		rm -rf tmp.git &&
++		(
++			GIT_ALLOW_PROTOCOL=none &&
++			export GIT_ALLOW_PROTOCOL &&
++			test_must_fail git clone --bare "$url" tmp.git
++		)
++	'
++}
++
++# set up an ssh wrapper that will access $host/$repo in the
++# trash directory, and enable it for subsequent tests.
++setup_ssh_wrapper () {
++	test_expect_success 'setup ssh wrapper' '
++		write_script ssh-wrapper <<-\EOF &&
++		echo >&2 "ssh: $*"
++		host=$1; shift
++		cd "$TRASH_DIRECTORY/$host" &&
++		eval "$*"
++		EOF
++		GIT_SSH="$PWD/ssh-wrapper" &&
++		export GIT_SSH &&
++		export TRASH_DIRECTORY
++	'
++}
++
++# set up a wrapper that can be used with remote-ext to
++# access repositories in the "remote" directory of trash-dir,
++# like "ext::fake-remote %S repo.git"
++setup_ext_wrapper () {
++	test_expect_success 'setup ext wrapper' '
++		write_script fake-remote <<-\EOF &&
++		echo >&2 "fake-remote: $*"
++		cd "$TRASH_DIRECTORY/remote" &&
++		eval "$*"
++		EOF
++		PATH=$TRASH_DIRECTORY:$PATH &&
++		export TRASH_DIRECTORY
++	'
++}
+Index: git-2.5.0/t/t5810-proto-disable-local.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/t5810-proto-disable-local.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,14 @@
++#!/bin/sh
++
++test_description='test disabling of local paths in clone/fetch'
++. ./test-lib.sh
++. "$TEST_DIRECTORY/lib-proto-disable.sh"
++
++test_expect_success 'setup repository to clone' '
++	test_commit one
++'
++
++test_proto "file://" file "file://$PWD"
++test_proto "path" file .
++
++test_done
+Index: git-2.5.0/t/t5811-proto-disable-git.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/t5811-proto-disable-git.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,20 @@
++#!/bin/sh
++
++test_description='test disabling of git-over-tcp in clone/fetch'
++. ./test-lib.sh
++. "$TEST_DIRECTORY/lib-proto-disable.sh"
++. "$TEST_DIRECTORY/lib-git-daemon.sh"
++start_git_daemon
++
++test_expect_success 'create git-accessible repo' '
++	bare="$GIT_DAEMON_DOCUMENT_ROOT_PATH/repo.git" &&
++	test_commit one &&
++	git --bare init "$bare" &&
++	git push "$bare" HEAD &&
++	>"$bare/git-daemon-export-ok" &&
++	git -C "$bare" config daemon.receivepack true
++'
++
++test_proto "git://" git "$GIT_DAEMON_URL/repo.git"
++
++test_done
+Index: git-2.5.0/t/t5812-proto-disable-http.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/t5812-proto-disable-http.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,20 @@
++#!/bin/sh
++
++test_description='test disabling of git-over-http in clone/fetch'
++. ./test-lib.sh
++. "$TEST_DIRECTORY/lib-proto-disable.sh"
++. "$TEST_DIRECTORY/lib-httpd.sh"
++start_httpd
++
++test_expect_success 'create git-accessible repo' '
++	bare="$HTTPD_DOCUMENT_ROOT_PATH/repo.git" &&
++	test_commit one &&
++	git --bare init "$bare" &&
++	git push "$bare" HEAD &&
++	git -C "$bare" config http.receivepack true
++'
++
++test_proto "smart http" http "$HTTPD_URL/smart/repo.git"
++
++stop_httpd
++test_done
+Index: git-2.5.0/t/t5813-proto-disable-ssh.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/t5813-proto-disable-ssh.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,20 @@
++#!/bin/sh
++
++test_description='test disabling of git-over-ssh in clone/fetch'
++. ./test-lib.sh
++. "$TEST_DIRECTORY/lib-proto-disable.sh"
++
++setup_ssh_wrapper
++
++test_expect_success 'setup repository to clone' '
++	test_commit one &&
++	mkdir remote &&
++	git init --bare remote/repo.git &&
++	git push remote/repo.git HEAD
++'
++
++test_proto "host:path" ssh "remote:repo.git"
++test_proto "ssh://" ssh "ssh://remote/$PWD/remote/repo.git"
++test_proto "git+ssh://" ssh "git+ssh://remote/$PWD/remote/repo.git"
++
++test_done
+Index: git-2.5.0/t/t5814-proto-disable-ext.sh
+===================================================================
+--- /dev/null	1970-01-01 00:00:00.000000000 +0000
++++ git-2.5.0/t/t5814-proto-disable-ext.sh	2015-12-11 12:46:48.967637661 -0500
+@@ -0,0 +1,18 @@
++#!/bin/sh
++
++test_description='test disabling of remote-helper paths in clone/fetch'
++. ./test-lib.sh
++. "$TEST_DIRECTORY/lib-proto-disable.sh"
++
++setup_ext_wrapper
++
++test_expect_success 'setup repository to clone' '
++	test_commit one &&
++	mkdir remote &&
++	git init --bare remote/repo.git &&
++	git push remote/repo.git HEAD
++'
++
++test_proto "remote-helper" ext "ext::fake-remote %S repo.git"
++
++test_done
+Index: git-2.5.0/transport-helper.c
+===================================================================
+--- git-2.5.0.orig/transport-helper.c	2015-12-11 12:46:48.975637719 -0500
++++ git-2.5.0/transport-helper.c	2015-12-11 12:46:48.967637661 -0500
+@@ -1039,6 +1039,8 @@
+ 	struct helper_data *data = xcalloc(1, sizeof(*data));
+ 	data->name = name;
+ 
++	transport_check_allowed(name);
++
+ 	if (getenv("GIT_TRANSPORT_HELPER_DEBUG"))
+ 		debug = 1;
+ 
+Index: git-2.5.0/transport.c
+===================================================================
+--- git-2.5.0.orig/transport.c	2015-12-11 12:46:48.975637719 -0500
++++ git-2.5.0/transport.c	2015-12-11 12:46:48.967637661 -0500
+@@ -912,6 +912,20 @@
+ 	return strchr(url, ':') - url;
+ }
+ 
++void transport_check_allowed(const char *type)
++{
++	struct string_list allowed = STRING_LIST_INIT_DUP;
++	const char *v = getenv("GIT_ALLOW_PROTOCOL");
++
++	if (!v)
++		return;
++
++	string_list_split(&allowed, v, ':', -1);
++	if (!unsorted_string_list_has_string(&allowed, type))
++		die("transport '%s' not allowed", type);
++	string_list_clear(&allowed, 0);
++}
++
+ struct transport *transport_get(struct remote *remote, const char *url)
+ {
+ 	const char *helper;
+@@ -943,12 +957,14 @@
+ 	if (helper) {
+ 		transport_helper_init(ret, helper);
+ 	} else if (starts_with(url, "rsync:")) {
++		transport_check_allowed("rsync");
+ 		ret->get_refs_list = get_refs_via_rsync;
+ 		ret->fetch = fetch_objs_via_rsync;
+ 		ret->push = rsync_transport_push;
+ 		ret->smart_options = NULL;
+ 	} else if (url_is_local_not_ssh(url) && is_file(url) && is_bundle(url, 1)) {
+ 		struct bundle_transport_data *data = xcalloc(1, sizeof(*data));
++		transport_check_allowed("file");
+ 		ret->data = data;
+ 		ret->get_refs_list = get_refs_from_bundle;
+ 		ret->fetch = fetch_refs_from_bundle;
+@@ -960,7 +976,10 @@
+ 		|| starts_with(url, "ssh://")
+ 		|| starts_with(url, "git+ssh://")
+ 		|| starts_with(url, "ssh+git://")) {
+-		/* These are builtin smart transports. */
++		/*
++		 * These are builtin smart transports; "allowed" transports
++		 * will be checked individually in git_connect.
++		 */
+ 		struct git_transport_data *data = xcalloc(1, sizeof(*data));
+ 		ret->data = data;
+ 		ret->set_option = NULL;
+Index: git-2.5.0/transport.h
+===================================================================
+--- git-2.5.0.orig/transport.h	2015-12-11 12:46:48.975637719 -0500
++++ git-2.5.0/transport.h	2015-12-11 12:46:48.971637690 -0500
+@@ -133,6 +133,13 @@
+ /* Returns a transport suitable for the url */
+ struct transport *transport_get(struct remote *, const char *);
+ 
++/*
++ * Check whether a transport is allowed by the environment,
++ * and die otherwise. type should generally be the URL scheme,
++ * as described in Documentation/git.txt
++ */
++void transport_check_allowed(const char *type);
++
+ /* Transport options which apply to git:// and scp-style URLs */
+ 
+ /* The program to use on the remote side to send a pack */
diff --git a/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0009-CVE-2015-7545-2.patch b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0009-CVE-2015-7545-2.patch
new file mode 100644
index 0000000..8000e26
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0009-CVE-2015-7545-2.patch
@@ -0,0 +1,112 @@
+From 33cfccbbf35a56e190b79bdec5c85457c952a021 Mon Sep 17 00:00:00 2001
+From: Jeff King <peff@peff.net>
+Date: Wed, 16 Sep 2015 13:13:12 -0400
+Subject: [PATCH] submodule: allow only certain protocols for submodule fetches
+
+Some protocols (like git-remote-ext) can execute arbitrary
+code found in the URL. The URLs that submodules use may come
+from arbitrary sources (e.g., .gitmodules files in a remote
+repository). Let's restrict submodules to fetching from a
+known-good subset of protocols.
+
+Note that we apply this restriction to all submodule
+commands, whether the URL comes from .gitmodules or not.
+This is more restrictive than we need to be; for example, in
+the tests we run:
+
+  git submodule add ext::...
+
+which should be trusted, as the URL comes directly from the
+command line provided by the user. But doing it this way is
+simpler, and makes it much less likely that we would miss a
+case. And since such protocols should be an exception
+(especially because nobody who clones from them will be able
+to update the submodules!), it's not likely to inconvenience
+anyone in practice.
+
+Reported-by: Blake Burkhart <bburky@bburky.com>
+Signed-off-by: Jeff King <peff@peff.net>
+Signed-off-by: Junio C Hamano <gitster@pobox.com>
+
+Upstream-Status: Backport
+
+http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.5.0-1ubuntu0.1.debian.tar.xz
+
+CVE: CVE-2015-7545 #2
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ git-submodule.sh            |  9 +++++++++
+ t/t5815-submodule-protos.sh | 43 +++++++++++++++++++++++++++++++++++++++++++
+ 2 files changed, 52 insertions(+)
+ create mode 100755 t/t5815-submodule-protos.sh
+
+diff --git a/git-submodule.sh b/git-submodule.sh
+index 36797c3..78c2740 100755
+--- a/git-submodule.sh
++++ b/git-submodule.sh
+@@ -22,6 +22,15 @@ require_work_tree
+ wt_prefix=$(git rev-parse --show-prefix)
+ cd_to_toplevel
+ 
++# Restrict ourselves to a vanilla subset of protocols; the URLs
++# we get are under control of a remote repository, and we do not
++# want them kicking off arbitrary git-remote-* programs.
++#
++# If the user has already specified a set of allowed protocols,
++# we assume they know what they're doing and use that instead.
++: ${GIT_ALLOW_PROTOCOL=file:git:http:https:ssh}
++export GIT_ALLOW_PROTOCOL
++
+ command=
+ branch=
+ force=
+diff --git a/t/t5815-submodule-protos.sh b/t/t5815-submodule-protos.sh
+new file mode 100755
+index 0000000..06f55a1
+--- /dev/null
++++ b/t/t5815-submodule-protos.sh
+@@ -0,0 +1,43 @@
++#!/bin/sh
++
++test_description='test protocol whitelisting with submodules'
++. ./test-lib.sh
++. "$TEST_DIRECTORY"/lib-proto-disable.sh
++
++setup_ext_wrapper
++setup_ssh_wrapper
++
++test_expect_success 'setup repository with submodules' '
++	mkdir remote &&
++	git init remote/repo.git &&
++	(cd remote/repo.git && test_commit one) &&
++	# submodule-add should probably trust what we feed it on the cmdline,
++	# but its implementation is overly conservative.
++	GIT_ALLOW_PROTOCOL=ssh git submodule add remote:repo.git ssh-module &&
++	GIT_ALLOW_PROTOCOL=ext git submodule add "ext::fake-remote %S repo.git" ext-module &&
++	git commit -m "add submodules"
++'
++
++test_expect_success 'clone with recurse-submodules fails' '
++	test_must_fail git clone --recurse-submodules . dst
++'
++
++test_expect_success 'setup individual updates' '
++	rm -rf dst &&
++	git clone . dst &&
++	git -C dst submodule init
++'
++
++test_expect_success 'update of ssh allowed' '
++	git -C dst submodule update ssh-module
++'
++
++test_expect_success 'update of ext not allowed' '
++	test_must_fail git -C dst submodule update ext-module
++'
++
++test_expect_success 'user can override whitelist' '
++	GIT_ALLOW_PROTOCOL=ext git -C dst submodule update ext-module
++'
++
++test_done
diff --git a/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0010-CVE-2015-7545-3.patch b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0010-CVE-2015-7545-3.patch
new file mode 100644
index 0000000..b6edc9d
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0010-CVE-2015-7545-3.patch
@@ -0,0 +1,112 @@
+From 5088d3b38775f8ac12d7f77636775b16059b67ef Mon Sep 17 00:00:00 2001
+From: Jeff King <peff@peff.net>
+Date: Tue, 22 Sep 2015 18:03:49 -0400
+Subject: [PATCH] transport: refactor protocol whitelist code
+
+The current callers only want to die when their transport is
+prohibited. But future callers want to query the mechanism
+without dying.
+
+Let's break out a few query functions, and also save the
+results in a static list so we don't have to re-parse for
+each query.
+
+Based-on-a-patch-by: Blake Burkhart <bburky@bburky.com>
+Signed-off-by: Jeff King <peff@peff.net>
+Signed-off-by: Junio C Hamano <gitster@pobox.com>
+
+Upstream-Status: Backport
+
+http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.5.0-1ubuntu0.1.debian.tar.xz
+
+CVE: CVE-2015-7545 #3
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ transport.c | 38 ++++++++++++++++++++++++++++++--------
+ transport.h | 15 +++++++++++++--
+ 2 files changed, 43 insertions(+), 10 deletions(-)
+
+Index: git-2.5.0/transport.c
+===================================================================
+--- git-2.5.0.orig/transport.c	2015-12-11 12:47:09.547784038 -0500
++++ git-2.5.0/transport.c	2015-12-11 12:47:09.543784009 -0500
+@@ -912,18 +912,40 @@
+ 	return strchr(url, ':') - url;
+ }
+ 
+-void transport_check_allowed(const char *type)
++static const struct string_list *protocol_whitelist(void)
+ {
+-	struct string_list allowed = STRING_LIST_INIT_DUP;
+-	const char *v = getenv("GIT_ALLOW_PROTOCOL");
++	static int enabled = -1;
++	static struct string_list allowed = STRING_LIST_INIT_DUP;
++
++	if (enabled < 0) {
++		const char *v = getenv("GIT_ALLOW_PROTOCOL");
++		if (v) {
++			string_list_split(&allowed, v, ':', -1);
++			string_list_sort(&allowed);
++			enabled = 1;
++		} else {
++			enabled = 0;
++		}
++	}
++
++	return enabled ? &allowed : NULL;
++}
+ 
+-	if (!v)
+-		return;
++int is_transport_allowed(const char *type)
++{
++	const struct string_list *allowed = protocol_whitelist();
++	return !allowed || string_list_has_string(allowed, type);
++}
+ 
+-	string_list_split(&allowed, v, ':', -1);
+-	if (!unsorted_string_list_has_string(&allowed, type))
++void transport_check_allowed(const char *type)
++{
++	if (!is_transport_allowed(type))
+ 		die("transport '%s' not allowed", type);
+-	string_list_clear(&allowed, 0);
++}
++
++int transport_restrict_protocols(void)
++{
++	return !!protocol_whitelist();
+ }
+ 
+ struct transport *transport_get(struct remote *remote, const char *url)
+Index: git-2.5.0/transport.h
+===================================================================
+--- git-2.5.0.orig/transport.h	2015-12-11 12:47:09.547784038 -0500
++++ git-2.5.0/transport.h	2015-12-11 12:47:09.543784009 -0500
+@@ -134,12 +134,23 @@
+ struct transport *transport_get(struct remote *, const char *);
+ 
+ /*
++ * Check whether a transport is allowed by the environment. Type should
++ * generally be the URL scheme, as described in Documentation/git.txt
++ */
++int is_transport_allowed(const char *type);
++
++/*
+  * Check whether a transport is allowed by the environment,
+- * and die otherwise. type should generally be the URL scheme,
+- * as described in Documentation/git.txt
++ * and die otherwise.
+  */
+ void transport_check_allowed(const char *type);
+ 
++/*
++ * Returns true if the user has attempted to turn on protocol
++ * restrictions at all.
++ */
++int transport_restrict_protocols(void);
++
+ /* Transport options which apply to git:// and scp-style URLs */
+ 
+ /* The program to use on the remote side to send a pack */
diff --git a/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0011-CVE-2015-7545-4.patch b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0011-CVE-2015-7545-4.patch
new file mode 100644
index 0000000..44dcd1e
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0011-CVE-2015-7545-4.patch
@@ -0,0 +1,150 @@
+Backport of:
+
+From f4113cac0c88b4f36ee6f3abf3218034440a68e3 Mon Sep 17 00:00:00 2001
+From: Blake Burkhart <bburky@bburky.com>
+Date: Tue, 22 Sep 2015 18:06:04 -0400
+Subject: [PATCH] http: limit redirection to protocol-whitelist
+
+Previously, libcurl would follow redirection to any protocol
+it was compiled for support with. This is desirable to allow
+redirection from HTTP to HTTPS. However, it would even
+successfully allow redirection from HTTP to SFTP, a protocol
+that git does not otherwise support at all. Furthermore
+git's new protocol-whitelisting could be bypassed by
+following a redirect within the remote helper, as it was
+only enforced at transport selection time.
+
+This patch limits redirects within libcurl to HTTP, HTTPS,
+FTP and FTPS. If there is a protocol-whitelist present, this
+list is limited to those also allowed by the whitelist. As
+redirection happens from within libcurl, it is impossible
+for an HTTP redirect to a protocol implemented within
+another remote helper.
+
+When the curl version git was compiled with is too old to
+support restrictions on protocol redirection, we warn the
+user if GIT_ALLOW_PROTOCOL restrictions were requested. This
+is a little inaccurate, as even without that variable in the
+environment, we would still restrict SFTP, etc, and we do
+not warn in that case. But anything else means we would
+literally warn every time git accesses an http remote.
+
+This commit includes a test, but it is not as robust as we
+would hope. It redirects an http request to ftp, and checks
+that curl complained about the protocol, which means that we
+are relying on curl's specific error message to know what
+happened. Ideally we would redirect to a working ftp server
+and confirm that we can clone without protocol restrictions,
+and not with them. But we do not have a portable way of
+providing an ftp server, nor any other protocol that curl
+supports (https is the closest, but we would have to deal
+with certificates).
+
+[jk: added test and version warning]
+
+Signed-off-by: Jeff King <peff@peff.net>
+Signed-off-by: Junio C Hamano <gitster@pobox.com>
+
+Upstream-Status: Backport
+
+http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.5.0-1ubuntu0.1.debian.tar.xz
+
+CVE: CVE-2015-7545 #4
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ Documentation/git.txt         |  5 -----
+ http.c                        | 17 +++++++++++++++++
+ t/lib-httpd/apache.conf       |  1 +
+ t/t5812-proto-disable-http.sh |  9 +++++++++
+ 4 files changed, 27 insertions(+), 5 deletions(-)
+
+Index: git-2.5.0/Documentation/git.txt
+===================================================================
+--- git-2.5.0.orig/Documentation/git.txt	2015-12-11 12:47:18.707849212 -0500
++++ git-2.5.0/Documentation/git.txt	2015-12-11 12:47:18.703849183 -0500
+@@ -1095,11 +1095,6 @@
+ 
+ 	  - any external helpers are named by their protocol (e.g., use
+ 	    `hg` to allow the `git-remote-hg` helper)
+-+
+-Note that this controls only git's internal protocol selection.
+-If libcurl is used (e.g., by the `http` transport), it may
+-redirect to other protocols. There is not currently any way to
+-restrict this.
+ 
+ 
+ Discussion[[Discussion]]
+Index: git-2.5.0/http.c
+===================================================================
+--- git-2.5.0.orig/http.c	2015-12-11 12:47:18.707849212 -0500
++++ git-2.5.0/http.c	2015-12-11 12:47:34.171959268 -0500
+@@ -8,6 +8,7 @@
+ #include "credential.h"
+ #include "version.h"
+ #include "pkt-line.h"
++#include "transport.h"
+ #include "gettext.h"
+ 
+ int active_requests;
+@@ -340,6 +341,7 @@
+ static CURL *get_curl_handle(void)
+ {
+ 	CURL *result = curl_easy_init();
++	long allowed_protocols = 0;
+ 
+ 	if (!result)
+ 		die("curl_easy_init failed");
+@@ -399,6 +401,21 @@
+ #elif LIBCURL_VERSION_NUM >= 0x071101
+ 	curl_easy_setopt(result, CURLOPT_POST301, 1);
+ #endif
++#if LIBCURL_VERSION_NUM >= 0x071304
++	if (is_transport_allowed("http"))
++		allowed_protocols |= CURLPROTO_HTTP;
++	if (is_transport_allowed("https"))
++		allowed_protocols |= CURLPROTO_HTTPS;
++	if (is_transport_allowed("ftp"))
++		allowed_protocols |= CURLPROTO_FTP;
++	if (is_transport_allowed("ftps"))
++		allowed_protocols |= CURLPROTO_FTPS;
++	curl_easy_setopt(result, CURLOPT_REDIR_PROTOCOLS, allowed_protocols);
++#else
++	if (transport_restrict_protocols())
++		warning("protocol restrictions not applied to curl redirects because\n"
++			"your curl version is too old (>= 7.19.4)");
++#endif
+ 
+ 	if (getenv("GIT_CURL_VERBOSE"))
+ 		curl_easy_setopt(result, CURLOPT_VERBOSE, 1);
+Index: git-2.5.0/t/lib-httpd/apache.conf
+===================================================================
+--- git-2.5.0.orig/t/lib-httpd/apache.conf	2015-12-11 12:47:18.707849212 -0500
++++ git-2.5.0/t/lib-httpd/apache.conf	2015-12-11 12:47:18.703849183 -0500
+@@ -119,6 +119,7 @@
+ RewriteRule ^/smart-redir-temp/(.*)$ /smart/$1 [R=302]
+ RewriteRule ^/smart-redir-auth/(.*)$ /auth/smart/$1 [R=301]
+ RewriteRule ^/smart-redir-limited/(.*)/info/refs$ /smart/$1/info/refs [R=301]
++RewriteRule ^/ftp-redir/(.*)$ ftp://localhost:1000/$1 [R=302]
+ 
+ <IfDefine SSL>
+ LoadModule ssl_module modules/mod_ssl.so
+Index: git-2.5.0/t/t5812-proto-disable-http.sh
+===================================================================
+--- git-2.5.0.orig/t/t5812-proto-disable-http.sh	2015-12-11 12:47:18.707849212 -0500
++++ git-2.5.0/t/t5812-proto-disable-http.sh	2015-12-11 12:47:18.703849183 -0500
+@@ -16,5 +16,14 @@
+ 
+ test_proto "smart http" http "$HTTPD_URL/smart/repo.git"
+ 
++test_expect_success 'curl redirects respect whitelist' '
++	test_must_fail env GIT_ALLOW_PROTOCOL=http:https \
++		git clone "$HTTPD_URL/ftp-redir/repo.git" 2>stderr &&
++	{
++		test_i18ngrep "ftp.*disabled" stderr ||
++		test_i18ngrep "your curl version is too old"
++	}
++'
++
+ stop_httpd
+ test_done
diff --git a/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0012-CVE-2015-7545-5.patch b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0012-CVE-2015-7545-5.patch
new file mode 100644
index 0000000..76d66ba
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/git/git-2.5.0/0012-CVE-2015-7545-5.patch
@@ -0,0 +1,69 @@
+From b258116462399b318c86165c61a5c7123043cfd4 Mon Sep 17 00:00:00 2001
+From: Blake Burkhart <bburky@bburky.com>
+Date: Tue, 22 Sep 2015 18:06:20 -0400
+Subject: [PATCH] http: limit redirection depth
+
+By default, libcurl will follow circular http redirects
+forever. Let's put a cap on this so that somebody who can
+trigger an automated fetch of an arbitrary repository (e.g.,
+for CI) cannot convince git to loop infinitely.
+
+The value chosen is 20, which is the same default that
+Firefox uses.
+
+Signed-off-by: Jeff King <peff@peff.net>
+Signed-off-by: Junio C Hamano <gitster@pobox.com>
+
+Upstream-Status: Backport
+
+http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.5.0-1ubuntu0.1.debian.tar.xz
+
+CVE: CVE-2015-7545 #5
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ http.c                        | 1 +
+ t/lib-httpd/apache.conf       | 3 +++
+ t/t5812-proto-disable-http.sh | 4 ++++
+ 3 files changed, 8 insertions(+)
+
+Index: git-2.5.0/http.c
+===================================================================
+--- git-2.5.0.orig/http.c	2015-12-11 12:48:02.900163824 -0500
++++ git-2.5.0/http.c	2015-12-11 12:48:02.896163796 -0500
+@@ -396,6 +396,7 @@
+ 	}
+ 
+ 	curl_easy_setopt(result, CURLOPT_FOLLOWLOCATION, 1);
++	curl_easy_setopt(result, CURLOPT_MAXREDIRS, 20);
+ #if LIBCURL_VERSION_NUM >= 0x071301
+ 	curl_easy_setopt(result, CURLOPT_POSTREDIR, CURL_REDIR_POST_ALL);
+ #elif LIBCURL_VERSION_NUM >= 0x071101
+Index: git-2.5.0/t/lib-httpd/apache.conf
+===================================================================
+--- git-2.5.0.orig/t/lib-httpd/apache.conf	2015-12-11 12:48:02.900163824 -0500
++++ git-2.5.0/t/lib-httpd/apache.conf	2015-12-11 12:48:02.896163796 -0500
+@@ -121,6 +121,9 @@
+ RewriteRule ^/smart-redir-limited/(.*)/info/refs$ /smart/$1/info/refs [R=301]
+ RewriteRule ^/ftp-redir/(.*)$ ftp://localhost:1000/$1 [R=302]
+ 
++RewriteRule ^/loop-redir/x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-(.*) /$1 [R=302]
++RewriteRule ^/loop-redir/(.*)$ /loop-redir/x-$1 [R=302]
++
+ <IfDefine SSL>
+ LoadModule ssl_module modules/mod_ssl.so
+ 
+Index: git-2.5.0/t/t5812-proto-disable-http.sh
+===================================================================
+--- git-2.5.0.orig/t/t5812-proto-disable-http.sh	2015-12-11 12:48:02.900163824 -0500
++++ git-2.5.0/t/t5812-proto-disable-http.sh	2015-12-11 12:48:02.896163796 -0500
+@@ -25,5 +25,9 @@
+ 	}
+ '
+ 
++test_expect_success 'curl limits redirects' '
++	test_must_fail git clone "$HTTPD_URL/loop-redir/smart/repo.git"
++'
++
+ stop_httpd
+ test_done
diff --git a/yocto-poky/meta/recipes-devtools/git/git_2.5.0.bb b/yocto-poky/meta/recipes-devtools/git/git_2.5.0.bb
index de686c2..792f258 100644
--- a/yocto-poky/meta/recipes-devtools/git/git_2.5.0.bb
+++ b/yocto-poky/meta/recipes-devtools/git/git_2.5.0.bb
@@ -9,3 +9,11 @@
 SRC_URI[tarball.sha256sum] = "8fa13ba8434ff83d24f57f831d55dbb9046434c266641180a37744facfce72ac"
 SRC_URI[manpages.md5sum] = "134b049e51420a336049aac21c88a75a"
 SRC_URI[manpages.sha256sum] = "745e4e797fe5061e781c880d370b1beb480199127da5acaf4e376e0b09d4d685"
+
+SRC_URI += "\
+    file://0008-CVE-2015-7545-1.patch \
+    file://0009-CVE-2015-7545-2.patch \
+    file://0010-CVE-2015-7545-3.patch \
+    file://0011-CVE-2015-7545-4.patch \
+    file://0012-CVE-2015-7545-5.patch \
+    "
diff --git a/yocto-poky/meta/recipes-devtools/guile/guile_2.0.11.bb b/yocto-poky/meta/recipes-devtools/guile/guile_2.0.11.bb
index 4869694..98b465b 100644
--- a/yocto-poky/meta/recipes-devtools/guile/guile_2.0.11.bb
+++ b/yocto-poky/meta/recipes-devtools/guile/guile_2.0.11.bb
@@ -39,7 +39,11 @@
 # add guile-native only to the target recipe's DEPENDS
 DEPENDS_append_class-target = " guile-native libatomic-ops"
 
-RDEPENDS_${PN}_append_libc-glibc_class-target = "glibc-gconv-iso8859-1"
+# The comment of the script guile-config said it has been deprecated but we should
+# at least add the required dependency to make it work since we still provide the script.
+RDEPENDS_${PN} = "pkgconfig"
+
+RDEPENDS_${PN}_append_libc-glibc_class-target = " glibc-gconv-iso8859-1"
 
 EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix --without-libreadline-prefix', ''][bb.data.inherits_class('native',d)]}"
 
@@ -77,6 +81,12 @@
 		GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache
 }
 
+do_install_append_class-target() {
+	# cleanup buildpaths in scripts
+	sed -i -e 's:${STAGING_DIR_NATIVE}::' ${D}/usr/bin/guile-config
+	sed -i -e 's:${STAGING_DIR_HOST}::' ${D}/usr/bin/guile-snarf
+}
+
 SYSROOT_PREPROCESS_FUNCS = "guile_cross_config"
 
 guile_cross_config() {
@@ -109,3 +119,7 @@
                 find ${STAGING_DIR_TARGET}/${libdir}/guile/2.0/ccache -type f | xargs touch
 	fi
 }
+
+# http://errors.yoctoproject.org/Errors/Details/20491/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
diff --git a/yocto-poky/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.2.bb b/yocto-poky/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.2.bb
index 92f4d69..042695b 100644
--- a/yocto-poky/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.2.bb
+++ b/yocto-poky/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.2.bb
@@ -30,5 +30,4 @@
                         ${bindir}/decode-dimms \
                         ${bindir}/decode-vaio \
                        "
-RDEPENDS_${PN} += "${PN}-misc"
-RDEPENDS_${PN}-misc += "perl"
+RDEPENDS_${PN}-misc = "${PN} perl"
diff --git a/yocto-poky/meta/recipes-devtools/installer/adt-installer_1.0.bb b/yocto-poky/meta/recipes-devtools/installer/adt-installer_1.0.bb
index 22e3850..4c2f097 100644
--- a/yocto-poky/meta/recipes-devtools/installer/adt-installer_1.0.bb
+++ b/yocto-poky/meta/recipes-devtools/installer/adt-installer_1.0.bb
@@ -82,7 +82,6 @@
 	cp ${WORKDIR}/adt_installer.tar.bz2 ${ADT_DEPLOY}
 }
 
-do_populate_adt[nostamp] = "1"
 do_configure[noexec] = "1"
 do_compile[noexec] = "1"
 do_package[noexec] = "1"
diff --git a/yocto-poky/meta/recipes-devtools/libtool/libtool-2.4.6.inc b/yocto-poky/meta/recipes-devtools/libtool/libtool-2.4.6.inc
index a977c73..de06ccb 100644
--- a/yocto-poky/meta/recipes-devtools/libtool/libtool-2.4.6.inc
+++ b/yocto-poky/meta/recipes-devtools/libtool/libtool-2.4.6.inc
@@ -19,6 +19,7 @@
            file://fix-resolve-lt-sysroot.patch \
            file://nohardcodepaths.patch \
            file://unwind-opt-parsing.patch \
+           file://0001-libtool-Fix-support-for-NIOS2-processor.patch \
           "
 
 SRC_URI[md5sum] = "addf44b646ddb4e3919805aa88fa7c5e"
diff --git a/yocto-poky/meta/recipes-devtools/libtool/libtool/0001-libtool-Fix-support-for-NIOS2-processor.patch b/yocto-poky/meta/recipes-devtools/libtool/libtool/0001-libtool-Fix-support-for-NIOS2-processor.patch
new file mode 100644
index 0000000..bbd36d8
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/libtool/libtool/0001-libtool-Fix-support-for-NIOS2-processor.patch
@@ -0,0 +1,68 @@
+From df2cd898e48208f26320d40c3ed6b19c75c27142 Mon Sep 17 00:00:00 2001
+From: Marek Vasut <marex@denx.de>
+Date: Thu, 17 Sep 2015 00:43:15 +0200
+Subject: [PATCH] libtool: Fix support for NIOS2 processor
+
+The name of the system contains the string "nios2". This string
+is caught by the some of the greedy checks for OS/2 in libtool,
+in particular the *os2* branches of switch statements match for
+the nios2 string, which results in incorrect behavior of libtool.
+
+This patch adds an explicit check for *nios2* before the *os2*
+checks to prevent the OS/2 check incorrectly trapping the nios2
+as well.
+
+Signed-off-by: Marek Vasut <marex@denx.de>
+Upstream-Status: Submitted
+---
+ build-aux/ltmain.in | 20 ++++++++++++++++++++
+ 1 file changed, 20 insertions(+)
+
+diff --git a/build-aux/ltmain.in b/build-aux/ltmain.in
+index d5cf07a..4164284 100644
+--- a/build-aux/ltmain.in
++++ b/build-aux/ltmain.in
+@@ -504,6 +504,12 @@ libtool_validate_options ()
+     test : = "$debug_cmd" || func_append preserve_args " --debug"
+ 
+     case $host in
++      # For NIOS2, we want to make sure that it's not caught by the
++      # more general OS/2 check below. Otherwise, NIOS2 is the same
++      # as the default option.
++      *nios2*)
++        opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
++        ;;
+       # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452
+       # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788
+       *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*)
+@@ -6220,6 +6226,15 @@ func_mode_link ()
+ 	if test -n "$library_names" &&
+ 	   { test no = "$use_static_libs" || test -z "$old_library"; }; then
+ 	  case $host in
++	  *nios2*)
++	    # For NIOS2, we want to make sure that it's not caught by the
++	    # more general OS/2 check below. Otherwise, NIOS2 is the same
++	    # as the default option.
++	    if test no = "$installed"; then
++	      func_append notinst_deplibs " $lib"
++	      need_relink=yes
++	    fi
++	    ;;
+ 	  *cygwin* | *mingw* | *cegcc* | *os2*)
+ 	      # No point in relinking DLLs because paths are not encoded
+ 	      func_append notinst_deplibs " $lib"
+@@ -6290,6 +6305,11 @@ func_mode_link ()
+ 	    elif test -n "$soname_spec"; then
+ 	      # bleh windows
+ 	      case $host in
++	      *nios2*)
++		# For NIOS2, we want to make sure that it's not caught by the
++		# more general OS/2 check below. Otherwise, NIOS2 is the same
++		# as the default option.
++		;;
+ 	      *cygwin* | mingw* | *cegcc* | *os2*)
+ 	        func_arith $current - $age
+ 		major=$func_arith_result
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb b/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb
index 8950360..546f7f2 100644
--- a/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb
+++ b/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb
@@ -3,12 +3,12 @@
 LICENSE = "GPLv2"
 LIC_FILES_CHKSUM = "file://mmc.c;beginline=1;endline=17;md5=d7747fc87f1eb22b946ef819969503f0"
 
-BRANCH ?= "master"
+SRCBRANCH ?= "master"
 SRCREV = "f4eb241519f8d500ce6068a70d2389be39ac5189"
 
 PV = "0.1"
 
-SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branch=${BRANCH} \
+SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branch=${SRCBRANCH} \
            file://0001-mmc.h-don-t-include-asm-generic-int-ll64.h.patch"
 
 S = "${WORKDIR}/git"
diff --git a/yocto-poky/meta/recipes-devtools/mtools/mtools_4.0.18.bb b/yocto-poky/meta/recipes-devtools/mtools/mtools_4.0.18.bb
index 52decfd..24c9d49 100644
--- a/yocto-poky/meta/recipes-devtools/mtools/mtools_4.0.18.bb
+++ b/yocto-poky/meta/recipes-devtools/mtools/mtools_4.0.18.bb
@@ -45,3 +45,8 @@
     mkdir -p ${D}/${bindir}
     mkdir -p ${D}/${datadir}
 }
+
+do_install_append_class-native () {
+    create_wrapper ${D}${bindir}/mcopy \
+        GCONV_PATH=${libdir}/gconv
+}
diff --git a/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb b/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb
index a1f115c..60a7d2e 100644
--- a/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb
+++ b/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb
@@ -53,3 +53,7 @@
 FILES_${PN} += "${datadir}/OpenSP/"
 
 BBCLASSEXTEND = "native"
+
+# http://errors.yoctoproject.org/Errors/Details/20489/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-libopkg-include-stdio.h-for-getting-FILE-defined.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-libopkg-include-stdio.h-for-getting-FILE-defined.patch
new file mode 100644
index 0000000..acc1338
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-libopkg-include-stdio.h-for-getting-FILE-defined.patch
@@ -0,0 +1,45 @@
+From 58f4d3d63cd6097154205ea7ee042005036659b3 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 10 Sep 2015 21:43:32 -0700
+Subject: [PATCH] libopkg: include stdio.h for getting FILE defined
+To: opkg-devel@googlegroups.com
+Cc: paul@paulbarker.me.uk
+
+For some libc(musl) stdio.h may not get included indirectly which means
+we need to mention it in explicit include list
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Submitted
+
+ libopkg/opkg_verify.c | 1 +
+ libopkg/pkg_src.c     | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/libopkg/opkg_verify.c b/libopkg/opkg_verify.c
+index 41dc3f4..a71591d 100644
+--- a/libopkg/opkg_verify.c
++++ b/libopkg/opkg_verify.c
+@@ -18,6 +18,7 @@
+ 
+ #include <malloc.h>
+ #include <string.h>
++#include <stdio.h>
+ 
+ #include "file_util.h"
+ #include "opkg_conf.h"
+diff --git a/libopkg/pkg_src.c b/libopkg/pkg_src.c
+index e31ec21..6b49a00 100644
+--- a/libopkg/pkg_src.c
++++ b/libopkg/pkg_src.c
+@@ -20,6 +20,7 @@
+ 
+ #include <malloc.h>
+ #include <unistd.h>
++#include <stdio.h>
+ 
+ #include "file_util.h"
+ #include "opkg_conf.h"
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch
new file mode 100644
index 0000000..255021b
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch
@@ -0,0 +1,34 @@
+From a4628a6171f393add9a2b287483ca39bb72b4dd6 Mon Sep 17 00:00:00 2001
+From: Jonathan Liu <net147@gmail.com>
+Date: Mon, 21 Sep 2015 20:23:23 +1000
+Subject: [PATCH] opkg_conf: create opkg.lock in /run instead of /var/run
+
+This avoids a "Could not unlink" warning when extracting a /var/run
+symbolic link pointing to /run from a package as it is unable to
+unlink the /var/run directory when it contains opkg.lock.
+
+This also fixes an issue where /var/run is created as a directory
+instead of a symbolic link to /run.
+
+Upstream-Status: Inappropriate [OE-Specific]
+Signed-off-by: Jonathan Liu <net147@gmail.com>
+---
+ libopkg/opkg_conf.h | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/libopkg/opkg_conf.h b/libopkg/opkg_conf.h
+index 7bca948..5a1bc44 100644
+--- a/libopkg/opkg_conf.h
++++ b/libopkg/opkg_conf.h
+@@ -40,7 +40,7 @@ extern "C" {
+ #define OPKG_CONF_DEFAULT_STATUS_FILE   "/var/lib/opkg/status"
+ #define OPKG_CONF_DEFAULT_CACHE_DIR     "/var/cache/opkg"
+ #define OPKG_CONF_DEFAULT_CONF_FILE_DIR "/etc/opkg"
+-#define OPKG_CONF_DEFAULT_LOCK_FILE     "/var/run/opkg.lock"
++#define OPKG_CONF_DEFAULT_LOCK_FILE     "/run/opkg.lock"
+ 
+ /* In case the config file defines no dest */
+ #define OPKG_CONF_DEFAULT_DEST_NAME "root"
+-- 
+2.5.0
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-string_util-New-file-with-bin_to_hex-function.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-string_util-New-file-with-bin_to_hex-function.patch
new file mode 100644
index 0000000..fb3ac46
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-string_util-New-file-with-bin_to_hex-function.patch
@@ -0,0 +1,122 @@
+From 646b80024567a6245c598be3374653fa1fa09a12 Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Sat, 7 Nov 2015 10:23:49 +0000
+Subject: [PATCH 1/4] string_util: New file with bin_to_hex function
+
+This function does very simple conversion from binary data to a hex string.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+Signed-off-by: Alejandro del Castillo <alejandro.delcastillo@ni.com>
+
+Upstream-Status: Accepted
+---
+ libopkg/Makefile.am   |  4 ++--
+ libopkg/string_util.c | 42 ++++++++++++++++++++++++++++++++++++++++++
+ libopkg/string_util.h | 24 ++++++++++++++++++++++++
+ 3 files changed, 68 insertions(+), 2 deletions(-)
+ create mode 100644 libopkg/string_util.c
+ create mode 100644 libopkg/string_util.h
+
+diff --git a/libopkg/Makefile.am b/libopkg/Makefile.am
+index ee3fbee..3e62c24 100644
+--- a/libopkg/Makefile.am
++++ b/libopkg/Makefile.am
+@@ -13,7 +13,7 @@ opkg_headers = active_list.h cksum_list.h conffile.h conffile_list.h \
+ 	pkg_depends.h pkg_dest.h pkg_dest_list.h pkg_extract.h pkg_hash.h \
+ 	pkg_parse.h pkg_src.h pkg_src_list.h pkg_vec.h release.h \
+ 	release_parse.h sha256.h sprintf_alloc.h str_list.h void_list.h \
+-	xregex.h xsystem.h xfuncs.h opkg_verify.h
++	xregex.h xsystem.h xfuncs.h opkg_verify.h string_util.h
+ 
+ opkg_sources = opkg_cmd.c opkg_configure.c opkg_download.c \
+ 	opkg_install.c opkg_remove.c opkg_conf.c release.c \
+@@ -23,7 +23,7 @@ opkg_sources = opkg_cmd.c opkg_configure.c opkg_download.c \
+ 	pkg_src.c pkg_src_list.c str_list.c void_list.c active_list.c \
+ 	file_util.c opkg_message.c md5.c parse_util.c cksum_list.c \
+ 	sprintf_alloc.c xregex.c xsystem.c xfuncs.c opkg_archive.c \
+-	opkg_verify.c
++	opkg_verify.c string_util.c
+ 
+ if HAVE_CURL
+ opkg_sources += opkg_download_curl.c
+diff --git a/libopkg/string_util.c b/libopkg/string_util.c
+new file mode 100644
+index 0000000..822cab6
+--- /dev/null
++++ b/libopkg/string_util.c
+@@ -0,0 +1,42 @@
++/* vi: set expandtab sw=4 sts=4: */
++/* string_util.c - convenience routines for common string operations
++
++   Copyright (C) 2015 Paul Barker
++
++   This program is free software; you can redistribute it and/or
++   modify it under the terms of the GNU General Public License as
++   published by the Free Software Foundation; either version 2, or (at
++   your option) any later version.
++
++   This program is distributed in the hope that it will be useful, but
++   WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   General Public License for more details.
++*/
++
++#include "config.h"
++
++#include "string_util.h"
++#include "xfuncs.h"
++
++char *bin_to_hex(const void *bin_data, size_t len)
++{
++    const unsigned char *src = (const unsigned char *)bin_data;
++    char *buf = xmalloc(2 * len + 1);
++    int i;
++
++    static const unsigned char bin2hex[16] = {
++        '0', '1', '2', '3',
++        '4', '5', '6', '7',
++        '8', '9', 'a', 'b',
++        'c', 'd', 'e', 'f'
++    };
++
++    for (i = 0; i < len; i++) {
++        buf[i * 2] = bin2hex[src[i] >> 4];
++        buf[i * 2 + 1] = bin2hex[src[i] & 0xf];
++    }
++
++    buf[len * 2] = '\0';
++    return buf;
++}
+diff --git a/libopkg/string_util.h b/libopkg/string_util.h
+new file mode 100644
+index 0000000..a920e2a
+--- /dev/null
++++ b/libopkg/string_util.h
+@@ -0,0 +1,24 @@
++/* vi: set expandtab sw=4 sts=4: */
++/* string_util.h - convenience routines for common file operations
++
++   Copyright (C) 2015 Paul Barker
++
++   This program is free software; you can redistribute it and/or
++   modify it under the terms of the GNU General Public License as
++   published by the Free Software Foundation; either version 2, or (at
++   your option) any later version.
++
++   This program is distributed in the hope that it will be useful, but
++   WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
++   General Public License for more details.
++*/
++
++#ifndef STRING_UTIL_H
++#define STRING_UTIL_H
++
++#include <stddef.h>
++
++char *bin_to_hex(const void *bin_data, size_t len);
++
++#endif /* STRING_UTIL_H */
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0002-md5-Add-md5_to_string-function.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0002-md5-Add-md5_to_string-function.patch
new file mode 100644
index 0000000..3b823c6
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0002-md5-Add-md5_to_string-function.patch
@@ -0,0 +1,110 @@
+From ecad8afab377d8be95eeaafc08afa228c8e030c3 Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Sat, 7 Nov 2015 10:23:50 +0000
+Subject: [PATCH 2/4] md5: Add md5_to_string function
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+Signed-off-by: Alejandro del Castillo <alejandro.delcastillo@ni.com>
+
+Upstream-Status: Accepted
+---
+ libopkg/file_util.c | 28 +++-------------------------
+ libopkg/md5.c       |  7 +++++++
+ libopkg/md5.h       |  3 +++
+ 3 files changed, 13 insertions(+), 25 deletions(-)
+
+diff --git a/libopkg/file_util.c b/libopkg/file_util.c
+index 5eff469..cb3dbf0 100644
+--- a/libopkg/file_util.c
++++ b/libopkg/file_util.c
+@@ -349,27 +349,13 @@ int file_mkdir_hier(const char *path, long mode)
+ 
+ char *file_md5sum_alloc(const char *file_name)
+ {
+-    static const int md5sum_bin_len = 16;
+-    static const int md5sum_hex_len = 32;
+-
+-    static const unsigned char bin2hex[16] = {
+-        '0', '1', '2', '3',
+-        '4', '5', '6', '7',
+-        '8', '9', 'a', 'b',
+-        'c', 'd', 'e', 'f'
+-    };
+-
+-    int i, err;
++    int err;
+     FILE *file;
+-    char *md5sum_hex;
+-    unsigned char md5sum_bin[md5sum_bin_len];
+-
+-    md5sum_hex = xcalloc(1, md5sum_hex_len + 1);
++    unsigned char md5sum_bin[16];
+ 
+     file = fopen(file_name, "r");
+     if (file == NULL) {
+         opkg_perror(ERROR, "Failed to open file %s", file_name);
+-        free(md5sum_hex);
+         return NULL;
+     }
+ 
+@@ -377,20 +363,12 @@ char *file_md5sum_alloc(const char *file_name)
+     if (err) {
+         opkg_msg(ERROR, "Could't compute md5sum for %s.\n", file_name);
+         fclose(file);
+-        free(md5sum_hex);
+         return NULL;
+     }
+ 
+     fclose(file);
+ 
+-    for (i = 0; i < md5sum_bin_len; i++) {
+-        md5sum_hex[i * 2] = bin2hex[md5sum_bin[i] >> 4];
+-        md5sum_hex[i * 2 + 1] = bin2hex[md5sum_bin[i] & 0xf];
+-    }
+-
+-    md5sum_hex[md5sum_hex_len] = '\0';
+-
+-    return md5sum_hex;
++    return md5_to_string(md5sum_bin);
+ }
+ 
+ #ifdef HAVE_SHA256
+diff --git a/libopkg/md5.c b/libopkg/md5.c
+index d476b8b..bc2b229 100644
+--- a/libopkg/md5.c
++++ b/libopkg/md5.c
+@@ -30,6 +30,8 @@
+ #include <string.h>
+ #include <sys/types.h>
+ 
++#include "string_util.h"
++
+ #if USE_UNLOCKED_IO
+ #include "unlocked-io.h"
+ #endif
+@@ -431,3 +433,8 @@ void md5_process_block(const void *buffer, size_t len, struct md5_ctx *ctx)
+     ctx->C = C;
+     ctx->D = D;
+ }
++
++char *md5_to_string(const void *md5sum_bin)
++{
++    return bin_to_hex(md5sum_bin, 16);
++}
+diff --git a/libopkg/md5.h b/libopkg/md5.h
+index 01320f5..2a7274d 100644
+--- a/libopkg/md5.h
++++ b/libopkg/md5.h
+@@ -118,6 +118,9 @@ extern int __md5_stream(FILE * stream, void *resblock) __THROW;
+ extern void *__md5_buffer(const char *buffer, size_t len,
+                           void *resblock) __THROW;
+ 
++/* Convert a binary md5sum value to an ASCII string. */
++char *md5_to_string(const void *md5sum_bin);
++
+ #ifdef __cplusplus
+ }
+ #endif
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0003-sha256-Add-sha256_to_string-function.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0003-sha256-Add-sha256_to_string-function.patch
new file mode 100644
index 0000000..16e82d7
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0003-sha256-Add-sha256_to_string-function.patch
@@ -0,0 +1,110 @@
+From 92e8378103bba3b91f2dec4e6fda3e1755a7c0fd Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Sat, 7 Nov 2015 10:23:51 +0000
+Subject: [PATCH 3/4] sha256: Add sha256_to_string function
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+Signed-off-by: Alejandro del Castillo <alejandro.delcastillo@ni.com>
+
+Upstream-Status: Accepted
+---
+ libopkg/file_util.c | 28 +++-------------------------
+ libopkg/sha256.c    |  7 +++++++
+ libopkg/sha256.h    |  3 +++
+ 3 files changed, 13 insertions(+), 25 deletions(-)
+
+diff --git a/libopkg/file_util.c b/libopkg/file_util.c
+index cb3dbf0..864aedb 100644
+--- a/libopkg/file_util.c
++++ b/libopkg/file_util.c
+@@ -374,27 +374,13 @@ char *file_md5sum_alloc(const char *file_name)
+ #ifdef HAVE_SHA256
+ char *file_sha256sum_alloc(const char *file_name)
+ {
+-    static const int sha256sum_bin_len = 32;
+-    static const int sha256sum_hex_len = 64;
+-
+-    static const unsigned char bin2hex[16] = {
+-        '0', '1', '2', '3',
+-        '4', '5', '6', '7',
+-        '8', '9', 'a', 'b',
+-        'c', 'd', 'e', 'f'
+-    };
+-
+-    int i, err;
++    int err;
+     FILE *file;
+-    char *sha256sum_hex;
+-    unsigned char sha256sum_bin[sha256sum_bin_len];
+-
+-    sha256sum_hex = xcalloc(1, sha256sum_hex_len + 1);
++    unsigned char sha256sum_bin[32];
+ 
+     file = fopen(file_name, "r");
+     if (file == NULL) {
+         opkg_perror(ERROR, "Failed to open file %s", file_name);
+-        free(sha256sum_hex);
+         return NULL;
+     }
+ 
+@@ -402,20 +388,12 @@ char *file_sha256sum_alloc(const char *file_name)
+     if (err) {
+         opkg_msg(ERROR, "Could't compute sha256sum for %s.\n", file_name);
+         fclose(file);
+-        free(sha256sum_hex);
+         return NULL;
+     }
+ 
+     fclose(file);
+ 
+-    for (i = 0; i < sha256sum_bin_len; i++) {
+-        sha256sum_hex[i * 2] = bin2hex[sha256sum_bin[i] >> 4];
+-        sha256sum_hex[i * 2 + 1] = bin2hex[sha256sum_bin[i] & 0xf];
+-    }
+-
+-    sha256sum_hex[sha256sum_hex_len] = '\0';
+-
+-    return sha256sum_hex;
++    return sha256_to_string(sha256sum_bin);
+ }
+ 
+ #endif
+diff --git a/libopkg/sha256.c b/libopkg/sha256.c
+index 0816858..bceed72 100644
+--- a/libopkg/sha256.c
++++ b/libopkg/sha256.c
+@@ -29,6 +29,8 @@
+ #include <stddef.h>
+ #include <string.h>
+ 
++#include "string_util.h"
++
+ #if USE_UNLOCKED_IO
+ #include "unlocked-io.h"
+ #endif
+@@ -517,3 +519,8 @@ void sha256_process_block(const void *buffer, size_t len,
+         h = ctx->state[7] += h;
+     }
+ }
++
++char *sha256_to_string(const void *sha256sum_bin)
++{
++    return bin_to_hex(sha256sum_bin, 32);
++}
+diff --git a/libopkg/sha256.h b/libopkg/sha256.h
+index 734ab54..0d1e9e5 100644
+--- a/libopkg/sha256.h
++++ b/libopkg/sha256.h
+@@ -85,6 +85,9 @@ extern int sha224_stream(FILE * stream, void *resblock);
+ extern void *sha256_buffer(const char *buffer, size_t len, void *resblock);
+ extern void *sha224_buffer(const char *buffer, size_t len, void *resblock);
+ 
++/* Convert a binary sha256sum value to an ASCII string. */
++char *sha256_to_string(const void *sha256sum_bin);
++
+ #ifdef __cplusplus
+ }
+ #endif
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg/0004-opkg_download-Use-short-cache-file-name.patch b/yocto-poky/meta/recipes-devtools/opkg/opkg/0004-opkg_download-Use-short-cache-file-name.patch
new file mode 100644
index 0000000..7ea661d
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg/0004-opkg_download-Use-short-cache-file-name.patch
@@ -0,0 +1,85 @@
+From 61636f15718edc7ea17b91f22f1d97b905eaf951 Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Sat, 7 Nov 2015 10:23:52 +0000
+Subject: [PATCH 4/4] opkg_download: Use short cache file name
+
+Source URIs can be very long. The cache directory itself may already have a very
+long path, especially if we're installing packages into an offline rootfs.
+Therefore it's not a good idea to simply tag the source URI onto the cache
+directory path to create a cache file name.
+
+To create shorter cache file names which are deterministic and very likely to be
+unique, we use the md5sum of the source URI along with the basename of the
+source URI. The basename is length limited to ensure that the resulting
+filename length is always reasonable.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+Signed-off-by: Alejandro del Castillo <alejandro.delcastillo@ni.com>
+
+Upstream-Status: Accepted
+---
+ libopkg/opkg_download.c | 35 ++++++++++++++++++++++++++++-------
+ 1 file changed, 28 insertions(+), 7 deletions(-)
+
+diff --git a/libopkg/opkg_download.c b/libopkg/opkg_download.c
+index e9b86a5..a37b10d 100644
+--- a/libopkg/opkg_download.c
++++ b/libopkg/opkg_download.c
+@@ -29,10 +29,18 @@
+ #include "opkg_verify.h"
+ #include "opkg_utils.h"
+ 
++#include "md5.h"
+ #include "sprintf_alloc.h"
+ #include "file_util.h"
+ #include "xfuncs.h"
+ 
++/* Limit the short file name used to generate cache file names to 90 characters
++ * so that when added to the md5sum (32 characters) and an underscore, the
++ * resulting length is below 128 characters. The maximum file name length
++ * differs between plaforms but 128 characters should be reasonable.
++ */
++#define MAX_SHORT_FILE_NAME_LENGTH 90
++
+ static int opkg_download_set_env()
+ {
+     int r;
+@@ -135,15 +143,28 @@ int opkg_download_internal(const char *src, const char *dest,
+  */
+ char *get_cache_location(const char *src)
+ {
+-    char *cache_name = xstrdup(src);
+-    char *cache_location, *p;
++    unsigned char md5sum_bin[16];
++    char *md5sum_hex;
++    char *cache_location;
++    char *short_file_name;
++    char *tmp = xstrdup(src);
+ 
+-    for (p = cache_name; *p; p++)
+-        if (*p == '/')
+-            *p = '_';
++    md5_buffer(src, strlen(src), md5sum_bin);
++    md5sum_hex = md5_to_string(md5sum_bin);
+ 
+-    sprintf_alloc(&cache_location, "%s/%s", opkg_config->cache_dir, cache_name);
+-    free(cache_name);
++    /* Generate a short file name which will be used along with an md5sum of the
++     * full src URI in the cache file name. This short file name is limited to
++     * MAX_SHORT_FILE_NAME_LENGTH to ensure that the total cache file name
++     * length is reasonable.
++     */
++    short_file_name = basename(tmp);
++    if (strlen(short_file_name) > MAX_SHORT_FILE_NAME_LENGTH)
++        short_file_name[MAX_SHORT_FILE_NAME_LENGTH] = '\0';
++
++    sprintf_alloc(&cache_location, "%s/%s_%s", opkg_config->cache_dir,
++                  md5sum_hex, short_file_name);
++    free(md5sum_hex);
++    free(tmp);
+     return cache_location;
+ }
+ 
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.0.bb b/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.0.bb
index f4dbb2d..5ad3e92 100644
--- a/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.0.bb
+++ b/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.0.bb
@@ -15,6 +15,12 @@
            file://opkg-configure.service \
            file://opkg.conf \
            file://0001-opkg_archive-add-support-for-empty-compressed-files.patch \
+           file://0001-libopkg-include-stdio.h-for-getting-FILE-defined.patch \
+           file://0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch \
+           file://0001-string_util-New-file-with-bin_to_hex-function.patch \
+           file://0002-md5-Add-md5_to_string-function.patch \
+           file://0003-sha256-Add-sha256_to_string-function.patch \
+           file://0004-opkg_download-Use-short-cache-file-name.patch \
 "
 
 SRC_URI[md5sum] = "3412cdc71d78b98facc84b19331ec64e"
diff --git a/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.0.bb b/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.0.bb
index a9a1cab..b4dda31 100644
--- a/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.0.bb
+++ b/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.0.bb
@@ -28,16 +28,15 @@
 		-Dcf_by="Open Embedded" \
 		-Dprefix=${prefix} \
 		-Dvendorprefix=${prefix} \
-		-Dvendorprefix=${prefix} \
 		-Dsiteprefix=${prefix} \
 		\
 		-Dbin=${STAGING_BINDIR}/${PN} \
 		-Dprivlib=${STAGING_LIBDIR}/perl/${PV} \
 		-Darchlib=${STAGING_LIBDIR}/perl/${PV} \
-		-Dvendorlib=${STAGING_LIBDIR}/perl/${PV} \
-		-Dvendorarch=${STAGING_LIBDIR}/perl/${PV} \
-		-Dsitelib=${STAGING_LIBDIR}/perl/${PV} \
-		-Dsitearch=${STAGING_LIBDIR}/perl/${PV} \
+		-Dvendorlib=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \
+		-Dvendorarch=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \
+		-Dsitelib=${STAGING_LIBDIR}/perl/site_perl/${PV} \
+		-Dsitearch=${STAGING_LIBDIR}/perl/site_perl/${PV} \
 		\
 		-Duseshrplib \
 		-Dusethreads \
@@ -95,8 +94,11 @@
 		install $i ${D}${libdir}/perl/${PV}/CORE
 	done
 
-	create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}'
-	create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl${STAGING_LIBDIR}/perl:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}'
+	# Those wrappers mean that perl installed from sstate (which may change
+	# path location) works and that in the nativesdk case, the SDK can be
+	# installed to a different location from the one it was built for.
+	create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}'
+	create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}'
 
 	# Use /usr/bin/env nativeperl for the perl script.
 	for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do
diff --git a/yocto-poky/meta/recipes-devtools/perl/perl/perl-errno-generation-gcc5.patch b/yocto-poky/meta/recipes-devtools/perl/perl/perl-errno-generation-gcc5.patch
new file mode 100644
index 0000000..efbc55d
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/perl/perl/perl-errno-generation-gcc5.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Inappropriate [embedded specific]
+
+The upstream code assumes that the compiler version used to compile miniperl/perl-native
+is the same as the one being used to build the perl binary. Since most people are not running
+systems with gcc 5, it is unlikely that it will work on any supported host. Switch out gccversion
+for the version extracted from $CC --version.
+
+--- perl-5.22.0/ext/Errno/Errno_pm.PL	2015-10-19 18:01:20.622143786 -0400
++++ perl-5.22.0-fixed/ext/Errno/Errno_pm.PL	2015-10-19 17:50:35.662137367 -0400
+@@ -224,9 +224,12 @@
+ 
+     {	# BeOS (support now removed) did not enter this block
+     # invoke CPP and read the output
++        my $compiler = $ENV{'CC'};
++        my $compiler_out = `$compiler --version`;
++        my @compiler_version = split / /,$compiler_out; 
+ 
+ 	my $inhibit_linemarkers = '';
+-	if ($Config{gccversion} =~ /\A(\d+)\./ and $1 >= 5) {
++	if (@compiler_version[2] =~ /\A(\d+)\./ and $1 >= 5) {
+ 	    # GCC 5.0 interleaves expanded macros with line numbers breaking
+ 	    # each line into multiple lines. RT#123784
+ 	    $inhibit_linemarkers = ' -P';
diff --git a/yocto-poky/meta/recipes-devtools/perl/perl_5.22.0.bb b/yocto-poky/meta/recipes-devtools/perl/perl_5.22.0.bb
index 3ce7849..9df8d04 100644
--- a/yocto-poky/meta/recipes-devtools/perl/perl_5.22.0.bb
+++ b/yocto-poky/meta/recipes-devtools/perl/perl_5.22.0.bb
@@ -62,6 +62,7 @@
         file://ext-ODBM_File-hints-linux.pl-link-libgdbm_compat.patch \
         file://ext-ODBM_File-t-odbm.t-fix-the-path-of-dbmt_common.p.patch \
         file://perl-PathTools-don-t-filter-out-blib-from-INC.patch \
+        file://perl-errno-generation-gcc5.patch \
 "
 
 # Fix test case issues
@@ -245,7 +246,7 @@
 
 do_install_append_class-nativesdk () {
         create_wrapper ${D}${bindir}/perl \
-            PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/site_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/vendor_perl/${PV}'
+            PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/site_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/vendor_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}'
 }
 
 PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess"
diff --git a/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb b/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb
index 79a5f50..e223ef6 100644
--- a/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb
+++ b/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb
@@ -8,7 +8,7 @@
 runtime and thus programs come up faster."
 LICENSE = "GPLv2"
 LIC_FILES_CHKSUM = "file://COPYING;md5=c93c0550bd3173f4504b2cbd8991e50b"
-SRCREV = "cdee5a4dd226cc5e9f30f370067a9031f398ef3c"
+SRCREV = "927979bbd115eeb8a75db3231906ef6aca4c4eb6"
 PV = "1.0+git${SRCPV}"
 
 #
@@ -35,7 +35,7 @@
 TARGET_OS_ORIG := "${TARGET_OS}"
 OVERRIDES_append = ":${TARGET_OS_ORIG}"
 
-S = "${WORKDIR}/git/trunk"
+S = "${WORKDIR}/git"
 
 inherit autotools 
 
diff --git a/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.3.bb b/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.4.bb
similarity index 77%
rename from yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.3.bb
rename to yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.4.bb
index 1e9ef3b..d68e0af 100644
--- a/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.3.bb
+++ b/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.7.4.bb
@@ -6,8 +6,8 @@
     file://fallback-group \
 "
 
-SRC_URI[md5sum] = "2bd0a44eadd4713e90ad8c152eea77aa"
-SRC_URI[sha256sum] = "e9fc3922f8feb97839b50d14eb1987afdc8f22cdcac93119323cccd5f8444652"
+SRC_URI[md5sum] = "6e4b59a346d08d4a29133c335ea12052"
+SRC_URI[sha256sum] = "f33ff84da328f943155f22cfd49030ef4ad85ad35fc2d9419a203521b65c384c"
 
 PSEUDO_EXTRA_OPTS ?= "--enable-force-async --without-passwd-fallback"
 
diff --git a/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb b/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb
index 31e1223..eb666c0 100644
--- a/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb
+++ b/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb
@@ -1,7 +1,7 @@
 require pseudo.inc
 
-SRCREV = "e795df44a90a426a76b790f1b2774f3046a8fc31"
-PV = "1.7.2+git${SRCPV}"
+SRCREV = "3bc3909fa70535c2ef876009dc58e577b10a7e0e"
+PV = "1.7.4+git${SRCPV}"
 
 DEFAULT_PREFERENCE = "-1"
 
diff --git a/yocto-poky/meta/recipes-devtools/python/python-3.4-manifest.inc b/yocto-poky/meta/recipes-devtools/python/python-3.4-manifest.inc
index 07e1490..97070b6 100644
--- a/yocto-poky/meta/recipes-devtools/python/python-3.4-manifest.inc
+++ b/yocto-poky/meta/recipes-devtools/python/python-3.4-manifest.inc
@@ -58,7 +58,7 @@
 FILES_${PN}-db="${libdir}/python3.4/anydbm.* ${libdir}/python3.4/dumbdbm.* ${libdir}/python3.4/whichdb.* ${libdir}/python3.4/dbm ${libdir}/python3.4/lib-dynload/_dbm.*.so "
 
 SUMMARY_${PN}-debugger="Python debugger"
-RDEPENDS_${PN}-debugger="${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint"
+RDEPENDS_${PN}-debugger="${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint ${PN}-importlib ${PN}-pkgutil"
 FILES_${PN}-debugger="${libdir}/python3.4/bdb.* ${libdir}/python3.4/pdb.* "
 
 SUMMARY_${PN}-dev="Python development package"
diff --git a/yocto-poky/meta/recipes-devtools/python/python-async_0.6.2.bb b/yocto-poky/meta/recipes-devtools/python/python-async_0.6.2.bb
index 8ed0b03..5a17a1a 100644
--- a/yocto-poky/meta/recipes-devtools/python/python-async_0.6.2.bb
+++ b/yocto-poky/meta/recipes-devtools/python/python-async_0.6.2.bb
@@ -10,7 +10,7 @@
 
 S = "${WORKDIR}/async-${PV}"
 
-inherit distutils
+inherit setuptools
 
 RDEPENDS_${PN} += "python-threading python-lang"
 
diff --git a/yocto-poky/meta/recipes-devtools/python/python-pygtk_2.24.0.bb b/yocto-poky/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
index e4c33a8..79b3110 100644
--- a/yocto-poky/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
+++ b/yocto-poky/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
@@ -26,7 +26,9 @@
 
 EXTRA_OECONF = "--disable-docs --with-python-includes=${STAGING_INCDIR}/../"
 
-inherit autotools pkgconfig distutils-base
+inherit autotools pkgconfig distutils-base distro_features_check
+
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 do_configure_prepend() {
 	install -m 0644 ${WORKDIR}/acinclude.m4 ${S}/
diff --git a/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch b/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch
new file mode 100644
index 0000000..225b02f
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch
@@ -0,0 +1,43 @@
+From ee05e55e84b53f4bb0d0baba13ca47a8f84b7cb4 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Wed, 30 Sep 2015 01:12:52 -0700
+Subject: [PATCH] smart:cache.py: getPackages() matches name + arch
+
+It only matched the name in the past, for example:
+smart install busybox (matched)
+but:
+smart install busybox@core2_64 (didn't match)
+
+The installation is very slow when there is no match since it would search all
+the packages in the repo.
+This patch makes it match both.
+
+Upstream-Status: Pending
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ smart/cache.py |    3 ++-
+ smart/ccache.c |    9 ++++++++-
+ 2 files changed, 10 insertions(+), 2 deletions(-)
+
+diff --git a/smart/control.py b/smart/control.py
+index d44abe7..f23a604 100644
+--- a/smart/control.py
++++ b/smart/control.py
+@@ -876,9 +876,13 @@ class Control(object):
+         objects = []
+ 
+         # If we find packages with exactly the given
+-        # name or name-version, use them.
+-        for pkg in self._cache.getPackages(s):
+-            if pkg.name == s or "%s-%s" % (pkg.name, pkg.version) == s:
++        # name, name-version, or name@arch, use them.
++        s_name = s
++        if "@" in s:
++            s_name = s.split("@")[0]
++        for pkg in self._cache.getPackages(s_name):
++            if pkg.name == s or "%s-%s" % (pkg.name, pkg.version) == s \
++                    or "%s@%s" % (pkg.name, pkg.version.split('@')[1]) == s:
+                 objects.append((1.0, pkg))
+          
+         if not objects:
diff --git a/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb b/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb
index 8b974b0..d6c378b 100644
--- a/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb
+++ b/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb
@@ -23,6 +23,7 @@
           file://smart-add-for-rpm-ignoresize-check.patch \
           file://smart-already-installed-message.patch \
           file://smart-set-noprogress-for-pycurl.patch \
+          file://smart-cache.py-getPackages-matches-name-version.patch \
          "
 
 SRCREV = "407a7eca766431257dcd1da15175cc36a1bb22d0"
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu.inc b/yocto-poky/meta/recipes-devtools/qemu/qemu.inc
index f6c0ae3..b17da2f 100644
--- a/yocto-poky/meta/recipes-devtools/qemu/qemu.inc
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu.inc
@@ -31,9 +31,6 @@
 
 EXTRA_OECONF += "--target-list=${@get_qemu_target_list(d)} --disable-werror  --disable-bluez --disable-libiscsi --with-system-pixman --extra-cflags='${CFLAGS}'"
 
-EXTRA_OECONF_append_class-native = " --enable-debug --enable-debug-info"
-INHIBIT_SYSROOT_STRIP = "1"
-
 EXTRA_OECONF_class-nativesdk = "--target-list=${@get_qemu_target_list(d)} --disable-werror \
 				"
 export LIBTOOL="${HOST_SYS}-libtool"
@@ -135,9 +132,4 @@
 
 EXTRA_OECONF += "${@bb.utils.contains('PACKAGECONFIG', 'alsa', '--audio-drv-list=oss,alsa', '', d)}"
 
-# Qemu target will not build in world build for ARM or Mips
-BROKEN_qemuarm = "1"
-BROKEN_qemumips64 = "1"
-BROKEN_qemumips = "1"
-
 INSANE_SKIP_${PN} = "arch"
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_1.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_1.patch
new file mode 100644
index 0000000..d7ae871
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_1.patch
@@ -0,0 +1,63 @@
+From ce317461573bac12b10d67699b4ddf1f97cf066c Mon Sep 17 00:00:00 2001
+From: Jason Wang <jasowang@redhat.com>
+Date: Fri, 25 Sep 2015 13:21:28 +0800
+Subject: [PATCH] virtio: introduce virtqueue_unmap_sg()
+
+Factor out sg unmapping logic. This will be reused by the patch that
+can discard descriptor.
+
+Cc: Michael S. Tsirkin <mst@redhat.com>
+Cc: Andrew James <andrew.james@hpe.com>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+Reviewed-by: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
+
+Upstream-Status: Backport
+
+git.qemu.org/?p=qemu.git;a=commit;h=ce317461573bac12b10d67699b4ddf1f97cf066c
+
+CVE: CVE-2015-7295 patch #1
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/virtio/virtio.c | 14 ++++++++++----
+ 1 file changed, 10 insertions(+), 4 deletions(-)
+
+Index: qemu-2.4.0/hw/virtio/virtio.c
+===================================================================
+--- qemu-2.4.0.orig/hw/virtio/virtio.c
++++ qemu-2.4.0/hw/virtio/virtio.c
+@@ -243,14 +243,12 @@ int virtio_queue_empty(VirtQueue *vq)
+     return vring_avail_idx(vq) == vq->last_avail_idx;
+ }
+ 
+-void virtqueue_fill(VirtQueue *vq, const VirtQueueElement *elem,
+-                    unsigned int len, unsigned int idx)
++static void virtqueue_unmap_sg(VirtQueue *vq, const VirtQueueElement *elem,
++                               unsigned int len)
+ {
+     unsigned int offset;
+     int i;
+ 
+-    trace_virtqueue_fill(vq, elem, len, idx);
+-
+     offset = 0;
+     for (i = 0; i < elem->in_num; i++) {
+         size_t size = MIN(len - offset, elem->in_sg[i].iov_len);
+@@ -266,6 +264,14 @@ void virtqueue_fill(VirtQueue *vq, const
+         cpu_physical_memory_unmap(elem->out_sg[i].iov_base,
+                                   elem->out_sg[i].iov_len,
+                                   0, elem->out_sg[i].iov_len);
++}
++
++void virtqueue_fill(VirtQueue *vq, const VirtQueueElement *elem,
++                    unsigned int len, unsigned int idx)
++{
++    trace_virtqueue_fill(vq, elem, len, idx);
++
++    virtqueue_unmap_sg(vq, elem, len);
+ 
+     idx = (idx + vring_used_idx(vq)) % vq->vring.num;
+ 
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_2.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_2.patch
new file mode 100644
index 0000000..45dfab3
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_2.patch
@@ -0,0 +1,58 @@
+From 29b9f5efd78ae0f9cc02dd169b6e80d2c404bade Mon Sep 17 00:00:00 2001
+From: Jason Wang <jasowang@redhat.com>
+Date: Fri, 25 Sep 2015 13:21:29 +0800
+Subject: [PATCH] virtio: introduce virtqueue_discard()
+
+This patch introduces virtqueue_discard() to discard a descriptor and
+unmap the sgs. This will be used by the patch that will discard
+descriptor when packet is truncated.
+
+Cc: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+Reviewed-by: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
+Upstream-Status: Backport
+
+git.qemu.org/?p=qemu.git;a=commit;h=29b9f5efd78ae0f9cc02dd169b6e80d2c404bade
+ 
+CVE: CVE-2015-7295 patch #2
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/virtio/virtio.c         | 7 +++++++
+ include/hw/virtio/virtio.h | 2 ++
+ 2 files changed, 9 insertions(+)
+
+Index: qemu-2.4.0/hw/virtio/virtio.c
+===================================================================
+--- qemu-2.4.0.orig/hw/virtio/virtio.c
++++ qemu-2.4.0/hw/virtio/virtio.c
+@@ -266,6 +266,13 @@ static void virtqueue_unmap_sg(VirtQueue
+                                   0, elem->out_sg[i].iov_len);
+ }
+ 
++void virtqueue_discard(VirtQueue *vq, const VirtQueueElement *elem,
++                       unsigned int len)
++{
++    vq->last_avail_idx--;
++    virtqueue_unmap_sg(vq, elem, len);
++}
++
+ void virtqueue_fill(VirtQueue *vq, const VirtQueueElement *elem,
+                     unsigned int len, unsigned int idx)
+ {
+Index: qemu-2.4.0/include/hw/virtio/virtio.h
+===================================================================
+--- qemu-2.4.0.orig/include/hw/virtio/virtio.h
++++ qemu-2.4.0/include/hw/virtio/virtio.h
+@@ -146,6 +146,8 @@ void virtio_del_queue(VirtIODevice *vdev
+ void virtqueue_push(VirtQueue *vq, const VirtQueueElement *elem,
+                     unsigned int len);
+ void virtqueue_flush(VirtQueue *vq, unsigned int count);
++void virtqueue_discard(VirtQueue *vq, const VirtQueueElement *elem,
++                       unsigned int len);
+ void virtqueue_fill(VirtQueue *vq, const VirtQueueElement *elem,
+                     unsigned int len, unsigned int idx);
+ 
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_3.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_3.patch
new file mode 100644
index 0000000..74442e3
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7295_3.patch
@@ -0,0 +1,52 @@
+From 0cf33fb6b49a19de32859e2cdc6021334f448fb3 Mon Sep 17 00:00:00 2001
+From: Jason Wang <jasowang@redhat.com>
+Date: Fri, 25 Sep 2015 13:21:30 +0800
+Subject: [PATCH] virtio-net: correctly drop truncated packets
+
+When packet is truncated during receiving, we drop the packets but
+neither discard the descriptor nor add and signal used
+descriptor. This will lead several issues:
+
+- sg mappings are leaked
+- rx will be stalled if a lots of packets were truncated
+
+In order to be consistent with vhost, fix by discarding the descriptor
+in this case.
+
+Cc: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+Reviewed-by: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
+
+Upstream-Status: Backport
+
+git.qemu.org/?p=qemu.git;a=commit;h=0cf33fb6b49a19de32859e2cdc6021334f448fb3
+
+CVE: CVE-2015-7295 patch #3
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/net/virtio-net.c | 8 +-------
+ 1 file changed, 1 insertion(+), 7 deletions(-)
+
+Index: qemu-2.4.0/hw/net/virtio-net.c
+===================================================================
+--- qemu-2.4.0.orig/hw/net/virtio-net.c
++++ qemu-2.4.0/hw/net/virtio-net.c
+@@ -1086,13 +1086,7 @@ static ssize_t virtio_net_receive(NetCli
+          * must have consumed the complete packet.
+          * Otherwise, drop it. */
+         if (!n->mergeable_rx_bufs && offset < size) {
+-#if 0
+-            error_report("virtio-net truncated non-mergeable packet: "
+-                         "i %zd mergeable %d offset %zd, size %zd, "
+-                         "guest hdr len %zd, host hdr len %zd",
+-                         i, n->mergeable_rx_bufs,
+-                         offset, size, n->guest_hdr_len, n->host_hdr_len);
+-#endif
++            virtqueue_discard(q->rx_vq, &elem, total);
+             return size;
+         }
+ 
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7504.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7504.patch
new file mode 100644
index 0000000..90a7947
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7504.patch
@@ -0,0 +1,56 @@
+From 837f21aacf5a714c23ddaadbbc5212f9b661e3f7 Mon Sep 17 00:00:00 2001
+From: Prasad J Pandit <pjp@fedoraproject.org>
+Date: Fri, 20 Nov 2015 11:50:31 +0530
+Subject: [PATCH] net: pcnet: add check to validate receive data
+ size(CVE-2015-7504)
+
+In loopback mode, pcnet_receive routine appends CRC code to the
+receive buffer. If the data size given is same as the buffer size,
+the appended CRC code overwrites 4 bytes after s->buffer. Added a
+check to avoid that.
+
+Reported by: Qinghao Tang <luodalongde@gmail.com>
+Cc: qemu-stable@nongnu.org
+Reviewed-by: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Prasad J Pandit <pjp@fedoraproject.org>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+
+Upstream-Status: Backport
+
+http://git.qemu.org/?p=qemu.git;a=commit;h=837f21aacf5a714c23ddaadbbc5212f9b661e3f7
+
+CVE: CVE-2015-7504
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/net/pcnet.c | 8 +++++---
+ 1 file changed, 5 insertions(+), 3 deletions(-)
+
+Index: qemu-2.4.0/hw/net/pcnet.c
+===================================================================
+--- qemu-2.4.0.orig/hw/net/pcnet.c
++++ qemu-2.4.0/hw/net/pcnet.c
+@@ -1085,7 +1085,7 @@ ssize_t pcnet_receive(NetClientState *nc
+                 uint32_t fcs = ~0;
+                 uint8_t *p = src;
+ 
+-                while (p != &src[size-4])
++                while (p != &src[size])
+                     CRC(fcs, *p++);
+                 crc_err = (*(uint32_t *)p != htonl(fcs));
+             }
+@@ -1234,8 +1234,10 @@ static void pcnet_transmit(PCNetState *s
+         bcnt = 4096 - GET_FIELD(tmd.length, TMDL, BCNT);
+ 
+         /* if multi-tmd packet outsizes s->buffer then skip it silently.
+-           Note: this is not what real hw does */
+-        if (s->xmit_pos + bcnt > sizeof(s->buffer)) {
++         * Note: this is not what real hw does.
++         * Last four bytes of s->buffer are used to store CRC FCS code.
++         */
++        if (s->xmit_pos + bcnt > sizeof(s->buffer) - 4) {
+             s->xmit_pos = -1;
+             goto txdone;
+         }
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7512.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7512.patch
new file mode 100644
index 0000000..50b8a6c
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-7512.patch
@@ -0,0 +1,44 @@
+From 8b98a2f07175d46c3f7217639bd5e03f2ec56343 Mon Sep 17 00:00:00 2001
+From: Jason Wang <jasowang@redhat.com>
+Date: Mon, 30 Nov 2015 15:00:06 +0800
+Subject: [PATCH] pcnet: fix rx buffer overflow(CVE-2015-7512)
+
+Backends could provide a packet whose length is greater than buffer
+size. Check for this and truncate the packet to avoid rx buffer
+overflow in this case.
+
+Cc: Prasad J Pandit <pjp@fedoraproject.org>
+Cc: qemu-stable@nongnu.org
+Reviewed-by: Michael S. Tsirkin <mst@redhat.com>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+
+Upstream-Status: Backport
+
+http://git.qemu.org/?p=qemu.git;a=commit;h=8b98a2f07175d46c3f7217639bd5e03f2ec56343
+
+CVE: CVE-2015-7512
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/net/pcnet.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
+
+Index: qemu-2.4.0/hw/net/pcnet.c
+===================================================================
+--- qemu-2.4.0.orig/hw/net/pcnet.c
++++ qemu-2.4.0/hw/net/pcnet.c
+@@ -1065,6 +1065,12 @@ ssize_t pcnet_receive(NetClientState *nc
+             int pktcount = 0;
+ 
+             if (!s->looptest) {
++                if (size > 4092) {
++#ifdef PCNET_DEBUG_RMD
++                    fprintf(stderr, "pcnet: truncates rx packet.\n");
++#endif
++                    size = 4092;
++                }
+                 memcpy(src, buf, size);
+                 /* no need to compute the CRC */
+                 src[size] = 0;
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8345.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8345.patch
new file mode 100644
index 0000000..310b458
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8345.patch
@@ -0,0 +1,73 @@
+From 00837731d254908a841d69298a4f9f077babaf24 Mon Sep 17 00:00:00 2001
+From: Stefan Weil <sw@weilnetz.de>
+Date: Fri, 20 Nov 2015 08:42:33 +0100
+Subject: [PATCH] eepro100: Prevent two endless loops
+
+http://lists.nongnu.org/archive/html/qemu-devel/2015-11/msg04592.html
+shows an example how an endless loop in function action_command can
+be achieved.
+
+During my code review, I noticed a 2nd case which can result in an
+endless loop.
+
+Reported-by: Qinghao Tang <luodalongde@gmail.com>
+Signed-off-by: Stefan Weil <sw@weilnetz.de>
+Signed-off-by: Jason Wang <jasowang@redhat.com>
+
+Upstream-Status: Backport
+
+http://git.qemu.org/?p=qemu.git;a=commit;h=00837731d254908a841d69298a4f9f077babaf24
+
+CVE: CVE-2015-8345
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/net/eepro100.c | 16 ++++++++++++++++
+ 1 file changed, 16 insertions(+)
+
+diff --git a/hw/net/eepro100.c b/hw/net/eepro100.c
+index 60333b7..685a478 100644
+--- a/hw/net/eepro100.c
++++ b/hw/net/eepro100.c
+@@ -774,6 +774,11 @@ static void tx_command(EEPRO100State *s)
+ #if 0
+         uint16_t tx_buffer_el = lduw_le_pci_dma(&s->dev, tbd_address + 6);
+ #endif
++        if (tx_buffer_size == 0) {
++            /* Prevent an endless loop. */
++            logout("loop in %s:%u\n", __FILE__, __LINE__);
++            break;
++        }
+         tbd_address += 8;
+         TRACE(RXTX, logout
+             ("TBD (simplified mode): buffer address 0x%08x, size 0x%04x\n",
+@@ -855,6 +860,10 @@ static void set_multicast_list(EEPRO100State *s)
+ 
+ static void action_command(EEPRO100State *s)
+ {
++    /* The loop below won't stop if it gets special handcrafted data.
++       Therefore we limit the number of iterations. */
++    unsigned max_loop_count = 16;
++
+     for (;;) {
+         bool bit_el;
+         bool bit_s;
+@@ -870,6 +879,13 @@ static void action_command(EEPRO100State *s)
+ #if 0
+         bool bit_sf = ((s->tx.command & COMMAND_SF) != 0);
+ #endif
++
++        if (max_loop_count-- == 0) {
++            /* Prevent an endless loop. */
++            logout("loop in %s:%u\n", __FILE__, __LINE__);
++            break;
++        }
++
+         s->cu_offset = s->tx.link;
+         TRACE(OTHER,
+               logout("val=(cu start), status=0x%04x, command=0x%04x, link=0x%08x\n",
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8504.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8504.patch
new file mode 100644
index 0000000..9e66021
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2015-8504.patch
@@ -0,0 +1,51 @@
+From 4c65fed8bdf96780735dbdb92a8bd0d6b6526cc3 Mon Sep 17 00:00:00 2001
+From: Prasad J Pandit <pjp@fedoraproject.org>
+Date: Thu, 3 Dec 2015 18:54:17 +0530
+Subject: [PATCH] ui: vnc: avoid floating point exception
+
+While sending 'SetPixelFormat' messages to a VNC server,
+the client could set the 'red-max', 'green-max' and 'blue-max'
+values to be zero. This leads to a floating point exception in
+write_png_palette while doing frame buffer updates.
+
+Reported-by: Lian Yihan <lianyihan@360.cn>
+Signed-off-by: Prasad J Pandit <pjp@fedoraproject.org>
+Reviewed-by: Gerd Hoffmann <kraxel@redhat.com>
+Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
+
+Upstream-Status: Backport
+
+http://git.qemu.org/?p=qemu.git;a=commitdiff;h=4c65fed8bdf96780735dbdb92a8
+
+CVE: CVE-2015-8504
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ui/vnc.c | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+Index: qemu-2.4.0/ui/vnc.c
+===================================================================
+--- qemu-2.4.0.orig/ui/vnc.c
++++ qemu-2.4.0/ui/vnc.c
+@@ -2189,15 +2189,15 @@ static void set_pixel_format(VncState *v
+         return;
+     }
+ 
+-    vs->client_pf.rmax = red_max;
++    vs->client_pf.rmax = red_max ? red_max : 0xFF;
+     vs->client_pf.rbits = hweight_long(red_max);
+     vs->client_pf.rshift = red_shift;
+     vs->client_pf.rmask = red_max << red_shift;
+-    vs->client_pf.gmax = green_max;
++    vs->client_pf.gmax = green_max ? green_max : 0xFF;
+     vs->client_pf.gbits = hweight_long(green_max);
+     vs->client_pf.gshift = green_shift;
+     vs->client_pf.gmask = green_max << green_shift;
+-    vs->client_pf.bmax = blue_max;
++    vs->client_pf.bmax = blue_max ? blue_max : 0xFF;
+     vs->client_pf.bbits = hweight_long(blue_max);
+     vs->client_pf.bshift = blue_shift;
+     vs->client_pf.bmask = blue_max << blue_shift;
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-1568.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-1568.patch
new file mode 100644
index 0000000..9c40ffb
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-1568.patch
@@ -0,0 +1,46 @@
+From 4ab0359a8ae182a7ac5c99609667273167703fab Mon Sep 17 00:00:00 2001
+From: Prasad J Pandit <pjp@fedoraproject.org>
+Date: Mon, 11 Jan 2016 14:10:42 -0500
+Subject: [PATCH] ide: ahci: reset ncq object to unused on error
+
+When processing NCQ commands, AHCI device emulation prepares a
+NCQ transfer object; To which an aio control block(aiocb) object
+is assigned in 'execute_ncq_command'. In case, when the NCQ
+command is invalid, the 'aiocb' object is not assigned, and NCQ
+transfer object is left as 'used'. This leads to a use after
+free kind of error in 'bdrv_aio_cancel_async' via 'ahci_reset_port'.
+Reset NCQ transfer object to 'unused' to avoid it.
+
+[Maintainer edit: s/ACHI/AHCI/ in the commit message. --js]
+
+Reported-by: Qinghao Tang <luodalongde@gmail.com>
+Signed-off-by: Prasad J Pandit <pjp@fedoraproject.org>
+Reviewed-by: John Snow <jsnow@redhat.com>
+Message-id: 1452282511-4116-1-git-send-email-ppandit@redhat.com
+Signed-off-by: John Snow <jsnow@redhat.com>
+
+Upstream-Status: Backport
+
+http://git.qemu.org/?p=qemu.git;a=commit;h=4ab0359a8ae182a7ac5c99609667273167703fab
+
+CVE: CVE-2016-1568
+[Yocto # 9013]
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/ide/ahci.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+Index: qemu-2.4.0/hw/ide/ahci.c
+===================================================================
+--- qemu-2.4.0.orig/hw/ide/ahci.c
++++ qemu-2.4.0/hw/ide/ahci.c
+@@ -898,6 +898,7 @@ static void ncq_err(NCQTransferState *nc
+     ide_state->error = ABRT_ERR;
+     ide_state->status = READY_STAT | ERR_STAT;
+     ncq_tfs->drive->port_regs.scr_err |= (1 << ncq_tfs->tag);
++    ncq_tfs->used = 0;
+ }
+ 
+ static void ncq_finish(NCQTransferState *ncq_tfs)
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2197.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2197.patch
new file mode 100644
index 0000000..946435c
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2197.patch
@@ -0,0 +1,59 @@
+From: Prasad J Pandit <address@hidden>
+
+When IDE AHCI emulation uses Frame Information Structures(FIS)
+engine for data transfer, the mapped FIS buffer address is stored
+in a static 'bounce.buffer'. When a request is made to map another
+memory region, address_space_map() returns NULL because
+'bounce.buffer' is in_use. It leads to a null pointer dereference
+error while doing 'dma_memory_unmap'. Add a check to avoid it.
+
+Reported-by: Zuozhi fzz <address@hidden>
+Signed-off-by: Prasad J Pandit <address@hidden>
+
+Upstream-Status: Backport
+https://lists.gnu.org/archive/html/qemu-devel/2016-01/msg05740.html
+
+CVE: CVE-2016-2197
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/ide/ahci.c | 16 ++++++++++------
+  1 file changed, 10 insertions(+), 6 deletions(-)
+
+  Update as per review
+    -> https://lists.gnu.org/archive/html/qemu-devel/2016-01/msg05715.html
+
+Index: qemu-2.5.0/hw/ide/ahci.c
+===================================================================
+--- qemu-2.5.0.orig/hw/ide/ahci.c
++++ qemu-2.5.0/hw/ide/ahci.c
+@@ -661,9 +661,11 @@ static bool ahci_map_fis_address(AHCIDev
+ 
+ static void ahci_unmap_fis_address(AHCIDevice *ad)
+ {
+-    dma_memory_unmap(ad->hba->as, ad->res_fis, 256,
+-                     DMA_DIRECTION_FROM_DEVICE, 256);
+-    ad->res_fis = NULL;
++    if (ad->res_fis) {
++        dma_memory_unmap(ad->hba->as, ad->res_fis, 256,
++                         DMA_DIRECTION_FROM_DEVICE, 256);
++        ad->res_fis = NULL;
++    }
+ }
+ 
+ static bool ahci_map_clb_address(AHCIDevice *ad)
+@@ -677,9 +679,11 @@ static bool ahci_map_clb_address(AHCIDev
+ 
+ static void ahci_unmap_clb_address(AHCIDevice *ad)
+ {
+-    dma_memory_unmap(ad->hba->as, ad->lst, 1024,
+-                     DMA_DIRECTION_FROM_DEVICE, 1024);
+-    ad->lst = NULL;
++    if (ad->lst) {
++        dma_memory_unmap(ad->hba->as, ad->lst, 1024,
++                         DMA_DIRECTION_FROM_DEVICE, 1024);
++        ad->lst = NULL;
++    }
+ }
+ 
+ static void ahci_write_fis_sdb(AHCIState *s, NCQTransferState *ncq_tfs)
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2198.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2198.patch
new file mode 100644
index 0000000..f1201f0
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-2198.patch
@@ -0,0 +1,45 @@
+From: Prasad J Pandit <address@hidden>
+
+USB Ehci emulation supports host controller capability registers.
+But its mmio '.write' function was missing, which lead to a null
+pointer dereference issue. Add a do nothing 'ehci_caps_write'
+definition to avoid it; Do nothing because capability registers
+are Read Only(RO).
+
+Reported-by: Zuozhi Fzz <address@hidden>
+Signed-off-by: Prasad J Pandit <address@hidden>
+
+Upstream-Status: Backport
+https://lists.gnu.org/archive/html/qemu-devel/2016-01/msg05899.html
+
+CVE: CVE-2016-2198
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ hw/usb/hcd-ehci.c | 6 ++++++
+  1 file changed, 6 insertions(+)
+
+Index: qemu-2.5.0/hw/usb/hcd-ehci.c
+===================================================================
+--- qemu-2.5.0.orig/hw/usb/hcd-ehci.c
++++ qemu-2.5.0/hw/usb/hcd-ehci.c
+@@ -893,6 +893,11 @@ static uint64_t ehci_caps_read(void *ptr
+     return s->caps[addr];
+ }
+ 
++static void ehci_caps_write(void *ptr, hwaddr addr,
++                             uint64_t val, unsigned size)
++{
++}
++
+ static uint64_t ehci_opreg_read(void *ptr, hwaddr addr,
+                                 unsigned size)
+ {
+@@ -2310,6 +2315,7 @@ static void ehci_frame_timer(void *opaqu
+ 
+ static const MemoryRegionOps ehci_mmio_caps_ops = {
+     .read = ehci_caps_read,
++    .write = ehci_caps_write,
+     .valid.min_access_size = 1,
+     .valid.max_access_size = 4,
+     .impl.min_access_size = 1,
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu/no-valgrind.patch b/yocto-poky/meta/recipes-devtools/qemu/qemu/no-valgrind.patch
new file mode 100644
index 0000000..91f7280
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu/no-valgrind.patch
@@ -0,0 +1,19 @@
+There isn't an option to enable or disable valgrind support, so disable it to avoid non-deterministic builds.
+
+Upstream-Status: Inappropriate
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+diff --git a/configure b/configure
+index b3c4f51..4d3929e 100755
+--- a/configure
++++ b/configure
+@@ -4193,9 +4192,0 @@ valgrind_h=no
+-cat > $TMPC << EOF
+-#include <valgrind/valgrind.h>
+-int main(void) {
+-  return 0;
+-}
+-EOF
+-if compile_prog "" "" ; then
+-    valgrind_h=yes
+-fi
diff --git a/yocto-poky/meta/recipes-devtools/qemu/qemu_2.4.0.bb b/yocto-poky/meta/recipes-devtools/qemu/qemu_2.4.0.bb
index 59b1788..8d47b16 100644
--- a/yocto-poky/meta/recipes-devtools/qemu/qemu_2.4.0.bb
+++ b/yocto-poky/meta/recipes-devtools/qemu/qemu_2.4.0.bb
@@ -9,6 +9,17 @@
             file://smc91c111_fix1.patch \
             file://smc91c111_fix2.patch \
             file://smc91c111_fix3.patch \
+            file://no-valgrind.patch \
+            file://CVE-2015-8504.patch \
+            file://CVE-2015-7504.patch \
+            file://CVE-2015-7512.patch \
+            file://CVE-2015-8345.patch \
+            file://CVE-2016-1568.patch \
+            file://CVE-2015-7295_1.patch \
+            file://CVE-2015-7295_2.patch \
+            file://CVE-2015-7295_3.patch \
+            file://CVE-2016-2197.patch \
+            file://CVE-2016-2198.patch \
            "
 SRC_URI_prepend = "http://wiki.qemu-project.org/download/${BP}.tar.bz2"
 SRC_URI[md5sum] = "186ee8194140a484a455f8e3c74589f4"
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch b/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch
new file mode 100644
index 0000000..7894a42
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch
@@ -0,0 +1,29 @@
+configure.ac: search for both gpg2 and gpg
+
+On some platforms the GnuPG binary is named 'gpg2' whereas others have 'gpg'.
+This patch increases compatibility by searching for 'gpg' in addition to
+'gpg2'.
+
+Upstream-Status: Pending
+
+Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
+---
+ configure.ac | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index 6746b4c..f6922ae 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -562,7 +562,7 @@ AC_PATH_PROG(__DIFF, diff, /bin/diff, $MYPATH)
+ AC_PATH_PROG(__DITTO, ditto, %{_bindir}/ditto, $MYPATH)
+ AC_PATH_PROG(__FILE, file, %{_bindir}/file, $MYPATH)
+ AC_PATH_PROG(__GIT, git, %{_bindir}/git, $MYPATH)
+-AC_PATH_PROG(__GPG, gpg2, %{_bindir}/gpg2, $MYPATH)
++AC_PATH_PROGS(__GPG, [gpg2 gpg], %{_bindir}/gpg2, $MYPATH)
+ AC_PATH_PROG(__GSR, gsr, %{_bindir}/gsr, $MYPATH)
+ AC_PATH_PROG(__GST_INSPECT, gst-inspect-0.10, %{_bindir}/gst-inspect-0.10, $MYPATH)
+ AC_PATH_PROG(__GZIP, gzip, /bin/gzip, $MYPATH)
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch b/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch
index 3986030..3d8d645 100644
--- a/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch
@@ -37,7 +37,7 @@
 +    int ret,rootdir_len;
 +
 +    if(rootdir == NULL) {
-+        return;
++        return -1;
 +    }
 +
 +    rootdir_len = strlen(rootdir);
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpm_4.11.2.bb b/yocto-poky/meta/recipes-devtools/rpm/rpm_4.11.2.bb
index 210c943..f4a2110 100644
--- a/yocto-poky/meta/recipes-devtools/rpm/rpm_4.11.2.bb
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpm_4.11.2.bb
@@ -22,7 +22,8 @@
 LICENSE = "GPL-2.0+"
 LIC_FILES_CHKSUM ??= "file://${COMMON_LICENSE_DIR}/GPL-2.0;md5=801f80980d171dd6425610833a22dbe6"
 
-DEPENDS = "db libxml2 xz findutils file popt nss bzip2 elfutils patch attr zlib acl gzip make binutils python"
+DEPENDS = "db libxml2 xz findutils file popt nss bzip2 elfutils attr zlib acl gzip python"
+DEPENDS_append_class-native = " file-replacement-native"
 
 SRC_URI += "http://rpm.org/releases/rpm-4.11.x/${BP}.tar.bz2 \
             file://use-pkgconfig-for-python.patch \
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb b/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
index 8903f3bc..951b251 100644
--- a/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
@@ -43,6 +43,7 @@
 LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1"
 
 DEPENDS = "libpcre attr acl popt ossp-uuid file byacc-native"
+DEPENDS_append_class-native = " file-replacement-native"
 
 S = "${WORKDIR}/rpm"
 
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4.14.bb b/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4.14.bb
index 1f9a4bd..73b3734 100644
--- a/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4.14.bb
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4.14.bb
@@ -41,6 +41,7 @@
 LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1"
 
 DEPENDS = "libpcre attr acl popt ossp-uuid file byacc-native"
+DEPENDS_append_class-native = " file-replacement-native"
 
 # rpm2cpio is a shell script, which is part of the rpm src.rpm.  It is needed
 # in order to extract the distribution SRPM into a format we can extract...
@@ -98,6 +99,7 @@
 	   file://rpm-check-rootpath-reasonableness.patch \
 	   file://rpm-macros.in-disable-external-key-server.patch \
 	   file://rpm-opendb-before-verifyscript-to-avoid-null-point.patch \
+	   file://configure.ac-check-for-both-gpg2-and-gpg.patch \
 	  "
 
 # Uncomment the following line to enable platform score debugging
diff --git a/yocto-poky/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c b/yocto-poky/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
index 7f4caf9..c0b4d56 100644
--- a/yocto-poky/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
+++ b/yocto-poky/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
@@ -42,7 +42,7 @@
 int getPackageStr(rpmts ts, const char *NVRA, rpmTag tag, char **value)
 {
     int rc = -1;
-    rpmmi mi = rpmtsInitIterator(ts, RPMTAG_NVRA, NVRA, 0);
+    rpmmi mi = rpmmiInit(rpmtsGetRdb(ts), RPMTAG_NVRA, NVRA, 0);
     Header h;
     if ((h = rpmmiNext(mi)) != NULL) {
         HE_t he = (HE_t) memset(alloca(sizeof(*he)), 0, sizeof(*he));
@@ -225,7 +225,7 @@
 int lookupProvider(rpmts ts, const char *req, char **provider)
 {
     int rc = 0;
-    rpmmi provmi = rpmtsInitIterator(ts, RPMTAG_PROVIDENAME, req, 0);
+    rpmmi provmi = rpmmiInit(rpmtsGetRdb(ts), RPMTAG_PROVIDENAME, req, 0);
     if(provmi) {
         Header h;
         if ((h = rpmmiNext(provmi)) != NULL) {
@@ -266,7 +266,7 @@
         HE_t he = (HE_t) memset(alloca(sizeof(*he)), 0, sizeof(*he));
         int nkeys = argvCount(keys);
         for(i=0; i<nkeys; i++) {
-            rpmmi mi = rpmtsInitIterator(ts[0], RPMTAG_NVRA, keys[i], 0);
+            rpmmi mi = rpmmiInit(db, RPMTAG_NVRA, keys[i], 0);
             Header h;
             if ((h = rpmmiNext(mi)) != NULL) {
                 /* Get name of package */
@@ -280,6 +280,8 @@
                         printf("DEBUG: %s requires null\n", name);
                     }
                     rc = 0;
+                    free(name);
+                    (void)rpmmiFree(mi);
                     continue;
                 }
                 ARGV_t reqs = (ARGV_t)he->p.ptr;
@@ -412,7 +414,7 @@
     }
 
     for(i=0; i<tscount; i++)
-        (void) rpmtsCloseDB(ts[i]);
+        (void)rpmtsFree(ts[i]);
     free(ts);
 
     if( outfile ) {
diff --git a/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3184.patch b/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3184.patch
new file mode 100644
index 0000000..0663bd2
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3184.patch
@@ -0,0 +1,2094 @@
+Fix CVE-2015-3184
+
+Patch is from:
+http://subversion.apache.org/security/CVE-2015-3184-advisory.txt
+
+Upstream-Status: Backport
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+
+Index: Makefile.in
+===================================================================
+--- a/Makefile.in	(revision 1691883)
++++ b/Makefile.in	(working copy)
+@@ -357,6 +357,7 @@ TEST_SHLIB_VAR_SWIG_RB=\
+   fi;
+ 
+ APXS = @APXS@
++HTTPD_VERSION = @HTTPD_VERSION@
+ 
+ PYTHON = @PYTHON@
+ PERL = @PERL@
+@@ -509,6 +510,9 @@ check: bin @TRANSFORM_LIBTOOL_SCRIPTS@ $(TEST_DEPS
+ 	  if test "$(HTTP_LIBRARY)" != ""; then                              \
+ 	    flags="--http-library $(HTTP_LIBRARY) $$flags";                  \
+ 	  fi;                                                                \
++	  if test "$(HTTPD_VERSION)" != ""; then                              \
++	    flags="--httpd-version $(HTTPD_VERSION) $$flags";                  \
++	  fi;                                                                \
+ 	  if test "$(SERVER_MINOR_VERSION)" != ""; then                      \
+ 	    flags="--server-minor-version $(SERVER_MINOR_VERSION) $$flags";  \
+ 	  fi;                                                                \
+Index: build/ac-macros/apache.m4
+===================================================================
+--- a/build/ac-macros/apache.m4	(revision 1691883)
++++ b/build/ac-macros/apache.m4	(working copy)
+@@ -160,6 +160,20 @@ if test -n "$APXS" && test "$APXS" != "no"; then
+     BUILD_APACHE_RULE=apache-mod
+     INSTALL_APACHE_RULE=install-mods-shared
+     INSTALL_APACHE_MODS=true
++    HTTPD="`$APXS -q sbindir`/`$APXS -q PROGNAME`"
++    if ! test -e $HTTPD ; then
++      HTTPD="`$APXS -q bindir`/`$APXS -q PROGNAME`"
++    fi
++    HTTPD_VERSION=["`$HTTPD -v | $SED -e 's@^.*/\([0-9.]*\)\(.*$\)@\1@ ; 1q'`"]
++    AC_ARG_ENABLE(broken-httpd-auth,
++      AS_HELP_STRING([--enable-broken-httpd-auth],
++                     [Allow building against httpd 2.4 with broken auth]),
++      [broken_httpd_auth=$enableval],[broken_httpd_auth=no])
++    if test "$enable_broken_httpd_auth" = "yes"; then
++      AC_MSG_NOTICE([Building with broken httpd auth])
++      AC_DEFINE(SVN_ALLOW_BROKEN_HTTPD_AUTH, 1,
++                [Defined to allow building against httpd 2.4 with broken auth])
++    fi
+ 
+     case $host in
+       *-*-cygwin*)
+@@ -178,6 +192,7 @@ AC_SUBST(APACHE_LDFLAGS)
+ AC_SUBST(APACHE_INCLUDES)
+ AC_SUBST(APACHE_LIBEXECDIR)
+ AC_SUBST(INSTALL_APACHE_MODS)
++AC_SUBST(HTTPD_VERSION)
+ 
+ # there aren't any flags that interest us ...
+ #if test -n "$APXS" && test "$APXS" != "no"; then
+Index: build/run_tests.py
+===================================================================
+--- a/build/run_tests.py	(revision 1691883)
++++ b/build/run_tests.py	(working copy)
+@@ -29,6 +29,7 @@
+             [--fs-type=<fs-type>] [--fsfs-packing] [--fsfs-sharding=<n>]
+             [--list] [--milestone-filter=<regex>] [--mode-filter=<type>]
+             [--server-minor-version=<version>] [--http-proxy=<host>:<port>]
++            [--httpd-version=<version>]
+             [--config-file=<file>] [--ssl-cert=<file>]
+             <abs_srcdir> <abs_builddir>
+             <prog ...>
+@@ -125,7 +126,7 @@ class TestHarness:
+                fsfs_sharding=None, fsfs_packing=None,
+                list_tests=None, svn_bin=None, mode_filter=None,
+                milestone_filter=None, set_log_level=None, ssl_cert=None,
+-               http_proxy=None):
++               http_proxy=None, httpd_version=None):
+     '''Construct a TestHarness instance.
+ 
+     ABS_SRCDIR and ABS_BUILDDIR are the source and build directories.
+@@ -178,6 +179,7 @@ class TestHarness:
+     self.log = None
+     self.ssl_cert = ssl_cert
+     self.http_proxy = http_proxy
++    self.httpd_version = httpd_version
+     if not sys.stdout.isatty() or sys.platform == 'win32':
+       TextColors.disable()
+ 
+@@ -481,6 +483,8 @@ class TestHarness:
+       svntest.main.options.ssl_cert = self.ssl_cert
+     if self.http_proxy is not None:
+       svntest.main.options.http_proxy = self.http_proxy
++    if self.httpd_version is not None:
++      svntest.main.options.httpd_version = self.httpd_version
+ 
+     svntest.main.options.srcdir = self.srcdir
+ 
+@@ -645,7 +649,7 @@ def main():
+                             'enable-sasl', 'parallel', 'config-file=',
+                             'log-to-stdout', 'list', 'milestone-filter=',
+                             'mode-filter=', 'set-log-level=', 'ssl-cert=',
+-                            'http-proxy='])
++                            'http-proxy=', 'httpd-version='])
+   except getopt.GetoptError:
+     args = []
+ 
+@@ -656,9 +660,9 @@ def main():
+   base_url, fs_type, verbose, cleanup, enable_sasl, http_library, \
+     server_minor_version, fsfs_sharding, fsfs_packing, parallel, \
+     config_file, log_to_stdout, list_tests, mode_filter, milestone_filter, \
+-    set_log_level, ssl_cert, http_proxy = \
++    set_log_level, ssl_cert, http_proxy, httpd_version = \
+             None, None, None, None, None, None, None, None, None, None, None, \
+-            None, None, None, None, None, None, None
++            None, None, None, None, None, None, None, None
+   for opt, val in opts:
+     if opt in ['-u', '--url']:
+       base_url = val
+@@ -696,6 +700,8 @@ def main():
+       ssl_cert = val
+     elif opt in ['--http-proxy']:
+       http_proxy = val
++    elif opt in ['--httpd-version']:
++      httpd_version = val
+     else:
+       raise getopt.GetoptError
+ 
+@@ -712,7 +718,7 @@ def main():
+                    fsfs_sharding, fsfs_packing, list_tests,
+                    mode_filter=mode_filter, milestone_filter=milestone_filter,
+                    set_log_level=set_log_level, ssl_cert=ssl_cert,
+-                   http_proxy=http_proxy)
++                   http_proxy=http_proxy, httpd_version=httpd_version)
+ 
+   failed = th.run(args[2:])
+   if failed:
+Index: subversion/mod_authz_svn/mod_authz_svn.c
+===================================================================
+--- a/subversion/mod_authz_svn/mod_authz_svn.c	(revision 1691883)
++++ b/subversion/mod_authz_svn/mod_authz_svn.c	(working copy)
+@@ -48,6 +48,23 @@
+ #include "svn_dirent_uri.h"
+ #include "private/svn_fspath.h"
+ 
++/* The apache headers define these and they conflict with our definitions. */
++#ifdef PACKAGE_BUGREPORT
++#undef PACKAGE_BUGREPORT
++#endif
++#ifdef PACKAGE_NAME
++#undef PACKAGE_NAME
++#endif
++#ifdef PACKAGE_STRING
++#undef PACKAGE_STRING
++#endif
++#ifdef PACKAGE_TARNAME
++#undef PACKAGE_TARNAME
++#endif
++#ifdef PACKAGE_VERSION
++#undef PACKAGE_VERSION
++#endif
++#include "svn_private_config.h"
+ 
+ #ifdef APLOG_USE_MODULE
+ APLOG_USE_MODULE(authz_svn);
+@@ -67,6 +84,30 @@ typedef struct authz_svn_config_rec {
+   const char *force_username_case;
+ } authz_svn_config_rec;
+ 
++#if AP_MODULE_MAGIC_AT_LEAST(20060110,0) /* version where
++                                            ap_some_auth_required breaks */
++#  if AP_MODULE_MAGIC_AT_LEAST(20120211,47) /* first version with
++                                               force_authn hook and
++                                               ap_some_authn_required() which
++                                               allows us to work without
++                                               ap_some_auth_required() */
++#    define USE_FORCE_AUTHN 1
++#    define IN_SOME_AUTHN_NOTE "authz_svn-in-some-authn"
++#    define FORCE_AUTHN_NOTE "authz_svn-force-authn"
++#  else
++     /* ap_some_auth_required() is busted and no viable alternative exists */
++#    ifndef SVN_ALLOW_BROKEN_HTTPD_AUTH
++#      error This version of httpd has a security hole with mod_authz_svn
++#    else
++       /* user wants to build anyway */
++#      define USE_FORCE_AUTHN 0
++#    endif
++#  endif
++#else
++   /* old enough that ap_some_auth_required() still works */
++#  define USE_FORCE_AUTHN 0
++#endif
++
+ /*
+  * Configuration
+  */
+@@ -819,9 +860,51 @@ access_checker(request_rec *r)
+                                                     &authz_svn_module);
+   const char *repos_path = NULL;
+   const char *dest_repos_path = NULL;
+-  int status;
++  int status, authn_required;
+ 
++#if USE_FORCE_AUTHN
++  /* Use the force_authn() hook available in 2.4.x to work securely
++   * given that ap_some_auth_required() is no longer functional for our
++   * purposes in 2.4.x.
++   */
++  int authn_configured;
++
+   /* We are not configured to run */
++  if (!conf->anonymous || apr_table_get(r->notes, IN_SOME_AUTHN_NOTE)
++      || (! (conf->access_file || conf->repo_relative_access_file)))
++    return DECLINED;
++
++  /* Authentication is configured */
++  authn_configured = ap_auth_type(r) != NULL;
++  if (authn_configured)
++    {
++      /* If the user is trying to authenticate, let him.  It doesn't
++       * make much sense to grant anonymous access but deny authenticated
++       * users access, even though you can do that with '$anon' in the
++       * access file.
++       */
++      if (apr_table_get(r->headers_in,
++                        (PROXYREQ_PROXY == r->proxyreq)
++                        ? "Proxy-Authorization" : "Authorization"))
++        {
++          /* Set the note to force authn regardless of what access_checker_ex
++             hook requires */
++          apr_table_setn(r->notes, FORCE_AUTHN_NOTE, (const char*)1);
++
++          /* provide the proper return so the access_checker hook doesn't
++           * prevent the code from continuing on to the other auth hooks */
++          if (ap_satisfies(r) != SATISFY_ANY)
++            return OK;
++          else
++            return HTTP_FORBIDDEN;
++        }
++    }    
++
++#else
++  /* Support for older versions of httpd that have a working
++   * ap_some_auth_required() */
++
++  /* We are not configured to run */
+   if (!conf->anonymous
+       || (! (conf->access_file || conf->repo_relative_access_file)))
+     return DECLINED;
+@@ -834,9 +917,10 @@ access_checker(request_rec *r)
+       if (ap_satisfies(r) != SATISFY_ANY)
+         return DECLINED;
+ 
+-      /* If the user is trying to authenticate, let him.  If anonymous
+-       * access is allowed, so is authenticated access, by definition
+-       * of the meaning of '*' in the access file.
++      /* If the user is trying to authenticate, let him.  It doesn't
++       * make much sense to grant anonymous access but deny authenticated
++       * users access, even though you can do that with '$anon' in the
++       * access file.
+        */
+       if (apr_table_get(r->headers_in,
+                         (PROXYREQ_PROXY == r->proxyreq)
+@@ -848,6 +932,7 @@ access_checker(request_rec *r)
+           return HTTP_FORBIDDEN;
+         }
+     }
++#endif
+ 
+   /* If anon access is allowed, return OK */
+   status = req_check_access(r, conf, &repos_path, &dest_repos_path);
+@@ -856,7 +941,26 @@ access_checker(request_rec *r)
+       if (!conf->authoritative)
+         return DECLINED;
+ 
++#if USE_FORCE_AUTHN
++      if (authn_configured) {
++          /* We have to check to see if authn is required because if so we must
++           * return UNAUTHORIZED (401) rather than FORBIDDEN (403) since returning
++           * the 403 leaks information about what paths may exist to
++           * unauthenticated users.  We must set a note here in order
++           * to use ap_some_authn_rquired() without triggering an infinite
++           * loop since the call will trigger this function to be called again. */
++          apr_table_setn(r->notes, IN_SOME_AUTHN_NOTE, (const char*)1);
++          authn_required = ap_some_authn_required(r);
++          apr_table_unset(r->notes, IN_SOME_AUTHN_NOTE);
++          if (authn_required)
++            {
++              ap_note_auth_failure(r);
++              return HTTP_UNAUTHORIZED;
++            }
++      }
++#else
+       if (!ap_some_auth_required(r))
++#endif
+         log_access_verdict(APLOG_MARK, r, 0, repos_path, dest_repos_path);
+ 
+       return HTTP_FORBIDDEN;
+@@ -937,6 +1041,17 @@ auth_checker(request_rec *r)
+   return OK;
+ }
+ 
++#if USE_FORCE_AUTHN
++static int
++force_authn(request_rec *r)
++{
++  if (apr_table_get(r->notes, FORCE_AUTHN_NOTE))
++    return OK;
++
++  return DECLINED;
++}
++#endif
++
+ /*
+  * Module flesh
+  */
+@@ -953,6 +1068,9 @@ register_hooks(apr_pool_t *p)
+    * give SSLOptions +FakeBasicAuth a chance to work. */
+   ap_hook_check_user_id(check_user_id, mod_ssl, NULL, APR_HOOK_FIRST);
+   ap_hook_auth_checker(auth_checker, NULL, NULL, APR_HOOK_FIRST);
++#if USE_FORCE_AUTHN
++  ap_hook_force_authn(force_authn, NULL, NULL, APR_HOOK_FIRST);
++#endif
+   ap_register_provider(p,
+                        AUTHZ_SVN__SUBREQ_BYPASS_PROV_GRP,
+                        AUTHZ_SVN__SUBREQ_BYPASS_PROV_NAME,
+Index: subversion/tests/cmdline/README
+===================================================================
+--- a/subversion/tests/cmdline/README	(revision 1691883)
++++ b/subversion/tests/cmdline/README	(working copy)
+@@ -83,6 +83,133 @@ paths adjusted appropriately:
+      Require valid-user
+    </Location>
+ 
++   <Location /authz-test-work/anon>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     # This may seem unnecessary but granting access to everyone here is necessary
++     # to exercise a bug with httpd 2.3.x+.  The "Require all granted" syntax is
++     # new to 2.3.x+ which we can detect with the mod_authz_core.c module
++     # signature.  Use the "Allow from all" syntax with older versions for symmetry.
++     <IfModule mod_authz_core.c>
++       Require all granted
++     </IfModule>
++     <IfModule !mod_authz_core.c>
++       Allow from all
++     </IfMOdule>
++   </Location>
++   <Location /authz-test-work/mixed>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++     Satisfy Any
++   </Location>
++   <Location /authz-test-work/mixed-noauthwhenanon>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++     AuthzSVNNoAuthWhenAnonymousAllowed On
++   </Location>
++   <Location /authz-test-work/authn>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++   </Location>
++   <Location /authz-test-work/authn-anonoff>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++     AuthzSVNAnonymous Off
++   </Location>
++   <Location /authz-test-work/authn-lcuser>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++     AuthzForceUsernameCase Lower
++   </Location>
++   <Location /authz-test-work/authn-lcuser>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     Require           valid-user
++     AuthzForceUsernameCase Lower
++   </Location>
++   <Location /authz-test-work/authn-group>
++     DAV               svn
++     SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++     AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++     SVNListParentPath On
++     AuthType          Basic
++     AuthName          "Subversion Repository"
++     AuthUserFile /usr/local/apache2/conf/users
++     AuthGroupFile /usr/local/apache2/conf/groups
++     Require           group random
++     AuthzSVNAuthoritative Off
++   </Location>
++   <IfModule mod_authz_core.c>
++     <Location /authz-test-work/sallrany>
++       DAV               svn
++       SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++       AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++       SVNListParentPath On
++       AuthType          Basic
++       AuthName          "Subversion Repository"
++       AuthUserFile /usr/local/apache2/conf/users
++       AuthzSendForbiddenOnFailure On
++       Satisfy All
++       <RequireAny>
++         Require valid-user
++         Require expr req('ALLOW') == '1'
++       </RequireAny>
++     </Location>
++     <Location /authz-test-work/sallrall>
++       DAV               svn
++       SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
++       AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
++       SVNListParentPath On
++       AuthType          Basic
++       AuthName          "Subversion Repository"
++       AuthUserFile /usr/local/apache2/conf/users
++       AuthzSendForbiddenOnFailure On
++       Satisfy All
++       <RequireAll>
++         Require valid-user
++         Require expr req('ALLOW') == '1'
++       </RequireAll>
++     </Location>
++   </IfModule>
++
++
+    RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$ /svn-test-work/repositories/$1
+    RedirectMatch           ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$ /svn-test-work/repositories/$1
+ 
+@@ -101,8 +228,17 @@ just drop the following 2-line snippet into the
+ ----------------------------
+ jrandom:xCGl35kV9oWCY
+ jconstant:xCGl35kV9oWCY
++JRANDOM:xCGl35kV9oWCY
++JCONSTANT:xCGl35kV9oWCY
+ ----------------------------
+ 
++and these lines into the
++/usr/local/apache/conf/groups file:
++----------------------------
++random: jrandom
++constant: jconstant
++----------------------------
++
+ Now, (re)start Apache and run the tests over mod_dav_svn.
+ 
+ You can run a test script over DAV:
+@@ -138,6 +274,8 @@ Note [1]: It would be quite too much to expect tho
+           ----------------------------
+           jrandom:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0
+           jconstant:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0
++          JRANDOM:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0
++          JCONSTANT:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0
+           ----------------------------
+ 
+ 
+Index: subversion/tests/cmdline/davautocheck.sh
+===================================================================
+--- a/subversion/tests/cmdline/davautocheck.sh	(revision 1691883)
++++ b/subversion/tests/cmdline/davautocheck.sh	(working copy)
+@@ -289,8 +289,6 @@ LOAD_MOD_AUTHN_CORE="$(get_loadmodule_config mod_a
+     || fail "Authn_Core module not found."
+ LOAD_MOD_AUTHZ_CORE="$(get_loadmodule_config mod_authz_core)" \
+     || fail "Authz_Core module not found."
+-LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \
+-    || fail "Authz_Host module not found."
+ LOAD_MOD_UNIXD=$(get_loadmodule_config mod_unixd) \
+     || fail "UnixD module not found"
+ }
+@@ -298,6 +296,10 @@ LOAD_MOD_AUTHN_FILE="$(get_loadmodule_config mod_a
+     || fail "Authn_File module not found."
+ LOAD_MOD_AUTHZ_USER="$(get_loadmodule_config mod_authz_user)" \
+     || fail "Authz_User module not found."
++LOAD_MOD_AUTHZ_GROUPFILE="$(get_loadmodule_config mod_authz_groupfile)" \
++    || fail "Authz_GroupFile module not found."
++LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \
++    || fail "Authz_Host module not found."
+ }
+ if [ ${APACHE_MPM:+set} ]; then
+     LOAD_MOD_MPM=$(get_loadmodule_config mod_mpm_$APACHE_MPM) \
+@@ -328,6 +330,7 @@ HTTPD_ERROR_LOG="$HTTPD_ROOT/error_log"
+ HTTPD_MIME_TYPES="$HTTPD_ROOT/mime.types"
+ BASE_URL="http://localhost:$HTTPD_PORT"
+ HTTPD_USERS="$HTTPD_ROOT/users"
++HTTPD_GROUPS="$HTTPD_ROOT/groups"
+ 
+ mkdir "$HTTPD_ROOT" \
+   || fail "couldn't create temporary directory '$HTTPD_ROOT'"
+@@ -388,6 +391,14 @@ fi
+ say "Adding users for lock authentication"
+ $HTPASSWD -bc $HTTPD_USERS jrandom   rayjandom
+ $HTPASSWD -b  $HTTPD_USERS jconstant rayjandom
++$HTPASSWD -b  $HTTPD_USERS JRANDOM   rayjandom
++$HTPASSWD -b  $HTTPD_USERS JCONSTANT rayjandom
++ 
++say "Adding groups for mod_authz_svn tests"
++cat > "$HTTPD_GROUPS" <<__EOF__
++random: jrandom
++constant: jconstant
++__EOF__
+ 
+ touch $HTTPD_MIME_TYPES
+ 
+@@ -405,7 +416,9 @@ $LOAD_MOD_AUTHN_CORE
+ $LOAD_MOD_AUTHN_FILE
+ $LOAD_MOD_AUTHZ_CORE
+ $LOAD_MOD_AUTHZ_USER
++$LOAD_MOD_AUTHZ_GROUPFILE
+ $LOAD_MOD_AUTHZ_HOST
++$LOAD_MOD_ACCESS_COMPAT
+ LoadModule          authz_svn_module "$MOD_AUTHZ_SVN"
+ 
+ __EOF__
+@@ -497,6 +510,161 @@ CustomLog           "$HTTPD_ROOT/ops" "%t %u %{SVN
+   SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
+   ${SVN_PATH_AUTHZ_LINE}
+ </Location>
++<Location /authz-test-work/anon>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  # This may seem unnecessary but granting access to everyone here is necessary
++  # to exercise a bug with httpd 2.3.x+.  The "Require all granted" syntax is
++  # new to 2.3.x+ which we can detect with the mod_authz_core.c module
++  # signature.  Use the "Allow from all" syntax with older versions for symmetry.
++  <IfModule mod_authz_core.c>
++    Require all granted
++  </IfModule>
++  <IfModule !mod_authz_core.c>
++    Allow from all
++  </IfMOdule>
++  ${SVN_PATH_AUTHZ_LINE}
++</Location>
++<Location /authz-test-work/mixed>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  Satisfy Any
++  ${SVN_PATH_AUTHZ_LINE}
++</Location>
++<Location /authz-test-work/mixed-noauthwhenanon>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  AuthzSVNNoAuthWhenAnonymousAllowed On
++  SVNPathAuthz On
++</Location>
++<Location /authz-test-work/authn>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  ${SVN_PATH_AUTHZ_LINE}
++</Location>
++<Location /authz-test-work/authn-anonoff>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  AuthzSVNAnonymous Off
++  SVNPathAuthz On
++</Location>
++<Location /authz-test-work/authn-lcuser>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  AuthzForceUsernameCase Lower
++  ${SVN_PATH_AUTHZ_LINE}
++</Location>
++<Location /authz-test-work/authn-lcuser>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  Require           valid-user
++  AuthzForceUsernameCase Lower
++  ${SVN_PATH_AUTHZ_LINE}
++</Location>
++<Location /authz-test-work/authn-group>
++  DAV               svn
++  SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++  AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++  SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++  SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++  SVNListParentPath On
++  AuthType          Basic
++  AuthName          "Subversion Repository"
++  AuthUserFile      $HTTPD_USERS
++  AuthGroupFile     $HTTPD_GROUPS
++  Require           group random
++  AuthzSVNAuthoritative Off
++  SVNPathAuthz On
++</Location>
++<IfModule mod_authz_core.c>
++  <Location /authz-test-work/sallrany>
++    DAV               svn
++    SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++    AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++    SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++    SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++    SVNListParentPath On
++    AuthType          Basic
++    AuthName          "Subversion Repository"
++    AuthUserFile      $HTTPD_USERS
++    AuthzSendForbiddenOnFailure On
++    Satisfy All
++    <RequireAny>
++      Require valid-user
++      Require expr req('ALLOW') == '1'
++    </RequireAny>
++    ${SVN_PATH_AUTHZ_LINE}
++  </Location>
++  <Location /authz-test-work/sallrall>
++    DAV               svn
++    SVNParentPath     "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
++    AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
++    SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
++    SVNCacheRevProps  ${CACHE_REVPROPS_SETTING}
++    SVNListParentPath On
++    AuthType          Basic
++    AuthName          "Subversion Repository"
++    AuthUserFile      $HTTPD_USERS
++    AuthzSendForbiddenOnFailure On
++    Satisfy All
++    <RequireAll>
++      Require valid-user
++      Require expr req('ALLOW') == '1'
++    </RequireAll>
++    ${SVN_PATH_AUTHZ_LINE}
++  </Location>
++</IfModule>
+ RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)\$ /svn-test-work/repositories/\$1
+ RedirectMatch           ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)\$ /svn-test-work/repositories/\$1
+ __EOF__
+Index: subversion/tests/cmdline/mod_authz_svn_tests.py
+===================================================================
+--- a/subversion/tests/cmdline/mod_authz_svn_tests.py	(nonexistent)
++++ b/subversion/tests/cmdline/mod_authz_svn_tests.py	(working copy)
+@@ -0,0 +1,1073 @@
++#!/usr/bin/env python
++#
++#  mod_authz_svn_tests.py:  testing mod_authz_svn
++#
++#  Subversion is a tool for revision control.
++#  See http://subversion.apache.org for more information.
++#
++# ====================================================================
++#    Licensed to the Apache Software Foundation (ASF) under one
++#    or more contributor license agreements.  See the NOTICE file
++#    distributed with this work for additional information
++#    regarding copyright ownership.  The ASF licenses this file
++#    to you under the Apache License, Version 2.0 (the
++#    "License"); you may not use this file except in compliance
++#    with the License.  You may obtain a copy of the License at
++#
++#      http://www.apache.org/licenses/LICENSE-2.0
++#
++#    Unless required by applicable law or agreed to in writing,
++#    software distributed under the License is distributed on an
++#    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++#    KIND, either express or implied.  See the License for the
++#    specific language governing permissions and limitations
++#    under the License.
++######################################################################
++
++# General modules
++import os, re, logging
++
++logger = logging.getLogger()
++
++# Our testing module
++import svntest
++
++# (abbreviation)
++Skip = svntest.testcase.Skip_deco
++SkipUnless = svntest.testcase.SkipUnless_deco
++XFail = svntest.testcase.XFail_deco
++Issues = svntest.testcase.Issues_deco
++Issue = svntest.testcase.Issue_deco
++Wimp = svntest.testcase.Wimp_deco
++
++ls_of_D_no_H = '''<html><head><title>repos - Revision 1: /A/D</title></head>
++<body>
++ <h2>repos - Revision 1: /A/D</h2>
++ <ul>
++  <li><a href="../">..</a></li>
++  <li><a href="G/">G/</a></li>
++  <li><a href="gamma">gamma</a></li>
++ </ul>
++</body></html>'''
++
++ls_of_D_H = '''<html><head><title>repos - Revision 1: /A/D</title></head>
++<body>
++ <h2>repos - Revision 1: /A/D</h2>
++ <ul>
++  <li><a href="../">..</a></li>
++  <li><a href="G/">G/</a></li>
++  <li><a href="H/">H/</a></li>
++  <li><a href="gamma">gamma</a></li>
++ </ul>
++</body></html>'''
++
++ls_of_H = '''<html><head><title>repos - Revision 1: /A/D/H</title></head>
++<body>
++ <h2>repos - Revision 1: /A/D/H</h2>
++ <ul>
++  <li><a href="../">..</a></li>
++  <li><a href="chi">chi</a></li>
++  <li><a href="omega">omega</a></li>
++  <li><a href="psi">psi</a></li>
++ </ul>
++</body></html>'''
++
++user1 = svntest.main.wc_author
++user1_upper = user1.upper()
++user1_pass = svntest.main.wc_passwd
++user1_badpass = 'XXX'
++assert user1_pass != user1_badpass, "Passwords can't match"
++user2 = svntest.main.wc_author2
++user2_upper = user2.upper()
++user2_pass = svntest.main.wc_passwd
++user2_badpass = 'XXX'
++assert user2_pass != user2_badpass, "Passwords can't match"
++
++def write_authz_file(sbox):
++    svntest.main.write_authz_file(sbox, {
++                                          '/':  '$anonymous = r\n' +
++                                                'jrandom = rw\n' +
++                                                'jconstant = rw',
++                                          '/A/D/H': '$anonymous =\n' +
++                                                    '$authenticated =\n' +
++                                                    'jrandom = rw'
++                                        })
++
++def write_authz_file_groups(sbox):
++    authz_name = sbox.authz_name()
++    svntest.main.write_authz_file(sbox,{
++                                         '/':  '* =',
++                                       })
++
++def verify_get(test_area_url, path, user, pw,
++               expected_status, expected_body, headers):
++  import httplib
++  from urlparse import urlparse
++  import base64
++
++  req_url = test_area_url + path
++
++  loc = urlparse(req_url)
++
++  if loc.scheme == 'http':
++    h = httplib.HTTPConnection(loc.hostname, loc.port)
++  else:
++    h = httplib.HTTPSConnection(loc.hostname, loc.port)
++
++  if headers is None:
++    headers = {}
++
++  if user and pw:
++      auth_info = user + ':' + pw
++      headers['Authorization'] = 'Basic ' + base64.b64encode(auth_info)
++  else:
++      auth_info = "anonymous"
++
++  h.request('GET', req_url, None, headers)
++
++  r = h.getresponse()
++
++  actual_status = r.status
++  if expected_status and expected_status != actual_status:
++
++      logger.warn("Expected status '" + str(expected_status) +
++                  "' but got '" + str(actual_status) +
++                  "' on url '" + req_url + "' (" +
++                  auth_info + ").")
++      raise svntest.Failure
++
++  if expected_body:
++      actual_body = r.read()
++      if expected_body != actual_body:
++        logger.warn("Expected body:")
++        logger.warn(expected_body)
++        logger.warn("But got:")
++        logger.warn(actual_body)
++        logger.warn("on url '" + req_url + "' (" + auth_info + ").")
++        raise svntest.Failure
++
++def verify_gets(test_area_url, tests):
++  for test in tests:
++      verify_get(test_area_url, test['path'], test.get('user'), test.get('pw'),
++                 test['status'], test.get('body'), test.get('headers'))
++
++
++######################################################################
++# Tests
++#
++#   Each test must return on success or raise on failure.
++
++
++#----------------------------------------------------------------------
++
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++def anon(sbox):
++  "test anonymous access"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/anon')
++
++  write_authz_file(sbox)
++
++  anon_tests = ( 
++                 { 'path': '', 'status': 301 },
++                 { 'path': '/', 'status': 200 },
++                 { 'path': '/repos', 'status': 301 },
++                 { 'path': '/repos/', 'status': 200 },
++                 { 'path': '/repos/A', 'status': 301 },
++                 { 'path': '/repos/A/', 'status': 200 },
++                 { 'path': '/repos/A/D', 'status': 301 },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H },
++                 { 'path': '/repos/A/D/gamma', 'status': 200 },
++                 { 'path': '/repos/A/D/H', 'status': 403 },
++                 { 'path': '/repos/A/D/H/', 'status': 403 },
++                 { 'path': '/repos/A/D/H/chi', 'status': 403 },
++                 # auth isn't configured so nothing should change when passing
++                 # authn details
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass},
++               )
++
++  verify_gets(test_area_url, anon_tests)
++
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++def mixed(sbox):
++  "test mixed anonymous and authenticated access"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/mixed')
++
++  write_authz_file(sbox)
++
++  mixed_tests = (
++                 { 'path': '', 'status': 301,  },
++                 { 'path': '/', 'status': 200,  },
++                 { 'path': '/repos', 'status': 301,  },
++                 { 'path': '/repos/', 'status': 200,  },
++                 { 'path': '/repos/A', 'status': 301,  },
++                 { 'path': '/repos/A/', 'status': 200,  },
++                 { 'path': '/repos/A/D', 'status': 301,  },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access to H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 )
++
++  verify_gets(test_area_url, mixed_tests)
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++@XFail(svntest.main.is_httpd_authz_provider_enabled)
++# uses the AuthzSVNNoAuthWhenAnonymousAllowed On directive
++# this is broken with httpd 2.3.x+ since it requires the auth system to accept
++# r->user == NULL and there is a test for this in server/request.c now.  It
++# was intended as a workaround for the lack of Satisfy Any in 2.3.x+ which
++# was resolved by httpd with mod_access_compat in 2.3.x+.
++def mixed_noauthwhenanon(sbox):
++  "test mixed with noauthwhenanon directive"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/mixed-noauthwhenanon')
++
++  write_authz_file(sbox)
++
++  noauthwhenanon_tests = (
++                 { 'path': '', 'status': 301,  },
++                 { 'path': '/', 'status': 200,  },
++                 { 'path': '/repos', 'status': 301,  },
++                 { 'path': '/repos/', 'status': 200,  },
++                 { 'path': '/repos/A', 'status': 301,  },
++                 { 'path': '/repos/A/', 'status': 200,  },
++                 { 'path': '/repos/A/D', 'status': 301,  },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access to H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 # note that unlike doing this with Satisfy Any this case
++                 # actually provides anon access when provided with an invalid
++                 # password
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 )
++
++  verify_gets(test_area_url, noauthwhenanon_tests)
++
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++def authn(sbox):
++  "test authenticated only access"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/authn')
++
++  write_authz_file(sbox)
++
++  authn_tests = (
++                 { 'path': '', 'status': 401,  },
++                 { 'path': '/', 'status': 401,  },
++                 { 'path': '/repos', 'status': 401,  },
++                 { 'path': '/repos/', 'status': 401,  },
++                 { 'path': '/repos/A', 'status': 401,  },
++                 { 'path': '/repos/A/', 'status': 401,  },
++                 { 'path': '/repos/A/D', 'status': 401,  },
++                 { 'path': '/repos/A/D/', 'status': 401, },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access to H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 # try with upper case username for user1
++                 { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with upper case username for user2
++                 { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 )
++
++  verify_gets(test_area_url, authn_tests)
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++def authn_anonoff(sbox):
++  "test authenticated only access with anonoff"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/authn-anonoff')
++
++  write_authz_file(sbox)
++
++  anonoff_tests = (
++                 { 'path': '', 'status': 401,  },
++                 { 'path': '/', 'status': 401,  },
++                 { 'path': '/repos', 'status': 401,  },
++                 { 'path': '/repos/', 'status': 401,  },
++                 { 'path': '/repos/A', 'status': 401,  },
++                 { 'path': '/repos/A/', 'status': 401,  },
++                 { 'path': '/repos/A/D', 'status': 401,  },
++                 { 'path': '/repos/A/D/', 'status': 401, },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access to H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 # try with upper case username for user1
++                 { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with upper case username for user2
++                 { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 )
++
++  verify_gets(test_area_url, anonoff_tests)
++
++@SkipUnless(svntest.main.is_ra_type_dav)
++def authn_lcuser(sbox):
++  "test authenticated only access with lcuser"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/authn-lcuser')
++
++  write_authz_file(sbox)
++
++  lcuser_tests = (
++                 # try with upper case username for user1 (works due to lcuser option)
++                 { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1_upper, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
++                 # try with upper case username for user2 (works due to lcuser option)
++                 { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
++                 )
++
++  verify_gets(test_area_url, lcuser_tests)
++
++# authenticated access only by group - an excuse to use AuthzSVNAuthoritative Off
++# this is terribly messed up, Require group runs after mod_authz_svn.
++# so if mod_authz_svn grants the access then it doesn't matter what the group
++# requirement says.  If we reject the access then you can use the AuthzSVNAuthoritative Off
++# directive to fall through to the group check.  Overall the behavior of setups like this
++# is almost guaranteed to not be what users expect.
++@SkipUnless(svntest.main.is_ra_type_dav)
++def authn_group(sbox):
++  "test authenticated only access via groups"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/authn-group')
++
++  # Can't use write_authz_file() as most tests do, because we want to deny all
++  # access with mod_authz_svn so the tests fall through to the group handling
++  authz_name = sbox.authz_name()
++  svntest.main.write_authz_file(sbox, {
++                                        '/':  '* =',
++                                      })
++
++  group_tests = (
++                 { 'path': '', 'status': 401, },
++                 { 'path': '/', 'status': 401, },
++                 { 'path': '/repos', 'status': 401, },
++                 { 'path': '/repos/', 'status': 401, },
++                 { 'path': '/repos/A', 'status': 401, },
++                 { 'path': '/repos/A/', 'status': 401, },
++                 { 'path': '/repos/A/D', 'status': 401, },
++                 { 'path': '/repos/A/D/', 'status': 401, },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access repo including H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 )
++
++  verify_gets(test_area_url, group_tests)
++
++# This test exists to validate our behavior when used with the new authz
++# provider system introduced in httpd 2.3.x.  The Satisfy directive
++# determines how older authz hooks are combined and the RequireA(ll|ny)
++# blocks handles how new authz providers are combined.  The overall results of
++# all the authz providers (combined per the Require* blocks) are then
++# combined with the other authz hooks via the Satisfy directive.
++# Meaning this test requires that mod_authz_svn says yes and there is
++# either a valid user or the ALLOW header is 1.  The header may seem
++# like a silly test but it's easier to exercise than, say, a host directive
++# in a repeatable test.
++@SkipUnless(svntest.main.is_httpd_authz_provider_enabled)
++def authn_sallrany(sbox):
++  "test satisfy all require any config"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/sallrany')
++
++  write_authz_file(sbox)
++
++  allow_header = { 'ALLOW': '1' }
++
++  sallrany_tests = (
++                 #anon access isn't allowed without ALLOW header
++                 { 'path': '', 'status': 401, },
++                 { 'path': '/', 'status': 401, },
++                 { 'path': '/repos', 'status': 401, },
++                 { 'path': '/repos/', 'status': 401, },
++                 { 'path': '/repos/A', 'status': 401, },
++                 { 'path': '/repos/A/', 'status': 401, },
++                 { 'path': '/repos/A/D', 'status': 401, },
++                 { 'path': '/repos/A/D/', 'status': 401, },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, },
++                 { 'path': '/repos/A/D/H', 'status': 401, },
++                 { 'path': '/repos/A/D/H/', 'status': 401, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, },
++                 # auth is configured and user1 is allowed access repo including H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
++                 # anon is allowed with the ALLOW header
++                 { 'path': '', 'status': 301, 'headers': allow_header },
++                 { 'path': '/', 'status': 200, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 301, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 200, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 301, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 200, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 301, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'headers': allow_header },
++                 # these 3 tests return 403 instead of 401 because the config allows
++                 # the anon user with the ALLOW header without any auth and the old hook
++                 # system has no way of knowing it should return 401 since authentication is
++                 # configured and can change the behavior.  It could decide to return 401 just on
++                 # the basis of authentication being configured but then that leaks info in other
++                 # cases so it's better for this case to be "broken".
++                 { 'path': '/repos/A/D/H', 'status': 403, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'headers': allow_header },
++                 # auth is configured and user1 is allowed access to the repo including H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++
++                 )
++
++  verify_gets(test_area_url, sallrany_tests)
++
++# See comments on authn_sallrany test for some background on the interaction
++# of Satisfy Any and the newer Require blocks.
++@SkipUnless(svntest.main.is_httpd_authz_provider_enabled)
++def authn_sallrall(sbox):
++  "test satisfy all require all config"
++  sbox.build(read_only = True, create_wc = False)
++
++  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
++                                        '/authz-test-work/sallrall')
++
++  write_authz_file(sbox)
++
++  allow_header = { 'ALLOW': '1' }
++
++  sallrall_tests = (
++                 #anon access isn't allowed without ALLOW header
++                 { 'path': '', 'status': 403, },
++                 { 'path': '/', 'status': 403, },
++                 { 'path': '/repos', 'status': 403, },
++                 { 'path': '/repos/', 'status': 403, },
++                 { 'path': '/repos/A', 'status': 403, },
++                 { 'path': '/repos/A/', 'status': 403, },
++                 { 'path': '/repos/A/D', 'status': 403, },
++                 { 'path': '/repos/A/D/', 'status': 403, },
++                 { 'path': '/repos/A/D/gamma', 'status': 403, },
++                 { 'path': '/repos/A/D/H', 'status': 403, },
++                 { 'path': '/repos/A/D/H/', 'status': 403, },
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, },
++                 # auth is configured but no access is allowed without the ALLOW header
++                 { 'path': '', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_pass},
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass},
++                 # auth is configured but no access is allowed without the ALLOW header
++                 { 'path': '', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass},
++                 # anon is not allowed even with ALLOW header
++                 { 'path': '', 'status': 401, 'headers': allow_header },
++                 { 'path': '/', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'headers': allow_header },
++                 # auth is configured and user1 is allowed access to the repo including H
++                 { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
++                   'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
++                 # try with the wrong password for user1
++                 { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
++                 # auth is configured and user2 is not allowed access to H
++                 { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
++                   'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
++                 # try with the wrong password for user2
++                 { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++                 { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
++
++                 )
++
++  verify_gets(test_area_url, sallrall_tests)
++
++
++########################################################################
++# Run the tests
++
++
++# list all tests here, starting with None:
++test_list = [ None,
++              anon,
++              mixed,
++              mixed_noauthwhenanon,
++              authn,
++              authn_anonoff,
++              authn_lcuser,
++              authn_group,
++              authn_sallrany,
++              authn_sallrall,
++             ]
++serial_only = True
++
++if __name__ == '__main__':
++  svntest.main.run_tests(test_list)
++  # NOTREACHED
++
++
++### End of file.
+
+Property changes on: subversion/tests/cmdline/mod_authz_svn_tests.py
+___________________________________________________________________
+Added: svn:eol-style
+## -0,0 +1 ##
++native
+\ No newline at end of property
+Index: subversion/tests/cmdline/svntest/main.py
+===================================================================
+--- a/subversion/tests/cmdline/svntest/main.py	(revision 1691883)
++++ b/subversion/tests/cmdline/svntest/main.py	(working copy)
+@@ -1378,6 +1378,30 @@ def is_plaintext_password_storage_disabled():
+     return False
+   return True
+ 
++
++# https://issues.apache.org/bugzilla/show_bug.cgi?id=56480
++# https://issues.apache.org/bugzilla/show_bug.cgi?id=55397
++__mod_dav_url_quoting_broken_versions = frozenset([
++    '2.2.27',
++    '2.2.26',
++    '2.2.25',
++    '2.4.9',
++    '2.4.8',
++    '2.4.7',
++    '2.4.6',
++    '2.4.5',
++])
++def is_mod_dav_url_quoting_broken():
++    if is_ra_type_dav():
++        return (options.httpd_version in __mod_dav_url_quoting_broken_versions)
++    return None
++
++def is_httpd_authz_provider_enabled():
++    if is_ra_type_dav():
++      v = options.httpd_version.split('.')
++      return (v[0] == '2' and int(v[1]) >= 3) or int(v[0]) > 2
++    return None
++
+ ######################################################################
+ 
+ 
+@@ -1435,6 +1459,8 @@ class TestSpawningThread(threading.Thread):
+       args.append('--ssl-cert=' + options.ssl_cert)
+     if options.http_proxy:
+       args.append('--http-proxy=' + options.http_proxy)
++    if options.httpd_version:
++      args.append('--httpd-version=' + options.httpd_version)
+ 
+     result, stdout_lines, stderr_lines = spawn_process(command, 0, False, None,
+                                                        *args)
+@@ -1600,6 +1626,12 @@ class TestRunner:
+       sandbox.cleanup_test_paths()
+     return exit_code
+ 
++def is_httpd_authz_provider_enabled():
++    if is_ra_type_dav():
++      v = options.httpd_version.split('.')
++      return (v[0] == '2' and int(v[1]) >= 3) or int(v[0]) > 2
++    return None
++
+ ######################################################################
+ # Main testing functions
+ 
+@@ -1780,6 +1812,8 @@ def _create_parser():
+                     help='Path to SSL server certificate.')
+   parser.add_option('--http-proxy', action='store',
+                     help='Use the HTTP Proxy at hostname:port.')
++  parser.add_option('--httpd-version', action='store',
++                    help='Assume HTTPD is this version.')
+   parser.add_option('--tools-bin', action='store', dest='tools_bin',
+                     help='Use the svn tools installed in this path')
+ 
+Index: win-tests.py
+===================================================================
+--- a/win-tests.py	(revision 1691883)
++++ b/win-tests.py	(working copy)
+@@ -481,6 +481,7 @@ class Httpd:
+     self.httpd_config = os.path.join(self.root, 'httpd.conf')
+     self.httpd_users = os.path.join(self.root, 'users')
+     self.httpd_mime_types = os.path.join(self.root, 'mime.types')
++    self.httpd_groups = os.path.join(self.root, 'groups')
+     self.abs_builddir = abs_builddir
+     self.abs_objdir = abs_objdir
+     self.service_name = 'svn-test-httpd-' + str(httpd_port)
+@@ -494,6 +495,7 @@ class Httpd:
+     create_target_dir(self.root_dir)
+ 
+     self._create_users_file()
++    self._create_groups_file()
+     self._create_mime_types_file()
+     self._create_dontdothat_file()
+ 
+@@ -540,6 +542,8 @@ class Httpd:
+     if self.httpd_ver >= 2.2:
+       fp.write(self._sys_module('auth_basic_module', 'mod_auth_basic.so'))
+       fp.write(self._sys_module('authn_file_module', 'mod_authn_file.so'))
++      fp.write(self._sys_module('authz_groupfile_module', 'mod_authz_groupfile.so'))
++      fp.write(self._sys_module('authz_host_module', 'mod_authz_host.so'))
+     else:
+       fp.write(self._sys_module('auth_module', 'mod_auth.so'))
+     fp.write(self._sys_module('alias_module', 'mod_alias.so'))
+@@ -562,6 +566,7 @@ class Httpd:
+     # Define two locations for repositories
+     fp.write(self._svn_repo('repositories'))
+     fp.write(self._svn_repo('local_tmp'))
++    fp.write(self._svn_authz_repo())
+ 
+     # And two redirects for the redirect tests
+     fp.write('RedirectMatch permanent ^/svn-test-work/repositories/'
+@@ -592,7 +597,18 @@ class Httpd:
+                                     'jrandom', 'rayjandom'])
+     os.spawnv(os.P_WAIT, htpasswd, ['htpasswd.exe', '-bp',  self.httpd_users,
+                                     'jconstant', 'rayjandom'])
++    os.spawnv(os.P_WAIT, htpasswd, ['htpasswd.exe', '-bp',  self.httpd_users,
++                                    'JRANDOM', 'rayjandom'])
++    os.spawnv(os.P_WAIT, htpasswd, ['htpasswd.exe', '-bp',  self.httpd_users,
++                                    'JCONSTANT', 'rayjandom'])
+ 
++  def _create_groups_file(self):
++    "Create groups for mod_authz_svn tests"
++    fp = open(self.httpd_groups, 'w')
++    fp.write('random: jrandom\n')
++    fp.write('constant: jconstant\n')
++    fp.close()
++
+   def _create_mime_types_file(self):
+     "Create empty mime.types file"
+     fp = open(self.httpd_mime_types, 'w')
+@@ -652,6 +668,153 @@ class Httpd:
+       '  DontDoThatConfigFile ' + self._quote(self.dontdothat_file) + '\n' \
+       '</Location>\n'
+ 
++  def _svn_authz_repo(self):
++    local_tmp = os.path.join(self.abs_builddir,
++                             CMDLINE_TEST_SCRIPT_NATIVE_PATH,
++                             'svn-test-work', 'local_tmp')
++    return \
++      '<Location /authz-test-work/anon>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  <IfModule mod_authz_core.c>' + '\n' \
++      '    Require all granted' + '\n' \
++      '  </IfModule>' + '\n' \
++      '  <IfModule !mod_authz_core.c>' + '\n' \
++      '    Allow from all' + '\n' \
++      '  </IfModule>' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/mixed>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  Satisfy Any' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/mixed-noauthwhenanon>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  AuthzSVNNoAuthWhenAnonymousAllowed On' + '\n' \
++      '  SVNPathAuthz On' + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/authn>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/authn-anonoff>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  AuthzSVNAnonymous Off' + '\n' \
++      '  SVNPathAuthz On' + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/authn-lcuser>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  AuthzForceUsernameCase Lower' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/authn-lcuser>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  Require           valid-user' + '\n' \
++      '  AuthzForceUsernameCase Lower' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/authn-group>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  AuthGroupFile    ' + self._quote(self.httpd_groups) + '\n' \
++      '  Require           group random' + '\n' \
++      '  AuthzSVNAuthoritative Off' + '\n' \
++      '  SVNPathAuthz On' + '\n' \
++      '</Location>' + '\n' \
++      '<IfModule mod_authz_core.c>' + '\n' \
++      '<Location /authz-test-work/sallrany>' + '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  AuthzSendForbiddenOnFailure On' + '\n' \
++      '  Satisfy All' + '\n' \
++      '  <RequireAny>' + '\n' \
++      '    Require valid-user' + '\n' \
++      '    Require expr req(\'ALLOW\') == \'1\'' + '\n' \
++      '  </RequireAny>' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '<Location /authz-test-work/sallrall>'+ '\n' \
++      '  DAV               svn' + '\n' \
++      '  SVNParentPath     ' + local_tmp + '\n' \
++      '  AuthzSVNAccessFile ' + self._quote(self.authz_file) + '\n' \
++      '  SVNAdvertiseV2Protocol ' + self.httpv2_option + '\n' \
++      '  SVNListParentPath On' + '\n' \
++      '  AuthType          Basic' + '\n' \
++      '  AuthName          "Subversion Repository"' + '\n' \
++      '  AuthUserFile    ' + self._quote(self.httpd_users) + '\n' \
++      '  AuthzSendForbiddenOnFailure On' + '\n' \
++      '  Satisfy All' + '\n' \
++      '  <RequireAll>' + '\n' \
++      '    Require valid-user' + '\n' \
++      '    Require expr req(\'ALLOW\') == \'1\'' + '\n' \
++      '  </RequireAll>' + '\n' \
++      '  SVNPathAuthz ' + self.path_authz_option + '\n' \
++      '</Location>' + '\n' \
++      '</IfModule>' + '\n' \
++
+   def start(self):
+     if self.service:
+       self._start_service()
+@@ -786,6 +949,10 @@ if not test_javahl:
+     log_file = os.path.join(abs_builddir, log)
+     fail_log_file = os.path.join(abs_builddir, faillog)
+ 
++  if run_httpd:
++    httpd_version = "%.1f" % daemon.httpd_ver
++  else:
++    httpd_version = None
+   th = run_tests.TestHarness(abs_srcdir, abs_builddir,
+                              log_file,
+                              fail_log_file,
+@@ -795,6 +962,7 @@ if not test_javahl:
+                              fsfs_sharding, fsfs_packing,
+                              list_tests, svn_bin, mode_filter,
+                              milestone_filter,
++                             httpd_version=httpd_version,
+                              set_log_level=log_level, ssl_cert=ssl_cert)
+   old_cwd = os.getcwd()
+   try:
diff --git a/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3187.patch b/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3187.patch
new file mode 100644
index 0000000..494e11c
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/subversion/subversion-1.8.13/subversion-CVE-2015-3187.patch
@@ -0,0 +1,346 @@
+Fix CVE-2015-3187
+
+Patch is from:
+http://subversion.apache.org/security/CVE-2015-3187-advisory.txt
+
+Upstream-Status: Backport
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+
+Index: subversion/libsvn_repos/rev_hunt.c
+===================================================================
+--- a/subversion/libsvn_repos/rev_hunt.c	(revision 1685077)
++++ b/subversion/libsvn_repos/rev_hunt.c	(working copy)
+@@ -726,23 +726,6 @@ svn_repos_trace_node_locations(svn_fs_t *fs,
+       if (! prev_path)
+         break;
+ 
+-      if (authz_read_func)
+-        {
+-          svn_boolean_t readable;
+-          svn_fs_root_t *tmp_root;
+-
+-          SVN_ERR(svn_fs_revision_root(&tmp_root, fs, revision, currpool));
+-          SVN_ERR(authz_read_func(&readable, tmp_root, path,
+-                                  authz_read_baton, currpool));
+-          if (! readable)
+-            {
+-              svn_pool_destroy(lastpool);
+-              svn_pool_destroy(currpool);
+-
+-              return SVN_NO_ERROR;
+-            }
+-        }
+-
+       /* Assign the current path to all younger revisions until we reach
+          the copy target rev. */
+       while ((revision_ptr < revision_ptr_end)
+@@ -765,6 +748,20 @@ svn_repos_trace_node_locations(svn_fs_t *fs,
+       path = prev_path;
+       revision = prev_rev;
+ 
++      if (authz_read_func)
++        {
++          svn_boolean_t readable;
++          SVN_ERR(svn_fs_revision_root(&root, fs, revision, currpool));
++          SVN_ERR(authz_read_func(&readable, root, path,
++                                  authz_read_baton, currpool));
++          if (!readable)
++            {
++              svn_pool_destroy(lastpool);
++              svn_pool_destroy(currpool);
++              return SVN_NO_ERROR;
++            }
++        }
++
+       /* Clear last pool and switch. */
+       svn_pool_clear(lastpool);
+       tmppool = lastpool;
+Index: subversion/tests/cmdline/authz_tests.py
+===================================================================
+--- a/subversion/tests/cmdline/authz_tests.py	(revision 1685077)
++++ b/subversion/tests/cmdline/authz_tests.py	(working copy)
+@@ -609,8 +609,10 @@ def authz_log_and_tracing_test(sbox):
+ 
+   ## cat
+ 
++  expected_err2 = ".*svn: E195012: Unable to find repository location.*"
++
+   # now see if we can look at the older version of rho
+-  svntest.actions.run_and_verify_svn(None, None, expected_err,
++  svntest.actions.run_and_verify_svn(None, None, expected_err2,
+                                      'cat', '-r', '2', D_url+'/rho')
+ 
+   if sbox.repo_url.startswith('http'):
+@@ -627,10 +629,11 @@ def authz_log_and_tracing_test(sbox):
+   svntest.actions.run_and_verify_svn(None, None, expected_err,
+                                      'diff', '-r', 'HEAD', G_url+'/rho')
+ 
+-  svntest.actions.run_and_verify_svn(None, None, expected_err,
++  # diff treats the unreadable path as indicating an add so no error
++  svntest.actions.run_and_verify_svn(None, None, [],
+                                      'diff', '-r', '2', D_url+'/rho')
+ 
+-  svntest.actions.run_and_verify_svn(None, None, expected_err,
++  svntest.actions.run_and_verify_svn(None, None, [],
+                                      'diff', '-r', '2:4', D_url+'/rho')
+ 
+ # test whether read access is correctly granted and denied
+Index: subversion/tests/libsvn_repos/repos-test.c
+===================================================================
+--- a/subversion/tests/libsvn_repos/repos-test.c	(revision 1685077)
++++ b/subversion/tests/libsvn_repos/repos-test.c	(working copy)
+@@ -3524,6 +3524,245 @@ test_load_r0_mergeinfo(const svn_test_opts_t *opts
+   return SVN_NO_ERROR;
+ }
+ 
++static svn_error_t *
++mkdir_delete_copy(svn_repos_t *repos,
++                  const char *src,
++                  const char *dst,
++                  apr_pool_t *pool)
++{
++  svn_fs_t *fs = svn_repos_fs(repos);
++  svn_revnum_t youngest_rev;
++  svn_fs_txn_t *txn;
++  svn_fs_root_t *txn_root, *rev_root;
++
++  SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
++  
++  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
++  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
++  SVN_ERR(svn_fs_make_dir(txn_root, "A/T", pool));
++  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
++
++  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
++  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
++  SVN_ERR(svn_fs_delete(txn_root, "A/T", pool));
++  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
++
++  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
++  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
++  SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev - 1, pool));
++  SVN_ERR(svn_fs_copy(rev_root, src, txn_root, dst, pool));
++  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
++
++  return SVN_NO_ERROR;
++}
++
++struct authz_read_baton_t {
++  apr_hash_t *paths;
++  apr_pool_t *pool;
++  const char *deny;
++};
++
++static svn_error_t *
++authz_read_func(svn_boolean_t *allowed,
++                svn_fs_root_t *root,
++                const char *path,
++                void *baton,
++                apr_pool_t *pool)
++{
++  struct authz_read_baton_t *b = baton;
++
++  if (b->deny && !strcmp(b->deny, path))
++    *allowed = FALSE;
++  else
++    *allowed = TRUE;
++
++  svn_hash_sets(b->paths, apr_pstrdup(b->pool, path), (void*)1);
++
++  return SVN_NO_ERROR;
++}
++
++static svn_error_t *
++verify_locations(apr_hash_t *actual,
++                 apr_hash_t *expected,
++                 apr_hash_t *checked,
++                 apr_pool_t *pool)
++{
++  apr_hash_index_t *hi;
++
++  for (hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi))
++    {
++      const svn_revnum_t *rev = svn__apr_hash_index_key(hi);
++      const char *path = apr_hash_get(actual, rev, sizeof(svn_revnum_t));
++
++      if (!path)
++        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++                                 "expected %s for %d found (null)",
++                                 (char*)svn__apr_hash_index_val(hi),
++                                 (int)*rev);
++      else if (strcmp(path, svn__apr_hash_index_val(hi)))
++        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++                                 "expected %s for %d found %s",
++                                 (char*)svn__apr_hash_index_val(hi),
++                                 (int)*rev, path);
++
++    }
++
++  for (hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi))
++    {
++      const svn_revnum_t *rev = svn__apr_hash_index_key(hi);
++      const char *path = apr_hash_get(expected, rev, sizeof(svn_revnum_t));
++
++      if (!path)
++        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++                                 "found %s for %d expected (null)",
++                                 (char*)svn__apr_hash_index_val(hi),
++                                 (int)*rev);
++      else if (strcmp(path, svn__apr_hash_index_val(hi)))
++        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++                                 "found %s for %d expected %s",
++                                 (char*)svn__apr_hash_index_val(hi),
++                                 (int)*rev, path);
++
++      if (!svn_hash_gets(checked, path))
++        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++                                 "did not check %s", path);
++    }
++
++  return SVN_NO_ERROR;
++}
++
++static void
++set_expected(apr_hash_t *expected,
++             svn_revnum_t rev,
++             const char *path,
++             apr_pool_t *pool)
++{
++  svn_revnum_t *rp = apr_palloc(pool, sizeof(svn_revnum_t));
++  *rp = rev;
++  apr_hash_set(expected, rp, sizeof(svn_revnum_t), path);
++}
++
++static svn_error_t *
++trace_node_locations_authz(const svn_test_opts_t *opts,
++                           apr_pool_t *pool)
++{
++  svn_repos_t *repos;
++  svn_fs_t *fs;
++  svn_revnum_t youngest_rev = 0;
++  svn_fs_txn_t *txn;
++  svn_fs_root_t *txn_root;
++  struct authz_read_baton_t arb;
++  apr_array_header_t *revs = apr_array_make(pool, 10, sizeof(svn_revnum_t));
++  apr_hash_t *locations;
++  apr_hash_t *expected = apr_hash_make(pool);
++  int i;
++
++  /* Create test repository. */
++  SVN_ERR(svn_test__create_repos(&repos, "test-repo-trace-node-locations-authz",
++                                 opts, pool));
++  fs = svn_repos_fs(repos);
++
++  /* r1 create A */
++  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
++  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
++  SVN_ERR(svn_fs_make_dir(txn_root, "A", pool));
++  SVN_ERR(svn_fs_make_file(txn_root, "A/f", pool));
++  SVN_ERR(svn_test__set_file_contents(txn_root, "A/f", "foobar", pool));
++  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
++
++  /* r4 copy A to B */
++  SVN_ERR(mkdir_delete_copy(repos, "A", "B", pool));
++
++  /* r7 copy B to C */
++  SVN_ERR(mkdir_delete_copy(repos, "B", "C", pool));
++
++  /* r10 copy C to D */
++  SVN_ERR(mkdir_delete_copy(repos, "C", "D", pool));
++
++  SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
++  SVN_ERR_ASSERT(youngest_rev == 10);
++
++  arb.paths = apr_hash_make(pool);
++  arb.pool = pool;
++  arb.deny = NULL;
++
++  apr_array_clear(revs);
++  for (i = 0; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  set_expected(expected, 10, "/D/f", pool);
++  set_expected(expected, 8, "/C/f", pool);
++  set_expected(expected, 7, "/C/f", pool);
++  set_expected(expected, 5, "/B/f", pool);
++  set_expected(expected, 4, "/B/f", pool);
++  set_expected(expected, 2, "/A/f", pool);
++  set_expected(expected, 1, "/A/f", pool);
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  apr_array_clear(revs);
++  for (i = 1; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  apr_array_clear(revs);
++  for (i = 2; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  set_expected(expected, 1, NULL, pool);
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  apr_array_clear(revs);
++  for (i = 3; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  set_expected(expected, 2, NULL, pool);
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  apr_array_clear(revs);
++  for (i = 6; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  set_expected(expected, 5, NULL, pool);
++  set_expected(expected, 4, NULL, pool);
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  arb.deny = "/B/f";
++  apr_array_clear(revs);
++  for (i = 0; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  apr_array_clear(revs);
++  for (i = 6; i <= youngest_rev; ++i)
++    APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  APR_ARRAY_PUSH(revs, svn_revnum_t) = 0;
++  apr_hash_clear(arb.paths);
++  SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
++                                         authz_read_func, &arb, pool));
++  SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
++
++  return SVN_NO_ERROR;
++}
++
+ /* The test table.  */
+ 
+ struct svn_test_descriptor_t test_funcs[] =
+@@ -3573,5 +3812,7 @@ struct svn_test_descriptor_t test_funcs[] =
+                        "test dumping with r0 mergeinfo"),
+     SVN_TEST_OPTS_PASS(test_load_r0_mergeinfo,
+                        "test loading with r0 mergeinfo"),
++    SVN_TEST_OPTS_PASS(trace_node_locations_authz,
++                       "authz for svn_repos_trace_node_locations"),
+     SVN_TEST_NULL
+   };
diff --git a/yocto-poky/meta/recipes-devtools/subversion/subversion_1.8.13.bb b/yocto-poky/meta/recipes-devtools/subversion/subversion_1.8.13.bb
index f843b95..68934b7 100644
--- a/yocto-poky/meta/recipes-devtools/subversion/subversion_1.8.13.bb
+++ b/yocto-poky/meta/recipes-devtools/subversion/subversion_1.8.13.bb
@@ -1,6 +1,7 @@
 SUMMARY = "Subversion (svn) version control system client"
 SECTION = "console/network"
 DEPENDS = "apr-util serf sqlite3 file"
+DEPENDS_append_class-native = " file-replacement-native"
 RDEPENDS_${PN} = "serf"
 LICENSE = "Apache-2"
 HOMEPAGE = "http://subversion.tigris.org"
@@ -13,6 +14,8 @@
            file://libtool2.patch \
            file://disable_macos.patch \
            file://serf.m4-Regex-modified-to-allow-D-in-paths.patch \
+           file://subversion-CVE-2015-3184.patch \
+           file://subversion-CVE-2015-3187.patch \
 "
 SRC_URI[md5sum] = "4413417b529d7bdf82f74e50df02e88b"
 SRC_URI[sha256sum] = "1099cc68840753b48aedb3a27ebd1e2afbcc84ddb871412e5d500e843d607579"
diff --git a/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0010-gcc46-compatibility.patch b/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0010-gcc46-compatibility.patch
new file mode 100644
index 0000000..6279258
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0010-gcc46-compatibility.patch
@@ -0,0 +1,37 @@
+don't break with old compilers and -DGNU_EFI_USE_MS_ABI
+It's entirely legitimate to request GNU_EFI_USE_MS_ABI even if the current
+compiler doesn't support it, and gnu-efi should transparently fall back to
+using legacy techniques to set the calling convention.  We don't get type
+checking, but at least it will still compile.
+
+Adapted from gnu-efi
+
+Author: Steve Langasek <steve.langasek@ubuntu.com>
+Upstream-Status: Pending
+
+Index: syslinux-6.03/efi64/include/efi/x86_64/efibind.h
+===================================================================
+--- syslinux-6.03.orig/efi64/include/efi/x86_64/efibind.h
++++ syslinux-6.03/efi64/include/efi/x86_64/efibind.h
+@@ -25,8 +25,6 @@ Revision History
+ #if defined(GNU_EFI_USE_MS_ABI)
+     #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7))
+         #define HAVE_USE_MS_ABI 1
+-    #else
+-        #error Compiler is too old for GNU_EFI_USE_MS_ABI
+     #endif
+ #endif
+ 
+Index: syslinux-6.03/gnu-efi/gnu-efi-3.0/inc/x86_64/efibind.h
+===================================================================
+--- syslinux-6.03.orig/gnu-efi/gnu-efi-3.0/inc/x86_64/efibind.h
++++ syslinux-6.03/gnu-efi/gnu-efi-3.0/inc/x86_64/efibind.h
+@@ -25,8 +25,6 @@ Revision History
+ #if defined(GNU_EFI_USE_MS_ABI)
+     #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7))
+         #define HAVE_USE_MS_ABI 1
+-    #else
+-        #error Compiler is too old for GNU_EFI_USE_MS_ABI
+     #endif
+ #endif
+ 
diff --git a/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0011-mk-MMD-does-not-take-any-arguments.patch b/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0011-mk-MMD-does-not-take-any-arguments.patch
new file mode 100644
index 0000000..443c1cc
--- /dev/null
+++ b/yocto-poky/meta/recipes-devtools/syslinux/syslinux/0011-mk-MMD-does-not-take-any-arguments.patch
@@ -0,0 +1,33 @@
+From 0f3d83c25491951f1fa84c7957358ef3d1bcd8a9 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 9 Sep 2015 17:39:22 +0000
+Subject: [PATCH] mk: -MMD does not take any arguments
+
+Specify -Wp for each option, clang seems to not accept
+-Wp,-x,y,-a,b
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ mk/syslinux.mk | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+Index: syslinux-6.03/mk/syslinux.mk
+===================================================================
+--- syslinux-6.03.orig/mk/syslinux.mk
++++ syslinux-6.03/mk/syslinux.mk
+@@ -82,11 +82,11 @@ ARCH ?= $(strip $(SUBARCH))
+ GCCWARN  = -W -Wall -Wstrict-prototypes $(DEBUGOPT)
+ 
+ # Common stanza to make gcc generate .*.d dependency files
+-MAKEDEPS = -Wp,-MT,$@,-MD,$(dir $@).$(notdir $@).d
++MAKEDEPS = -MT $@ -MD
+ 
+ # Dependencies that exclude system headers; use whenever we use
+ # header files from the platform.
+-UMAKEDEPS = -Wp,-MT,$@,-MMD,$(dir $@).$(notdir $@).d
++UMAKEDEPS = -MT $@ -MMD
+ 
+ # Items that are only appropriate during development; this file is
+ # removed when tarballs are generated.
diff --git a/yocto-poky/meta/recipes-devtools/syslinux/syslinux_6.03.bb b/yocto-poky/meta/recipes-devtools/syslinux/syslinux_6.03.bb
index ef9ae2f..8534528 100644
--- a/yocto-poky/meta/recipes-devtools/syslinux/syslinux_6.03.bb
+++ b/yocto-poky/meta/recipes-devtools/syslinux/syslinux_6.03.bb
@@ -21,6 +21,8 @@
            file://0007-linux-syslinux-implement-ext_construct_sectmap_fs.patch \
            file://0008-libinstaller-syslinuxext-implement-syslinux_patch_bo.patch \
            file://0009-linux-syslinux-implement-install_bootblock.patch \
+           file://0010-gcc46-compatibility.patch \
+           file://0011-mk-MMD-does-not-take-any-arguments.patch \
            "
 
 SRC_URI[md5sum] = "92a253df9211e9c20172796ecf388f13"
diff --git a/yocto-poky/meta/recipes-devtools/unfs3/unfs3_0.9.22.r490.bb b/yocto-poky/meta/recipes-devtools/unfs3/unfs3_0.9.22.r490.bb
index 45cf545..5130895 100644
--- a/yocto-poky/meta/recipes-devtools/unfs3/unfs3_0.9.22.r490.bb
+++ b/yocto-poky/meta/recipes-devtools/unfs3/unfs3_0.9.22.r490.bb
@@ -29,6 +29,7 @@
 BBCLASSEXTEND = "native nativesdk"
 
 inherit autotools
+EXTRA_OECONF_append_class-native = " --sbindir=${bindir}"
 
 # Turn off these header detects else the inode search
 # will walk entire file systems and this is a real problem
diff --git a/yocto-poky/meta/recipes-extended/bash/bash.inc b/yocto-poky/meta/recipes-extended/bash/bash.inc
index c06f157..020409f 100644
--- a/yocto-poky/meta/recipes-extended/bash/bash.inc
+++ b/yocto-poky/meta/recipes-extended/bash/bash.inc
@@ -7,7 +7,7 @@
 inherit autotools gettext texinfo update-alternatives ptest
 
 EXTRA_AUTORECONF += "--exclude=autoheader"
-EXTRA_OECONF = "--enable-job-control"
+EXTRA_OECONF = "--enable-job-control --without-bash-malloc"
 
 # If NON_INTERACTIVE_LOGIN_SHELLS is defined, all login shells read the
 # startup files, even if they are not interactive.
diff --git a/yocto-poky/meta/recipes-extended/byacc/byacc/byacc-open.patch b/yocto-poky/meta/recipes-extended/byacc/byacc/byacc-open.patch
index 9160543..0058311 100644
--- a/yocto-poky/meta/recipes-extended/byacc/byacc/byacc-open.patch
+++ b/yocto-poky/meta/recipes-extended/byacc/byacc/byacc-open.patch
@@ -1,3 +1,15 @@
+Ubuntu defaults to passing _FORTIFY_SOURCE=2 which breaks byacc as it doesn't
+pass enough arguments to open():
+
+ inlined from 'open_tmpfile' at byacc-20150711/main.c:588:5:
+ /usr/include/x86_64-linux-gnu/bits/fcntl2.h:50:24: error: call to '__open_missing_mode' declared with attribute error:
+ open with O_CREAT in second argument needs 3 arguments
+
+Add a mode of 0666 to fix this.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
 diff --git a/main.c b/main.c
 index 620ce3f..82071a4 100644
 --- a/main.c
diff --git a/yocto-poky/meta/recipes-extended/bzip2/bzip2-1.0.6/fix-bunzip2-qt-returns-0-for-corrupt-archives.patch b/yocto-poky/meta/recipes-extended/bzip2/bzip2-1.0.6/fix-bunzip2-qt-returns-0-for-corrupt-archives.patch
new file mode 100644
index 0000000..ece90d9
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/bzip2/bzip2-1.0.6/fix-bunzip2-qt-returns-0-for-corrupt-archives.patch
@@ -0,0 +1,55 @@
+From 8068659388127e8e63f2d2297ba2348c72b20705 Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Mon, 12 Oct 2015 03:19:51 -0400
+Subject: [PATCH] bzip2: fix bunzip2 -qt returns 0 for corrupt archives
+
+"bzip2 -t FILE" returns 2 if FILE exists, but is not a valid bzip2 file.
+"bzip2 -qt FILE" returns 0 when this happens, although it does print out
+an error message as it does so.
+
+This has been fixed by Debian; just port the changes from the Debian patch file
+"20-legacy.patch".
+
+Debian defect:
+https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=279025
+
+Fix item from changelog:
+http://archive.debian.net/changelogs/pool/main/b/bzip2/bzip2_1.0.2-7/changelog
+
+  * Fixed "bunzip2 -qt returns 0 for corrupt archives" (Closes: #279025).
+
+Upstream-Status: Pending
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+---
+ bzip2.c | 14 ++++++++------
+ 1 file changed, 8 insertions(+), 6 deletions(-)
+
+diff --git a/bzip2.c b/bzip2.c
+index 6de9d1d..f2ce668 100644
+--- a/bzip2.c
++++ b/bzip2.c
+@@ -2003,12 +2003,14 @@ IntNative main ( IntNative argc, Char *argv[] )
+             testf ( aa->name );
+ 	 }
+       }
+-      if (testFailsExist && noisy) {
+-         fprintf ( stderr,
+-           "\n"
+-           "You can use the `bzip2recover' program to attempt to recover\n"
+-           "data from undamaged sections of corrupted files.\n\n"
+-         );
++      if (testFailsExist) {
++         if (noisy) {
++            fprintf ( stderr,
++              "\n"
++              "You can use the `bzip2recover' program to attempt to recover\n"
++              "data from undamaged sections of corrupted files.\n\n"
++            );
++         }
+          setExit(2);
+          exit(exitValue);
+       }
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-extended/bzip2/bzip2_1.0.6.bb b/yocto-poky/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
index 233fe4c..d7b8c06 100644
--- a/yocto-poky/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
+++ b/yocto-poky/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
@@ -9,6 +9,7 @@
 PR = "r5"
 
 SRC_URI = "http://www.bzip.org/${PV}/${BP}.tar.gz \
+           file://fix-bunzip2-qt-returns-0-for-corrupt-archives.patch \
            file://configure.ac;subdir=${BP} \
            file://Makefile.am;subdir=${BP} \
            file://run-ptest"
diff --git a/yocto-poky/meta/recipes-extended/cpio/cpio_v2.inc b/yocto-poky/meta/recipes-extended/cpio/cpio_v2.inc
index 93de4bb..8520ff2 100644
--- a/yocto-poky/meta/recipes-extended/cpio/cpio_v2.inc
+++ b/yocto-poky/meta/recipes-extended/cpio/cpio_v2.inc
@@ -18,9 +18,11 @@
 
 do_install () {
     autotools_do_install
-    install -d ${D}${base_bindir}/
-    mv "${D}${bindir}/cpio" "${D}${base_bindir}/cpio"
-    rmdir ${D}${bindir}/
+    if [ "${base_bindir}" != "${bindir}" ]; then
+        install -d ${D}${base_bindir}/
+        mv "${D}${bindir}/cpio" "${D}${base_bindir}/cpio"
+        rmdir ${D}${bindir}/
+    fi
 }
 
 PACKAGES =+ "${PN}-rmt"
diff --git a/yocto-poky/meta/recipes-extended/cronie/cronie_1.5.0.bb b/yocto-poky/meta/recipes-extended/cronie/cronie_1.5.0.bb
index 38bd593..697501a 100644
--- a/yocto-poky/meta/recipes-extended/cronie/cronie_1.5.0.bb
+++ b/yocto-poky/meta/recipes-extended/cronie/cronie_1.5.0.bb
@@ -4,7 +4,7 @@
 original cron and has security and configuration enhancements like the \
 ability to use pam and SELinux."
 HOMEPAGE = "https://fedorahosted.org/cronie/"
-BUGTRACKER = "mmaslano@redhat.com"
+BUGTRACKER = "https://bugzilla.redhat.com"
 
 # Internet Systems Consortium License
 LICENSE = "ISC & BSD-3-Clause & BSD-2-Clause & GPLv2+"
diff --git a/yocto-poky/meta/recipes-extended/cups/cups.inc b/yocto-poky/meta/recipes-extended/cups/cups.inc
index 57cdf26..2c34da9 100644
--- a/yocto-poky/meta/recipes-extended/cups/cups.inc
+++ b/yocto-poky/meta/recipes-extended/cups/cups.inc
@@ -28,6 +28,7 @@
 PACKAGECONFIG[avahi] = "--enable-avahi,--disable-avahi,avahi"
 PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl"
 PACKAGECONFIG[pam] = "--enable-pam, --disable-pam, libpam"
+PACKAGECONFIG[xinetd] = "--with-xinetd=${sysconfdir}/xinetd.d,--without-xinetd,xinetd"
 
 EXTRA_OECONF = " \
                --enable-gnutls \
@@ -64,6 +65,11 @@
 	rm -fr ${D}/${localstatedir}/run
 	rmdir ${D}/${libdir}/${BPN}/driver
 
+	# Fix the pam configuration file permissions
+	if ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'true', 'false', d)}; then
+	    chmod 0644 ${D}${sysconfdir}/pam.d/cups
+	fi
+
 	# Remove sysinit script and symlinks if sysvinit is not in DISTRO_FEATURES
 	if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','false','true',d)}; then
 	    rm -rf ${D}${sysconfdir}/init.d/
diff --git a/yocto-poky/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb b/yocto-poky/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
index 43ea3ce..65a99fc 100644
--- a/yocto-poky/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
+++ b/yocto-poky/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
@@ -14,6 +14,9 @@
 
 do_install() {
 	oe_runmake CWAUTOMACROSPREFIX=${D}${prefix} install
+
+	# cleanup buildpaths in autogen.sh
+	sed -i -e 's,${D},,g' ${D}${prefix}/share/cwautomacros/scripts/autogen.sh
 }
 
 BBCLASSEXTEND = "native"
diff --git a/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8327.patch b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8327.patch
new file mode 100644
index 0000000..aaedc88
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8327.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Backport
+
+
+http://bzr.linuxfoundation.org/loggerhead/openprinting/cups-filters/revision/7406
+
+Hand applied change to util.c. Fix was for cups-filters but also applied to foomatic-filters.
+
+CVE: CVE-2015-8327
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+Index: util.c
+===================================================================
+--- a/util.c
++++ b/util.c
+@@ -31,7 +31,7 @@
+ #include <assert.h>
+ 
+ 
+-const char* shellescapes = "|;<>&!$\'\"#*?()[]{}";
++const char* shellescapes = "|;<>&!$\'\"`#*?()[]{}";
+ 
+ const char * temp_dir()
+ {
diff --git a/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8560.patch b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8560.patch
new file mode 100644
index 0000000..dc973c4
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters-4.0.17/CVE-2015-8560.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Backport
+
+
+http://bzr.linuxfoundation.org/loggerhead/openprinting/cups-filters/revision/7419
+
+Hand applied change to util.c. Fix was for cups-filters but also applied to foomatic-filters.
+
+CVE: CVE-2015-8560
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+Index: util.c
+===================================================================
+--- a/util.c
++++ b/util.c
+@@ -31,7 +31,7 @@
+ #include <assert.h>
+ 
+ 
+-const char* shellescapes = "|<>&!$\'\"#*?()[]{}";
++const char* shellescapes = "|;<>&!$\'\"#*?()[]{}";
+ 
+ const char * temp_dir()
+ {
diff --git a/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters_4.0.17.bb b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters_4.0.17.bb
index 790c981..58ef1f5 100644
--- a/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters_4.0.17.bb
+++ b/yocto-poky/meta/recipes-extended/foomatic/foomatic-filters_4.0.17.bb
@@ -17,6 +17,10 @@
 
 SRC_URI = "http://www.openprinting.org/download/foomatic/foomatic-filters-${PV}.tar.gz"
 
+SRC_URI += "file://CVE-2015-8560.patch \
+            file://CVE-2015-8327.patch \
+           "
+
 SRC_URI[md5sum] = "b05f5dcbfe359f198eef3df5b283d896"
 SRC_URI[sha256sum] = "a2e2e53e502571e88eeb9010c45a0d54671f15707ee104f5c9c22b59ea7a33e3"
 
diff --git a/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/png_mak.patch b/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/png_mak.patch
new file mode 100644
index 0000000..da900ea
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/png_mak.patch
@@ -0,0 +1,21 @@
+ghostscript: add dependency for pnglibconf.h
+    
+When using parallel make jobs, we need to be sure that
+pnglibconf.h is created before we try to reference it,
+so add a rule to png.mak.
+
+Upstream-Status: Pending
+    
+Signed-off-by: Joe Slater <jslater@windriver.com>
+
+--- a/base/png.mak
++++ b/base/png.mak
+@@ -81,6 +81,8 @@ png.config-clean :
+ $(pnglibconf_h) : $(PNGSRC)scripts$(D)pnglibconf.h.prebuilt
+ 	$(CP_)  $(PNGSRC)scripts$(D)pnglibconf.h.prebuilt $(pnglibconf_h)
+ 
++$(MAKEDIRS) : $(pnglibconf_h)
++
+ PDEP=$(AK) $(pnglibconf_h) $(MAKEDIRS)
+ 
+ png_1=$(PNGOBJ)png.$(OBJ) $(PNGOBJ)pngmem.$(OBJ) $(PNGOBJ)pngerror.$(OBJ) $(PNGOBJ)pngset.$(OBJ)
diff --git a/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.16.bb b/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.16.bb
index ec4acc6..d584c49 100644
--- a/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.16.bb
+++ b/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.16.bb
@@ -19,6 +19,7 @@
 SRC_URI_BASE = "http://downloads.ghostscript.com/public/ghostscript-${PV}.tar.gz \
                 file://ghostscript-9.15-parallel-make.patch \
                 file://ghostscript-9.16-Werror-return-type.patch \
+                file://png_mak.patch \
 "
 
 SRC_URI = "${SRC_URI_BASE} \
diff --git a/yocto-poky/meta/recipes-extended/grep/grep_2.21.bb b/yocto-poky/meta/recipes-extended/grep/grep_2.21.bb
index 3661098..c51147b 100644
--- a/yocto-poky/meta/recipes-extended/grep/grep_2.21.bb
+++ b/yocto-poky/meta/recipes-extended/grep/grep_2.21.bb
@@ -23,11 +23,13 @@
 
 do_install () {
 	autotools_do_install
-	install -d ${D}${base_bindir}
-	mv ${D}${bindir}/grep ${D}${base_bindir}/grep
-	mv ${D}${bindir}/egrep ${D}${base_bindir}/egrep
-	mv ${D}${bindir}/fgrep ${D}${base_bindir}/fgrep
-	rmdir ${D}${bindir}/
+	if [ "${base_bindir}" != "${bindir}" ]; then
+		install -d ${D}${base_bindir}
+		mv ${D}${bindir}/grep ${D}${base_bindir}/grep
+		mv ${D}${bindir}/egrep ${D}${base_bindir}/egrep
+		mv ${D}${bindir}/fgrep ${D}${base_bindir}/fgrep
+		rmdir ${D}${bindir}/
+	fi
 }
 
 inherit update-alternatives
diff --git a/yocto-poky/meta/recipes-extended/gzip/gzip.inc b/yocto-poky/meta/recipes-extended/gzip/gzip.inc
index 94480ec..58e5e0c 100644
--- a/yocto-poky/meta/recipes-extended/gzip/gzip.inc
+++ b/yocto-poky/meta/recipes-extended/gzip/gzip.inc
@@ -10,12 +10,14 @@
 EXTRA_OEMAKE_class-target = "GREP=${base_bindir}/grep"
 
 do_install_append () {
-	# Rename and move files into /bin (FHS), which is typical place for gzip
-	install -d ${D}${base_bindir}
-	mv ${D}${bindir}/gunzip ${D}${base_bindir}/gunzip
-	mv ${D}${bindir}/gzip ${D}${base_bindir}/gzip
-	mv ${D}${bindir}/zcat ${D}${base_bindir}/zcat
-	mv ${D}${bindir}/uncompress ${D}${base_bindir}/uncompress
+	if [ "${base_bindir}" != "${bindir}" ]; then
+		# Rename and move files into /bin (FHS), which is typical place for gzip
+		install -d ${D}${base_bindir}
+		mv ${D}${bindir}/gunzip ${D}${base_bindir}/gunzip
+		mv ${D}${bindir}/gzip ${D}${base_bindir}/gzip
+		mv ${D}${bindir}/zcat ${D}${base_bindir}/zcat
+		mv ${D}${bindir}/uncompress ${D}${base_bindir}/uncompress
+	fi
 }
 
 inherit update-alternatives
diff --git a/yocto-poky/meta/recipes-extended/images/wic-image-minimal.bb b/yocto-poky/meta/recipes-extended/images/wic-image-minimal.bb
deleted file mode 100644
index 073c569..0000000
--- a/yocto-poky/meta/recipes-extended/images/wic-image-minimal.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "An example of partitioned image."
-
-IMAGE_INSTALL = "packagegroup-core-boot ${ROOTFS_PKGMANAGE_BOOTSTRAP}"
-
-IMAGE_FSTYPES = "wic.bz2"
-RM_OLD_IMAGE = "1"
-
-# core-image-minimal is referenced in .wks, so we need its rootfs
-# to be ready before our rootfs
-do_rootfs[depends] += "core-image-minimal:do_rootfs"
-
-IMAGE_ROOTFS_EXTRA_SPACE = "2000"
-
-inherit image
diff --git a/yocto-poky/meta/recipes-extended/images/wic-image-minimal.wks b/yocto-poky/meta/recipes-extended/images/wic-image-minimal.wks
deleted file mode 100644
index 29cd8f2..0000000
--- a/yocto-poky/meta/recipes-extended/images/wic-image-minimal.wks
+++ /dev/null
@@ -1,10 +0,0 @@
-# short-description: Example of partitioned image with complex layout
-# long-description: This image contains boot partition and 3 rootfs partitions
-# created from core-image-minimal and wic-image-minimal image recipes.
-
-part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024
-part / --source rootfs --ondisk sda --fstype=ext2 --label platform --align 1024
-part /core --source rootfs --rootfs-dir=core-image-minimal --ondisk sda --fstype=ext2 --label core --align 1024
-part /backup --source rootfs --rootfs-dir=wic-image-minimal --ondisk sda --fstype=ext2 --label backup --align 1024
-
-bootloader  --timeout=0  --append="rootwait console=tty0"
diff --git a/yocto-poky/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch b/yocto-poky/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch
new file mode 100644
index 0000000..89ad8f6
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/iptables/iptables/0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch
@@ -0,0 +1,34 @@
+Package libnetfilter-conntrack depends on package libnfnetlink. iptables
+checks package libnetfilter-conntrack whatever its package config
+libnfnetlink is enabled or not. When libnfnetlink is disabled but
+package libnetfilter-conntrack exists, it fails randomly with:
+
+| In file included from .../iptables/1.4.21-r0/iptables-1.4.21/extensions/libxt_connlabel.c:8:0:
+| .../tmp/sysroots/qemumips/usr/include/libnetfilter_conntrack/libnetfilter_conntrack.h:14:42: fatal error: libnfnetlink/linux_nfnetlink.h: No such file or directory
+| compilation terminated.
+| GNUmakefile:96: recipe for target 'libxt_connlabel.oo' failed
+
+Only check libnetfilter-conntrack when libnfnetlink is enabled to fix it.
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+
+diff --git a/configure.ac b/configure.ac
+index 5d7e62b..e331ee7 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -88,8 +88,12 @@ if test "$ac_cv_header_linux_ip_vs_h" != "yes"; then
+ 	blacklist_modules="$blacklist_modules ipvs";
+ fi;
+ 
+-PKG_CHECK_MODULES([libnetfilter_conntrack], [libnetfilter_conntrack >= 1.0.4],
++nfconntrack=0
++AS_IF([test "x$enable_libnfnetlink" = "xyes"], [
++  PKG_CHECK_MODULES([libnetfilter_conntrack], [libnetfilter_conntrack >= 1.0.4],
+ 	[nfconntrack=1], [nfconntrack=0])
++  ])
++
+ AM_CONDITIONAL([HAVE_LIBNETFILTER_CONNTRACK], [test "$nfconntrack" = 1])
+ 
+ if test "$nfconntrack" -ne 1; then
diff --git a/yocto-poky/meta/recipes-extended/iptables/iptables_1.4.21.bb b/yocto-poky/meta/recipes-extended/iptables/iptables_1.4.21.bb
index 31c017b..deea5e5 100644
--- a/yocto-poky/meta/recipes-extended/iptables/iptables_1.4.21.bb
+++ b/yocto-poky/meta/recipes-extended/iptables/iptables_1.4.21.bb
@@ -23,6 +23,7 @@
            file://types.h-add-defines-that-are-required-for-if_packet.patch \
            file://0001-configure-Add-option-to-enable-disable-libnfnetlink.patch \
            file://0001-fix-build-with-musl.patch \
+           file://0002-configure.ac-only-check-conntrack-when-libnfnetlink-enabled.patch \
           "
 
 SRC_URI[md5sum] = "536d048c8e8eeebcd9757d0863ebb0c0"
@@ -38,7 +39,7 @@
 PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
 
 # libnfnetlink recipe is in meta-networking layer
-PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink,--disable-libnfnetlink,libnfnetlink"
+PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink,--disable-libnfnetlink,libnfnetlink libnetfilter-conntrack"
 
 do_configure_prepend() {
 	# Remove some libtool m4 files
diff --git a/yocto-poky/meta/recipes-extended/libaio/libaio/system-linkage.patch b/yocto-poky/meta/recipes-extended/libaio/libaio/system-linkage.patch
new file mode 100644
index 0000000..0b1f475
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/libaio/libaio/system-linkage.patch
@@ -0,0 +1,37 @@
+From 94bba6880b1f10c6b3bf33a17ac40935d65a81ae Mon Sep 17 00:00:00 2001
+From: Ross Burton <ross.burton@intel.com>
+Date: Fri, 6 Nov 2015 15:19:46 +0000
+Subject: [PATCH] Don't remove the system libraries and startup files from
+ libaio, as in some build configurations these are required.  For example,
+ including conf/include/security_flags.inc on PPC results in:
+
+io_queue_init.os: In function `io_queue_init':
+tmp/work/ppce300c3-poky-linux/libaio/0.3.110-r0/libaio-0.3.110/src/io_queue_init.c:33:
+undefined reference to `__stack_chk_fail_local'
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+---
+ src/Makefile | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/Makefile b/src/Makefile
+index eadb336..56ab701 100644
+--- a/src/Makefile
++++ b/src/Makefile
+@@ -3,10 +3,10 @@ includedir=$(prefix)/include
+ libdir=$(prefix)/lib
+ 
+ CFLAGS ?= -g -fomit-frame-pointer -O2
+-CFLAGS += -nostdlib -nostartfiles -Wall -I. -fPIC
++CFLAGS += -Wall -I. -fPIC
+ SO_CFLAGS=-shared $(CFLAGS)
+ L_CFLAGS=$(CFLAGS)
+-LINK_FLAGS=
++LINK_FLAGS=$(LDFLAGS)
+ LINK_FLAGS+=$(LDFLAGS)
+ 
+ soname=libaio.so.1
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-extended/libaio/libaio_0.3.110.bb b/yocto-poky/meta/recipes-extended/libaio/libaio_0.3.110.bb
index cbe29ce..2adfa0a 100644
--- a/yocto-poky/meta/recipes-extended/libaio/libaio_0.3.110.bb
+++ b/yocto-poky/meta/recipes-extended/libaio/libaio_0.3.110.bb
@@ -11,18 +11,13 @@
            file://destdir.patch \
            file://libaio_fix_for_x32.patch \
            file://libaio_fix_for_mips_syscalls.patch \
-"
+           file://system-linkage.patch \
+           "
 
 SRC_URI[md5sum] = "2a35602e43778383e2f4907a4ca39ab8"
 SRC_URI[sha256sum] = "e019028e631725729376250e32b473012f7cb68e1f7275bfc1bbcdd0f8745f7e"
 
 EXTRA_OEMAKE =+ "prefix=${prefix} includedir=${includedir} libdir=${libdir}"
-# Need libc for stack-protector's __stack_chk_fail_local() bounce function
-LDFLAGS_append_x86 = " -lc"
-
-do_configure () {
-    sed -i 's#LINK_FLAGS=.*#LINK_FLAGS=$(LDFLAGS)#' src/Makefile
-}
 
 do_install () {
     oe_runmake install DESTDIR=${D}
diff --git a/yocto-poky/meta/recipes-extended/libarchive/libarchive/0001-Add-ARCHIVE_EXTRACT_SECURE_NOABSOLUTEPATHS-option.patch b/yocto-poky/meta/recipes-extended/libarchive/libarchive/libarchive-CVE-2015-2304.patch
similarity index 100%
rename from yocto-poky/meta/recipes-extended/libarchive/libarchive/0001-Add-ARCHIVE_EXTRACT_SECURE_NOABSOLUTEPATHS-option.patch
rename to yocto-poky/meta/recipes-extended/libarchive/libarchive/libarchive-CVE-2015-2304.patch
diff --git a/yocto-poky/meta/recipes-extended/libarchive/libarchive_3.1.2.bb b/yocto-poky/meta/recipes-extended/libarchive/libarchive_3.1.2.bb
index aaa3255..716db9a 100644
--- a/yocto-poky/meta/recipes-extended/libarchive/libarchive_3.1.2.bb
+++ b/yocto-poky/meta/recipes-extended/libarchive/libarchive_3.1.2.bb
@@ -32,7 +32,7 @@
 SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz \
            file://libarchive-CVE-2013-0211.patch \
            file://pkgconfig.patch \
-           file://0001-Add-ARCHIVE_EXTRACT_SECURE_NOABSOLUTEPATHS-option.patch \
+           file://libarchive-CVE-2015-2304.patch \
            file://mkdir.patch \
            "
 
diff --git a/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/remove-des-uclibc.patch b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/remove-des-uclibc.patch
index d003348..553b1ff 100644
--- a/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/remove-des-uclibc.patch
+++ b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/remove-des-uclibc.patch
@@ -3,11 +3,11 @@
 Upstream-Status: Inappropriate [uclibc specific]
 Signed-off-by: Khem Raj <raj.khem@gmail.com>
 
-Index: libtirpc-0.2.3/src/rpc_soc.c
+Index: libtirpc-0.2.5/src/rpc_soc.c
 ===================================================================
---- libtirpc-0.2.3.orig/src/rpc_soc.c	2013-03-10 16:00:51.355282153 -0700
-+++ libtirpc-0.2.3/src/rpc_soc.c	2013-03-10 16:00:51.703282148 -0700
-@@ -520,6 +520,7 @@
+--- libtirpc-0.2.5.orig/src/rpc_soc.c
++++ libtirpc-0.2.5/src/rpc_soc.c
+@@ -520,6 +520,7 @@ clnt_broadcast(prog, vers, proc, xargs,
  	    (resultproc_t) rpc_wrap_bcast, "udp");
  }
  
@@ -15,7 +15,7 @@
  /*
   * Create the client des authentication object. Obsoleted by
   * authdes_seccreate().
-@@ -551,6 +552,7 @@
+@@ -551,6 +552,7 @@ fallback:
  	dummy = authdes_seccreate(servername, window, NULL, ckey);
  	return (dummy);
  }
@@ -23,16 +23,16 @@
  
  /*
   * Create a client handle for a unix connection. Obsoleted by clnt_vc_create()
-Index: libtirpc-0.2.3/src/Makefile.am
+Index: libtirpc-0.2.5/src/Makefile.am
 ===================================================================
---- libtirpc-0.2.3.orig/src/Makefile.am	2013-03-10 16:00:51.355282153 -0700
-+++ libtirpc-0.2.3/src/Makefile.am	2013-03-10 16:00:51.703282148 -0700
-@@ -50,7 +50,7 @@
+--- libtirpc-0.2.5.orig/src/Makefile.am
++++ libtirpc-0.2.5/src/Makefile.am
+@@ -51,7 +51,7 @@ libtirpc_la_SOURCES = auth_none.c auth_u
          rpc_callmsg.c rpc_generic.c rpc_soc.c rpcb_clnt.c rpcb_prot.c \
          rpcb_st_xdr.c svc.c svc_auth.c svc_dg.c svc_auth_unix.c svc_auth_none.c \
          svc_generic.c svc_raw.c svc_run.c svc_simple.c svc_vc.c getpeereid.c \
--        auth_time.c auth_des.c authdes_prot.c
-+        auth_time.c
+-        auth_time.c auth_des.c authdes_prot.c debug.c
++        auth_time.c debug.c
  
  ## XDR
  libtirpc_la_SOURCES += xdr.c xdr_rec.c xdr_array.c xdr_float.c xdr_mem.c xdr_reference.c xdr_stdio.c
diff --git a/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/va_list.patch b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/va_list.patch
new file mode 100644
index 0000000..855d15b
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc/va_list.patch
@@ -0,0 +1,18 @@
+This patch is fixing build with uclibc where compiler ( gcc5 ) says it cant find va_list
+the patch is right for upstreaming as well
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Pending
+
+Index: libtirpc-0.2.5/src/debug.h
+===================================================================
+--- libtirpc-0.2.5.orig/src/debug.h
++++ libtirpc-0.2.5/src/debug.h
+@@ -22,6 +22,7 @@
+ #ifndef _DEBUG_H
+ #define _DEBUG_H
+ #include <syslog.h>
++#include <stdarg.h>
+ 
+ extern int libtirpc_debug_level;
+ extern int  log_stderr;
diff --git a/yocto-poky/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb
index 3edf002..330b829 100644
--- a/yocto-poky/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb
+++ b/yocto-poky/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb
@@ -15,7 +15,9 @@
            file://libtirpc-0.2.1-fortify.patch \
           "
 
-SRC_URI_append_libc-uclibc = " file://remove-des-uclibc.patch"
+SRC_URI_append_libc-uclibc = " file://remove-des-uclibc.patch \
+                               file://va_list.patch \
+                             "
 
 SRC_URI[libtirpc.md5sum] = "8cd41a5ef5a9b50d0fb6abb98af15368"
 SRC_URI[libtirpc.sha256sum] = "62f9de7c2c8686c568757730e1fef66502a0e00d6cacf33546d0267984e002db"
diff --git a/yocto-poky/meta/recipes-extended/logrotate/logrotate_3.9.1.bb b/yocto-poky/meta/recipes-extended/logrotate/logrotate_3.9.1.bb
index 7d0a159..5f1a601 100644
--- a/yocto-poky/meta/recipes-extended/logrotate/logrotate_3.9.1.bb
+++ b/yocto-poky/meta/recipes-extended/logrotate/logrotate_3.9.1.bb
@@ -53,7 +53,7 @@
 }
 
 do_install(){
-    oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir} BINDIR=${bindir}
+    oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir}
     mkdir -p ${D}${sysconfdir}/logrotate.d
     mkdir -p ${D}${sysconfdir}/cron.daily
     mkdir -p ${D}${localstatedir}/lib
diff --git a/yocto-poky/meta/recipes-extended/lsb/lsb_4.1.bb b/yocto-poky/meta/recipes-extended/lsb/lsb_4.1.bb
index 6215b62..c9f6a8b 100644
--- a/yocto-poky/meta/recipes-extended/lsb/lsb_4.1.bb
+++ b/yocto-poky/meta/recipes-extended/lsb/lsb_4.1.bb
@@ -9,8 +9,8 @@
 LSB_CORE_x86-64 = "lsb-core-amd64"
 RPROVIDES_${PN} += "${LSB_CORE}"
 
-# lsb_release needs getopt
-RDEPENDS_${PN} += "${VIRTUAL-RUNTIME_getopt}"
+# lsb_release needs getopt, lsbinitscripts
+RDEPENDS_${PN} += "${VIRTUAL-RUNTIME_getopt} lsbinitscripts"
 
 LIC_FILES_CHKSUM = "file://README;md5=12da544b1a3a5a1795a21160b49471cf"
 
diff --git a/yocto-poky/meta/recipes-extended/lsb/lsbinitscripts_9.64.bb b/yocto-poky/meta/recipes-extended/lsb/lsbinitscripts_9.64.bb
index 6db667c..150f6f2 100644
--- a/yocto-poky/meta/recipes-extended/lsb/lsbinitscripts_9.64.bb
+++ b/yocto-poky/meta/recipes-extended/lsb/lsbinitscripts_9.64.bb
@@ -3,6 +3,8 @@
 LICENSE = "GPLv2"
 DEPENDS = "popt glib-2.0"
 
+RDEPENDS_${PN} += "util-linux"
+
 LIC_FILES_CHKSUM = "file://COPYING;md5=ebf4e8b49780ab187d51bd26aaa022c6"
 
 S="${WORKDIR}/initscripts-${PV}"
diff --git a/yocto-poky/meta/recipes-extended/ltp/ltp/0001-replace-inline-with-static-inline-for-gcc-5.x.patch b/yocto-poky/meta/recipes-extended/ltp/ltp/0001-replace-inline-with-static-inline-for-gcc-5.x.patch
new file mode 100644
index 0000000..0b594dc
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/ltp/ltp/0001-replace-inline-with-static-inline-for-gcc-5.x.patch
@@ -0,0 +1,69 @@
+Upstream-Status: Backport [From https://github.com/linux-test-project/ltp/commit/40a2457cb8ec42a05a2f96b0810057efdb2a55f5]
+
+gcc 5.x defaults to -std=gnu11 instead of -std=gnu89 which causes
+semantics for inline functions changes.
+
+The standalone 'inline' causes error with gcc 5 such as:
+
+git/testcases/kernel/syscalls/kill/kill10.c:355: undefined reference to `k_sigaction'
+
+Replace inline with static inline to be compatible with both gcc 4 and 5.
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ testcases/kernel/controllers/libcontrollers/libcontrollers.c | 2 +-
+ testcases/kernel/controllers/libcontrollers/libcontrollers.h | 2 +-
+ testcases/kernel/syscalls/kill/kill10.c                      | 4 ++--
+ 3 files changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/testcases/kernel/controllers/libcontrollers/libcontrollers.c b/testcases/kernel/controllers/libcontrollers/libcontrollers.c
+index b01e1b8..8857bc9 100644
+--- a/testcases/kernel/controllers/libcontrollers/libcontrollers.c
++++ b/testcases/kernel/controllers/libcontrollers/libcontrollers.c
+@@ -146,7 +146,7 @@ int read_file(char *filepath, int action, unsigned int *value)
+  * Prints error message and returns -1
+  */
+ 
+-inline int error_function(char *msg1, char *msg2)
++static inline int error_function(char *msg1, char *msg2)
+ {
+ 	fprintf(stdout, "ERROR: %s ", msg1);
+ 	fprintf(stdout, "%s\n", msg2);
+diff --git a/testcases/kernel/controllers/libcontrollers/libcontrollers.h b/testcases/kernel/controllers/libcontrollers/libcontrollers.h
+index 4001555..a1a0dfa 100644
+--- a/testcases/kernel/controllers/libcontrollers/libcontrollers.h
++++ b/testcases/kernel/controllers/libcontrollers/libcontrollers.h
+@@ -70,7 +70,7 @@ enum{
+ 	GET_TASKS
+ };
+ 
+-inline int error_function(char *msg1, char *msg2);
++static inline int error_function(char *msg1, char *msg2);
+ 
+ unsigned int read_shares_file (char *filepath);
+ 
+diff --git a/testcases/kernel/syscalls/kill/kill10.c b/testcases/kernel/syscalls/kill/kill10.c
+index 982d9da..33dbcd3 100644
+--- a/testcases/kernel/syscalls/kill/kill10.c
++++ b/testcases/kernel/syscalls/kill/kill10.c
+@@ -185,7 +185,7 @@ int child_checklist_total = 0;
+ int checklist_cmp(const void *a, const void *b);
+ void checklist_reset(int bit);
+ 
+-inline int k_sigaction(int sig, struct sigaction *sa, struct sigaction *osa);
++static inline int k_sigaction(int sig, struct sigaction *sa, struct sigaction *osa);
+ 
+ char *TCID = "kill10";
+ int TST_TOTAL = 1;
+@@ -756,7 +756,7 @@ void checklist_reset(int bit)
+ 
+ }
+ 
+-inline int k_sigaction(int sig, struct sigaction *sa, struct sigaction *osa)
++static inline int k_sigaction(int sig, struct sigaction *sa, struct sigaction *osa)
+ {
+ 	int ret;
+ 	if ((ret = sigaction(sig, sa, osa)) == -1) {
+---
+-1.9.1
+-
diff --git a/yocto-poky/meta/recipes-extended/ltp/ltp_20150420.bb b/yocto-poky/meta/recipes-extended/ltp/ltp_20150420.bb
index 108ebf1..ed46b5e 100644
--- a/yocto-poky/meta/recipes-extended/ltp/ltp_20150420.bb
+++ b/yocto-poky/meta/recipes-extended/ltp/ltp_20150420.bb
@@ -29,6 +29,7 @@
     file://add-knob-for-numa.patch \
     file://add-knob-for-tirpc.patch \
     file://0001-ltp-vma03-fix-the-alginment-of-page-size.patch \
+    file://0001-replace-inline-with-static-inline-for-gcc-5.x.patch \
 "
 
 S = "${WORKDIR}/git"
diff --git a/yocto-poky/meta/recipes-extended/mailx/mailx_12.5-5.bb b/yocto-poky/meta/recipes-extended/mailx/mailx_12.5-5.bb
index ffa9049..c87c582 100644
--- a/yocto-poky/meta/recipes-extended/mailx/mailx_12.5-5.bb
+++ b/yocto-poky/meta/recipes-extended/mailx/mailx_12.5-5.bb
@@ -41,3 +41,8 @@
 # fio.c:56:17: fatal error: ssl.h: No such file or directory
 # #include <ssl.h>
 PARALLEL_MAKE = ""
+
+# Causes gcc to get stuck and eat all available memory in qemuarm builds
+# http://errors.yoctoproject.org/Errors/Details/20488/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
diff --git a/yocto-poky/meta/recipes-extended/pam/libpam/use-utmpx.patch b/yocto-poky/meta/recipes-extended/pam/libpam/use-utmpx.patch
new file mode 100644
index 0000000..dd04bbb
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/pam/libpam/use-utmpx.patch
@@ -0,0 +1,233 @@
+utmp() may not be configured in and use posix compliant utmpx always
+UTMP is SVID legacy, UTMPX is mandated by POSIX
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Index: Linux-PAM-1.2.1/libpam/pam_modutil_getlogin.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/libpam/pam_modutil_getlogin.c
++++ Linux-PAM-1.2.1/libpam/pam_modutil_getlogin.c
+@@ -10,8 +10,7 @@
+ 
+ #include <stdlib.h>
+ #include <unistd.h>
+-#include <utmp.h>
+-
++#include <utmpx.h>
+ #define _PAMMODUTIL_GETLOGIN "_pammodutil_getlogin"
+ 
+ const char *
+@@ -22,7 +21,7 @@ pam_modutil_getlogin(pam_handle_t *pamh)
+     const void *void_curr_tty;
+     const char *curr_tty;
+     char *curr_user;
+-    struct utmp *ut, line;
++    struct utmpx *ut, line;
+ 
+     status = pam_get_data(pamh, _PAMMODUTIL_GETLOGIN, &logname);
+     if (status == PAM_SUCCESS) {
+@@ -48,10 +47,10 @@ pam_modutil_getlogin(pam_handle_t *pamh)
+     }
+     logname = NULL;
+ 
+-    setutent();
++    setutxent();
+     strncpy(line.ut_line, curr_tty, sizeof(line.ut_line));
+ 
+-    if ((ut = getutline(&line)) == NULL) {
++    if ((ut = getutxline(&line)) == NULL) {
+ 	goto clean_up_and_go_home;
+     }
+ 
+@@ -74,7 +73,7 @@ pam_modutil_getlogin(pam_handle_t *pamh)
+ 
+ clean_up_and_go_home:
+ 
+-    endutent();
++    endutxent();
+ 
+     return logname;
+ }
+Index: Linux-PAM-1.2.1/modules/pam_issue/pam_issue.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/modules/pam_issue/pam_issue.c
++++ Linux-PAM-1.2.1/modules/pam_issue/pam_issue.c
+@@ -25,7 +25,7 @@
+ #include <string.h>
+ #include <unistd.h>
+ #include <sys/utsname.h>
+-#include <utmp.h>
++#include <utmpx.h>
+ #include <time.h>
+ #include <syslog.h>
+ 
+@@ -246,13 +246,13 @@ read_issue_quoted(pam_handle_t *pamh, FI
+ 	      case 'U':
+ 		{
+ 		    unsigned int users = 0;
+-		    struct utmp *ut;
+-		    setutent();
+-		    while ((ut = getutent())) {
++		    struct utmpx *ut;
++		    setutxent();
++		    while ((ut = getutxent())) {
+ 			if (ut->ut_type == USER_PROCESS)
+ 			    ++users;
+ 		    }
+-		    endutent();
++		    endutxent();
+ 		    if (c == 'U')
+ 			snprintf (buf, sizeof buf, "%u %s", users,
+ 			          (users == 1) ? "user" : "users");
+Index: Linux-PAM-1.2.1/modules/pam_lastlog/pam_lastlog.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/modules/pam_lastlog/pam_lastlog.c
++++ Linux-PAM-1.2.1/modules/pam_lastlog/pam_lastlog.c
+@@ -15,8 +15,9 @@
+ #include <errno.h>
+ #ifdef HAVE_UTMP_H
+ # include <utmp.h>
+-#else
+-# include <lastlog.h>
++#endif
++#ifdef HAVE_UTMPX_H
++# include <utmpx.h>
+ #endif
+ #include <pwd.h>
+ #include <stdlib.h>
+@@ -27,6 +28,12 @@
+ #include <syslog.h>
+ #include <unistd.h>
+ 
++#ifndef HAVE_UTMP_H
++#define UT_LINESIZE 32
++#define UT_HOSTSIZE 32
++#define UT_NAMESIZE 256
++#endif
++
+ #if defined(hpux) || defined(sunos) || defined(solaris)
+ # ifndef _PATH_LASTLOG
+ #  define _PATH_LASTLOG "/usr/adm/lastlog"
+@@ -38,7 +45,7 @@
+ #  define UT_LINESIZE 12
+ # endif /* UT_LINESIZE */
+ #endif
+-#if defined(hpux)
++#if defined(hpux) || !defined HAVE_UTMP_H
+ struct lastlog {
+     time_t  ll_time;
+     char    ll_line[UT_LINESIZE];
+@@ -447,8 +454,8 @@ last_login_failed(pam_handle_t *pamh, in
+ {
+     int retval;
+     int fd;
+-    struct utmp ut;
+-    struct utmp utuser;
++    struct utmpx ut;
++    struct utmpx utuser;
+     int failed = 0;
+     char the_time[256];
+     char *date = NULL;
+Index: Linux-PAM-1.2.1/modules/pam_limits/pam_limits.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/modules/pam_limits/pam_limits.c
++++ Linux-PAM-1.2.1/modules/pam_limits/pam_limits.c
+@@ -33,7 +33,7 @@
+ #include <sys/resource.h>
+ #include <limits.h>
+ #include <glob.h>
+-#include <utmp.h>
++#include <utmpx.h>
+ #ifndef UT_USER  /* some systems have ut_name instead of ut_user */
+ #define UT_USER ut_user
+ #endif
+@@ -227,7 +227,7 @@ static int
+ check_logins (pam_handle_t *pamh, const char *name, int limit, int ctrl,
+               struct pam_limit_s *pl)
+ {
+-    struct utmp *ut;
++    struct utmpx *ut;
+     int count;
+ 
+     if (ctrl & PAM_DEBUG_ARG) {
+@@ -242,7 +242,7 @@ check_logins (pam_handle_t *pamh, const
+         return LOGIN_ERR;
+     }
+ 
+-    setutent();
++    setutxent();
+ 
+     /* Because there is no definition about when an application
+        actually adds a utmp entry, some applications bizarrely do the
+@@ -260,7 +260,7 @@ check_logins (pam_handle_t *pamh, const
+ 	count = 1;
+     }
+ 
+-    while((ut = getutent())) {
++    while((ut = getutxent())) {
+ #ifdef USER_PROCESS
+         if (ut->ut_type != USER_PROCESS) {
+             continue;
+@@ -296,7 +296,7 @@ check_logins (pam_handle_t *pamh, const
+ 	    break;
+ 	}
+     }
+-    endutent();
++    endutxent();
+     if (count > limit) {
+ 	if (name) {
+ 	    pam_syslog(pamh, LOG_WARNING,
+Index: Linux-PAM-1.2.1/modules/pam_timestamp/pam_timestamp.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/modules/pam_timestamp/pam_timestamp.c
++++ Linux-PAM-1.2.1/modules/pam_timestamp/pam_timestamp.c
+@@ -56,7 +56,7 @@
+ #include <time.h>
+ #include <sys/time.h>
+ #include <unistd.h>
+-#include <utmp.h>
++#include <utmpx.h>
+ #include <syslog.h>
+ #include <paths.h>
+ #include "hmacsha1.h"
+@@ -197,15 +197,15 @@ timestamp_good(time_t then, time_t now,
+ static int
+ check_login_time(const char *ruser, time_t timestamp)
+ {
+-	struct utmp utbuf, *ut;
++	struct utmpx utbuf, *ut;
+ 	time_t oldest_login = 0;
+ 
+-	setutent();
++	setutxent();
+ 	while(
+ #ifdef HAVE_GETUTENT_R
+-	      !getutent_r(&utbuf, &ut)
++	      !getutxent_r(&utbuf, &ut)
+ #else
+-	      (ut = getutent()) != NULL
++	      (ut = getutxent()) != NULL
+ #endif
+ 	      ) {
+ 		if (ut->ut_type != USER_PROCESS) {
+@@ -218,7 +218,7 @@ check_login_time(const char *ruser, time
+ 			oldest_login = ut->ut_tv.tv_sec;
+ 		}
+ 	}
+-	endutent();
++	endutxent();
+ 	if(oldest_login == 0 || timestamp < oldest_login) {
+ 		return PAM_AUTH_ERR;
+ 	}
+Index: Linux-PAM-1.2.1/modules/pam_unix/support.c
+===================================================================
+--- Linux-PAM-1.2.1.orig/modules/pam_unix/support.c
++++ Linux-PAM-1.2.1/modules/pam_unix/support.c
+@@ -13,7 +13,6 @@
+ #include <pwd.h>
+ #include <shadow.h>
+ #include <limits.h>
+-#include <utmp.h>
+ #include <errno.h>
+ #include <signal.h>
+ #include <ctype.h>
diff --git a/yocto-poky/meta/recipes-extended/pam/libpam_1.2.1.bb b/yocto-poky/meta/recipes-extended/pam/libpam_1.2.1.bb
index ac3097e..0353356 100644
--- a/yocto-poky/meta/recipes-extended/pam/libpam_1.2.1.bb
+++ b/yocto-poky/meta/recipes-extended/pam/libpam_1.2.1.bb
@@ -28,7 +28,9 @@
 SRC_URI[md5sum] = "9dc53067556d2dd567808fd509519dd6"
 SRC_URI[sha256sum] = "342b1211c0d3b203a7df2540a5b03a428a087bd8a48c17e49ae268f992b334d9"
 
-SRC_URI_append_libc-uclibc = " file://pam-no-innetgr.patch"
+SRC_URI_append_libc-uclibc = " file://pam-no-innetgr.patch \
+                               file://use-utmpx.patch"
+
 SRC_URI_append_libc-musl = " file://pam-no-innetgr.patch"
 
 DEPENDS = "bison flex flex-native cracklib"
diff --git a/yocto-poky/meta/recipes-extended/quota/quota/remove_non_posix_types.patch b/yocto-poky/meta/recipes-extended/quota/quota/remove_non_posix_types.patch
index 5442d98..06ff13c 100644
--- a/yocto-poky/meta/recipes-extended/quota/quota/remove_non_posix_types.patch
+++ b/yocto-poky/meta/recipes-extended/quota/quota/remove_non_posix_types.patch
@@ -183,3 +183,16 @@
  } du_t;
  
  #define	NDU	60000
+Index: quota-tools/rquota_server.c
+===================================================================
+--- quota-tools.orig/rquota_server.c
++++ quota-tools/rquota_server.c
+@@ -60,7 +60,7 @@ extern char nfs_pseudoroot[PATH_MAX];
+  */
+ extern struct authunix_parms *unix_cred;
+ 
+-int in_group(gid_t * gids, u_int len, gid_t gid)
++int in_group(gid_t * gids, uint32_t len, gid_t gid)
+ {
+ 	gid_t *gidsp = gids + len;
+ 
diff --git a/yocto-poky/meta/recipes-extended/quota/quota_4.02.bb b/yocto-poky/meta/recipes-extended/quota/quota_4.02.bb
index 124b0a3..673d584 100644
--- a/yocto-poky/meta/recipes-extended/quota/quota_4.02.bb
+++ b/yocto-poky/meta/recipes-extended/quota/quota_4.02.bb
@@ -23,7 +23,7 @@
 
 inherit autotools-brokensep gettext pkgconfig
 
-CFLAGS += "-I=${includedir}/tirpc"
+CFLAGS += "-I${STAGING_INCDIR}/tirpc"
 LDFLAGS += "-ltirpc"
 ASNEEDED = ""
 EXTRA_OEMAKE += 'STRIP=""'
diff --git a/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-uclibc-nss.patch b/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-uclibc-nss.patch
deleted file mode 100644
index afa55f3..0000000
--- a/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-uclibc-nss.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-Upstream-Status: Pending
-
-From b8f0d7b7318ba344c25785d6f5cf3f8de98012d4 Mon Sep 17 00:00:00 2001
-From: Natanael Copa <ncopa@alpinelinux.org>
-Date: Tue, 2 Feb 2010 09:36:03 +0000
-Subject: [PATCH 1/2] uclibc-nss
-
----
- src/rpcbind.c |    4 ++++
- 1 files changed, 4 insertions(+), 0 deletions(-)
-
-diff --git a/src/rpcbind.c b/src/rpcbind.c
-index 525ffba..1fe1a60 100644
---- a/src/rpcbind.c
-+++ b/src/rpcbind.c
-@@ -67,7 +67,11 @@
- #include <pwd.h>
- #include <string.h>
- #include <errno.h>
-+#if defined(__UCLIBC__)
-+#define __nss_configure_lookup(x,y)
-+#else
- #include <nss.h>
-+#endif
- #include "config.h"
- #include "rpcbind.h"
- 
--- 
-1.6.6.1
-
diff --git a/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/cve-2015-7236.patch b/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/cve-2015-7236.patch
new file mode 100644
index 0000000..f156290
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/cve-2015-7236.patch
@@ -0,0 +1,83 @@
+commit 06f7ebb1dade2f0dbf872ea2bedf17cff4734bdd
+Author: Olaf Kirch <okir@...e.de>
+Date:   Thu Aug 6 16:27:20 2015 +0200
+
+    Fix memory corruption in PMAP_CALLIT code
+    
+     - A PMAP_CALLIT call comes in on IPv4 UDP
+     - rpcbind duplicates the caller's address to a netbuf and stores it in
+       FINFO[0].caller_addr. caller_addr->buf now points to a memory region A
+       with a size of 16 bytes
+     - rpcbind forwards the call to the local service, receives a reply
+     - when processing the reply, it does this in xprt_set_caller:
+         xprt->xp_rtaddr = *FINFO[0].caller_addr
+       It sends out the reply, and then frees the netbuf caller_addr and
+       caller_addr.buf.
+       However, it does not clear xp_rtaddr, so xp_rtaddr.buf now refers
+       to memory region A, which is free.
+     - When the next call comes in on the UDP/IPv4 socket, svc_dg_recv will
+       be called, which will set xp_rtaddr to the client's address.
+       It will reuse the buffer inside xp_rtaddr, ie it will write a
+       sockaddr_in to region A
+    
+    Some time down the road, an incoming TCP connection is accepted,
+    allocating a fresh SVCXPRT. The memory region A is inside the
+    new SVCXPRT
+    
+     - While processing the TCP call, another UDP call comes in, again
+       overwriting region A with the client's address
+     - TCP client closes connection. In svc_destroy, we now trip over
+       the garbage left in region A
+    
+    We ran into the case where a commercial scanner was triggering
+    occasional rpcbind segfaults. The core file that was captured showed
+    a corrupted xprt->xp_netid pointer that was really a sockaddr_in.
+    
+    Signed-off-by: Olaf Kirch <okir@...e.de>
+
+    Upstream-Status: Backport
+
+    Signed-off-by: Li Zhou <li.zhou@windriver.com>
+---
+ src/rpcb_svc_com.c |   23 ++++++++++++++++++++++-
+ 1 file changed, 22 insertions(+), 1 deletion(-)
+
+Index: rpcbind-0.1.6+git20080930/src/rpcb_svc_com.c
+===================================================================
+--- rpcbind-0.1.6+git20080930.orig/src/rpcb_svc_com.c
++++ rpcbind-0.1.6+git20080930/src/rpcb_svc_com.c
+@@ -1298,12 +1298,33 @@ check_rmtcalls(struct pollfd *pfds, int
+ 	return (ncallbacks_found);
+ }
+ 
++/*
++ * This is really a helper function defined in libtirpc, but unfortunately, it hasn't
++ * been exported yet.
++ */
++static struct netbuf *
++__rpc_set_netbuf(struct netbuf *nb, const void *ptr, size_t len)
++{
++	if (nb->len != len) {
++		if (nb->len)
++			mem_free(nb->buf, nb->len);
++		nb->buf = mem_alloc(len);
++		if (nb->buf == NULL)
++			return NULL;
++
++		nb->maxlen = nb->len = len;
++	}
++	memcpy(nb->buf, ptr, len);
++	return nb;
++}
++
+ static void
+ xprt_set_caller(SVCXPRT *xprt, struct finfo *fi)
+ {
++	const struct netbuf *caller = fi->caller_addr;
+ 	u_int32_t *xidp;
+ 
+-	*(svc_getrpccaller(xprt)) = *(fi->caller_addr);
++	__rpc_set_netbuf(svc_getrpccaller(xprt), caller->buf, caller->len);
+ 	xidp = __rpcb_get_dg_xidp(xprt);
+ 	*xidp = fi->caller_xid;
+ }
diff --git a/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb b/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb
index 3336021..ecd3ba8 100644
--- a/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb
+++ b/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb
@@ -19,11 +19,10 @@
            file://rpcbind.conf \
            file://rpcbind.socket \
            file://rpcbind.service \
+           file://cve-2015-7236.patch \
           "
 MUSLPATCHES_libc-musl = "file://musl-sunrpc.patch"
 
-UCLIBCPATCHES_libc-uclibc = "file://0001-uclibc-nss.patch \
-                            "
 UCLIBCPATCHES ?= ""
 MUSLPATCHES ?= ""
 
diff --git a/yocto-poky/meta/recipes-extended/screen/screen/0001-Fix-stack-overflow-due-to-too-deep-recursion.patch b/yocto-poky/meta/recipes-extended/screen/screen/0001-Fix-stack-overflow-due-to-too-deep-recursion.patch
new file mode 100644
index 0000000..2bc9a59
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/screen/screen/0001-Fix-stack-overflow-due-to-too-deep-recursion.patch
@@ -0,0 +1,57 @@
+Bug: 45713
+
+How to reproduce:
+Run this command inside screen
+$ printf '\x1b[10000000T'
+
+screen will recursively call MScrollV to depth n/256.
+This is time consuming and will overflow stack if n is huge.
+
+Fixes CVE-2015-6806
+
+Upstream-Status: Backport
+
+Signed-off-by: Kuang-che Wu <kcwu@csie.org>
+Signed-off-by: Amadeusz Sławiński <amade@asmblr.net>
+Signed-off-by: Maxin B. John <maxin.john@intel.com>
+---
+diff -Naur screen-4.3.1-orig/ansi.c screen-4.3.1/ansi.c
+--- screen-4.3.1-orig/ansi.c	2015-06-29 00:22:55.000000000 +0300
++++ screen-4.3.1/ansi.c	2015-10-06 13:13:58.297648039 +0300
+@@ -2502,13 +2502,13 @@
+     return;
+   if (n > 0)
+     {
++      if (ye - ys + 1 < n)
++	  n = ye - ys + 1;
+       if (n > 256)
+ 	{
+ 	  MScrollV(p, n - 256, ys, ye, bce);
+ 	  n = 256;
+ 	}
+-      if (ye - ys + 1 < n)
+-	n = ye - ys + 1;
+ #ifdef COPY_PASTE
+       if (compacthist)
+ 	{
+@@ -2562,15 +2562,15 @@
+     }
+   else
+     {
+-      if (n < -256)
+-	{
+-	  MScrollV(p, n + 256, ys, ye, bce);
+-	  n = -256;
+-	}
+       n = -n;
+       if (ye - ys + 1 < n)
+ 	n = ye - ys + 1;
+ 
++      if (n > 256)
++      {
++        MScrollV(p, - (n - 256), ys, ye, bce);
++        n = 256;
++      }
+       ml = p->w_mlines + ye;
+       /* Clear lines */
+       for (i = ye; i > ye - n; i--, ml--)
diff --git a/yocto-poky/meta/recipes-extended/screen/screen_4.3.1.bb b/yocto-poky/meta/recipes-extended/screen/screen_4.3.1.bb
index 92457af..00d878b 100644
--- a/yocto-poky/meta/recipes-extended/screen/screen_4.3.1.bb
+++ b/yocto-poky/meta/recipes-extended/screen/screen_4.3.1.bb
@@ -24,6 +24,7 @@
            file://Avoid-mis-identifying-systems-as-SVR4.patch \
            file://0001-fix-for-multijob-build.patch \
            file://0002-comm.h-now-depends-on-term.h.patch \
+           file://0001-Fix-stack-overflow-due-to-too-deep-recursion.patch \
           "
 
 SRC_URI[md5sum] = "5bb3b0ff2674e29378c31ad3411170ad"
diff --git a/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.14p3.bb b/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.14p3.bb
index 6b3cd6d..b93112f 100644
--- a/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.14p3.bb
+++ b/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.14p3.bb
@@ -22,7 +22,7 @@
 
 do_install_append () {
 	if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then
-		install -D -m 664 ${WORKDIR}/sudo.pam ${D}/${sysconfdir}/pam.d/sudo
+		install -D -m 644 ${WORKDIR}/sudo.pam ${D}/${sysconfdir}/pam.d/sudo
 	fi
 
 	chmod 4111 ${D}${bindir}/sudo
diff --git a/yocto-poky/meta/recipes-extended/sysstat/sysstat/0001-Include-needed-headers-explicitly.patch b/yocto-poky/meta/recipes-extended/sysstat/sysstat/0001-Include-needed-headers-explicitly.patch
new file mode 100644
index 0000000..c126523
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/sysstat/sysstat/0001-Include-needed-headers-explicitly.patch
@@ -0,0 +1,62 @@
+From 42325faa88d64cce799977d611b2792beb154643 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 14 Sep 2015 08:36:59 +0000
+Subject: [PATCH] Include needed headers explicitly
+
+on glibc these headers get pulled in indirectly via other .h files
+but right fix is to include them directly when used
+
+fixes
+
+error: use of undeclared identifier 'PATH_MAX'
+error: called object type 'unsigned int' is not a function or function pointer
+dm_major = major(aux.st_rdev);
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+Upstream-Status: Pending
+
+ common.c    | 1 +
+ ioconf.c    | 1 +
+ sa_common.c | 1 +
+ 3 files changed, 3 insertions(+)
+
+diff --git a/common.c b/common.c
+index a23155b..ad86446 100644
+--- a/common.c
++++ b/common.c
+@@ -20,6 +20,7 @@
+  */
+ 
+ #include <stdio.h>
++#include <limits.h>
+ #include <string.h>
+ #include <stdlib.h>
+ #include <time.h>
+diff --git a/ioconf.c b/ioconf.c
+index 7d88c5d..6d67691 100644
+--- a/ioconf.c
++++ b/ioconf.c
+@@ -27,6 +27,7 @@
+ #include <errno.h>
+ #include <dirent.h>
+ #include <sys/stat.h>
++#include <sys/types.h>
+ 
+ #include "ioconf.h"
+ #include "common.h"
+diff --git a/sa_common.c b/sa_common.c
+index b7351d9..c9e3299 100644
+--- a/sa_common.c
++++ b/sa_common.c
+@@ -20,6 +20,7 @@
+  */
+ 
+ #include <stdio.h>
++#include <limits.h>
+ #include <string.h>
+ #include <stdlib.h>
+ #include <time.h>
+-- 
+2.5.2
+
diff --git a/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.1.5.bb b/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.1.5.bb
index 69d2ec2..bff8616 100644
--- a/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.1.5.bb
+++ b/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.1.5.bb
@@ -2,6 +2,8 @@
 
 LIC_FILES_CHKSUM = "file://COPYING;md5=8ca43cbc842c2336e835926c2166c28b"
 
+SRC_URI += "file://0001-Include-needed-headers-explicitly.patch"
+
 SRC_URI[md5sum] = "4d8e6e72d057189a1660462a678d9ada"
 SRC_URI[sha256sum] = "feb3a90d86ffd69cf5b88144a8876ae05bd42384f559676f08100671589fa2bb"
 
diff --git a/yocto-poky/meta/recipes-extended/tar/tar.inc b/yocto-poky/meta/recipes-extended/tar/tar.inc
index b339c43..93e4da1 100644
--- a/yocto-poky/meta/recipes-extended/tar/tar.inc
+++ b/yocto-poky/meta/recipes-extended/tar/tar.inc
@@ -22,10 +22,12 @@
 }
 
 do_install_append_class-target() {
-    install -d ${D}${base_bindir}
-    mv ${D}${bindir}/tar ${D}${base_bindir}/tar
-    mv ${D}${bindir}/gtar ${D}${base_bindir}/gtar
-    rmdir ${D}${bindir}/
+    if [ "${base_bindir}" != "${bindir}" ]; then
+        install -d ${D}${base_bindir}
+        mv ${D}${bindir}/tar ${D}${base_bindir}/tar
+        mv ${D}${bindir}/gtar ${D}${base_bindir}/gtar
+        rmdir ${D}${bindir}/
+    fi
 }
 
 PACKAGES =+ "${PN}-rmt"
diff --git a/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.0.bb b/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.0.bb
index 8fb715a..a8702cf 100644
--- a/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.0.bb
+++ b/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.0.bb
@@ -10,7 +10,7 @@
 PROVIDES_append_class-native = " texinfo-replacement-native"
 
 def compress_pkg(d):
-    if "compress_doc" in (d.getVar("INHERIT", True) or "").split():
+    if bb.data.inherits_class('compress_doc', d):
          compress = d.getVar("DOC_COMPRESS", True)
          if compress == "gz":
              return "gzip"
diff --git a/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2015f.bb b/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2015f.bb
deleted file mode 100644
index a8865a3..0000000
--- a/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2015f.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-# note that we allow for us to use data later than our code version
-#
-DESCRIPTION = "tzcode, timezone zoneinfo utils -- zic, zdump, tzselect"
-LICENSE = "PD & BSD"
-
-LIC_FILES_CHKSUM = "file://${WORKDIR}/README;md5=d0ff93a73dd5bc3c6e724bb4343760f6"
-
-SRC_URI =" ftp://ftp.iana.org/tz/releases/tzcode${PV}.tar.gz;name=tzcode \
-           ftp://ftp.iana.org/tz/releases/tzdata2015f.tar.gz;name=tzdata"
-
-SRC_URI[tzcode.md5sum] = "19578d432ba8b92f73406a17a9bc268d"
-SRC_URI[tzcode.sha256sum] = "0c95e0a42bb61141f790f4f5f204b954d7654c894aa54a594a215d6f38de84ae"
-SRC_URI[tzdata.md5sum] = "e3b82732d20e973e48af1c6f13df9a1d"
-SRC_URI[tzdata.sha256sum] = "959f81b541e042ecb13c50097d264ae92ff03a57979c478dbcf24d5da242531d"
-
-S = "${WORKDIR}"
-
-inherit native
-
-do_install () {
-        install -d ${D}${bindir}/
-        install -m 755 zic ${D}${bindir}/
-        install -m 755 zdump ${D}${bindir}/
-        install -m 755 tzselect ${D}${bindir}/
-}
diff --git a/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2016a.bb b/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2016a.bb
new file mode 100644
index 0000000..76f97f0
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2016a.bb
@@ -0,0 +1,25 @@
+# note that we allow for us to use data later than our code version
+#
+SUMMARY = "tzcode, timezone zoneinfo utils -- zic, zdump, tzselect"
+LICENSE = "PD & BSD & BSD-3-Clause"
+
+LIC_FILES_CHKSUM = "file://LICENSE;md5=76ae2becfcb9a685041c6f166b44c2c2"
+
+SRC_URI =" ftp://ftp.iana.org/tz/releases/tzcode${PV}.tar.gz;name=tzcode \
+           ftp://ftp.iana.org/tz/releases/tzdata${PV}.tar.gz;name=tzdata"
+
+SRC_URI[tzcode.md5sum] = "f5e0299925631da7cf82d8ce1205111d"
+SRC_URI[tzcode.sha256sum] = "11ae66d59b844e8c6c81914c9dd73b666627bd7792855ba9de195eee4520c28d"
+SRC_URI[tzdata.md5sum] = "0d3123eb1b453ec0620822bd65be4c42"
+SRC_URI[tzdata.sha256sum] = "5efa6b324e64ef921ef700ac3273a51895f672684a30e342f68e47871c6a8cd1"
+
+S = "${WORKDIR}"
+
+inherit native
+
+do_install () {
+        install -d ${D}${bindir}/
+        install -m 755 zic ${D}${bindir}/
+        install -m 755 zdump ${D}${bindir}/
+        install -m 755 tzselect ${D}${bindir}/
+}
diff --git a/yocto-poky/meta/recipes-extended/tzdata/tzdata_2015f.bb b/yocto-poky/meta/recipes-extended/tzdata/tzdata_2016a.bb
similarity index 96%
rename from yocto-poky/meta/recipes-extended/tzdata/tzdata_2015f.bb
rename to yocto-poky/meta/recipes-extended/tzdata/tzdata_2016a.bb
index 7cda40d..6ba5f81 100644
--- a/yocto-poky/meta/recipes-extended/tzdata/tzdata_2015f.bb
+++ b/yocto-poky/meta/recipes-extended/tzdata/tzdata_2016a.bb
@@ -1,14 +1,15 @@
-DESCRIPTION = "Timezone data"
+SUMMARY = "Timezone data"
 HOMEPAGE = "http://www.iana.org/time-zones"
 SECTION = "base"
-LICENSE = "PD & BSD"
-LIC_FILES_CHKSUM = "file://asia;beginline=2;endline=3;md5=996a9811747aa48db91ed239e5b355a1"
+LICENSE = "PD & BSD & BSD-3-Clause"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=76ae2becfcb9a685041c6f166b44c2c2"
+
 DEPENDS = "tzcode-native"
 
 SRC_URI = "ftp://ftp.iana.org/tz/releases/tzdata${PV}.tar.gz;name=tzdata"
 
-SRC_URI[tzdata.md5sum] = "e3b82732d20e973e48af1c6f13df9a1d"
-SRC_URI[tzdata.sha256sum] = "959f81b541e042ecb13c50097d264ae92ff03a57979c478dbcf24d5da242531d"
+SRC_URI[tzdata.md5sum] = "0d3123eb1b453ec0620822bd65be4c42"
+SRC_URI[tzdata.sha256sum] = "5efa6b324e64ef921ef700ac3273a51895f672684a30e342f68e47871c6a8cd1"
 
 inherit allarch
 
diff --git a/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7696.patch b/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7696.patch
new file mode 100644
index 0000000..ea93823
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7696.patch
@@ -0,0 +1,38 @@
+Upstream-Status: Backport
+Signed-off-by: Tudor Florea <tudor.flore@enea.com>
+
+From 68efed87fabddd450c08f3112f62a73f61d493c9 Mon Sep 17 00:00:00 2001
+From: Petr Stodulka <pstodulk@redhat.com>
+Date: Mon, 14 Sep 2015 18:23:17 +0200
+Subject: [PATCH 1/2] upstream fix for heap overflow
+
+https://bugzilla.redhat.com/attachment.cgi?id=1073002
+---
+ crypt.c | 12 +++++++++++-
+ 1 file changed, 11 insertions(+), 1 deletion(-)
+
+diff --git a/crypt.c b/crypt.c
+index 784e411..a8975f2 100644
+--- a/crypt.c
++++ b/crypt.c
+@@ -465,7 +465,17 @@ int decrypt(__G__ passwrd)
+     GLOBAL(pInfo->encrypted) = FALSE;
+     defer_leftover_input(__G);
+     for (n = 0; n < RAND_HEAD_LEN; n++) {
+-        b = NEXTBYTE;
++        /* 2012-11-23 SMS.  (OUSPG report.)
++         * Quit early if compressed size < HEAD_LEN.  The resulting
++         * error message ("unable to get password") could be improved,
++         * but it's better than trying to read nonexistent data, and
++         * then continuing with a negative G.csize.  (See
++         * fileio.c:readbyte()).
++         */
++        if ((b = NEXTBYTE) == (ush)EOF)
++        {
++            return PK_ERR;
++        }
+         h[n] = (uch)b;
+         Trace((stdout, " (%02x)", h[n]));
+     }
+-- 
+2.4.6
diff --git a/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7697.patch b/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7697.patch
new file mode 100644
index 0000000..da68988
--- /dev/null
+++ b/yocto-poky/meta/recipes-extended/unzip/unzip/CVE-2015-7697.patch
@@ -0,0 +1,31 @@
+Upstream-Status: Backport
+Signed-off-by: Tudor Florea <tudor.flore@enea.com>
+
+From bd8a743ee0a77e65ad07ef4196c4cd366add3f26 Mon Sep 17 00:00:00 2001
+From: Kamil Dudka <kdudka@redhat.com>
+Date: Mon, 14 Sep 2015 18:24:56 +0200
+Subject: [PATCH 2/2] fix infinite loop when extracting empty bzip2 data
+
+---
+ extract.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
+
+diff --git a/extract.c b/extract.c
+index 7134bfe..29db027 100644
+--- a/extract.c
++++ b/extract.c
+@@ -2733,6 +2733,12 @@ __GDEF
+     int repeated_buf_err;
+     bz_stream bstrm;
+ 
++    if (G.incnt <= 0 && G.csize <= 0L) {
++        /* avoid an infinite loop */
++        Trace((stderr, "UZbunzip2() got empty input\n"));
++        return 2;
++    }
++
+ #if (defined(DLL) && !defined(NO_SLIDE_REDIR))
+     if (G.redirect_slide)
+         wsize = G.redirect_size, redirSlide = G.redirect_buffer;
+-- 
+2.4.6
diff --git a/yocto-poky/meta/recipes-extended/unzip/unzip/unzip-6.0_overflow3.diff b/yocto-poky/meta/recipes-extended/unzip/unzip/cve-2014-9636.patch
similarity index 100%
rename from yocto-poky/meta/recipes-extended/unzip/unzip/unzip-6.0_overflow3.diff
rename to yocto-poky/meta/recipes-extended/unzip/unzip/cve-2014-9636.patch
diff --git a/yocto-poky/meta/recipes-extended/unzip/unzip_6.0.bb b/yocto-poky/meta/recipes-extended/unzip/unzip_6.0.bb
index 4a0a713..b386323 100644
--- a/yocto-poky/meta/recipes-extended/unzip/unzip_6.0.bb
+++ b/yocto-poky/meta/recipes-extended/unzip/unzip_6.0.bb
@@ -10,10 +10,12 @@
 	file://avoid-strip.patch \
 	file://define-ldflags.patch \
 	file://06-unzip60-alt-iconv-utf8_CVE-2015-1315.patch \
-	file://unzip-6.0_overflow3.diff \
+	file://cve-2014-9636.patch \
 	file://09-cve-2014-8139-crc-overflow.patch \
 	file://10-cve-2014-8140-test-compr-eb.patch \
 	file://11-cve-2014-8141-getzip64data.patch \
+	file://CVE-2015-7696.patch \
+	file://CVE-2015-7697.patch \
 "
 
 SRC_URI[md5sum] = "62b490407489521db863b523a7f86375"
diff --git a/yocto-poky/meta/recipes-extended/xz/xz_5.2.1.bb b/yocto-poky/meta/recipes-extended/xz/xz_5.2.1.bb
index e0ae48f..cf7fba6 100644
--- a/yocto-poky/meta/recipes-extended/xz/xz_5.2.1.bb
+++ b/yocto-poky/meta/recipes-extended/xz/xz_5.2.1.bb
@@ -6,7 +6,7 @@
 # which is GPLv3 is an m4 macro which isn't shipped in any of our packages,
 # and the LGPL bits are under lib/, which appears to be used for libgnu, which
 # appears to be used for DOS builds. So we're left with GPLv2+ and PD.
-LICENSE = "GPLv2+ & GPLv3+ & LGPLv2.1+ & PD"
+LICENSE = "GPLv2+ & GPL-3.0-with-autoconf-exception & LGPLv2.1+ & PD"
 LICENSE_${PN} = "GPLv2+"
 LICENSE_${PN}-dev = "GPLv2+"
 LICENSE_${PN}-staticdev = "GPLv2+"
diff --git a/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.16.3.bb b/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.16.3.bb
index 506fb25..c3745c0 100644
--- a/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.16.3.bb
+++ b/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.16.3.bb
@@ -5,7 +5,10 @@
 DEPENDS = "libsoup-2.4 webkitgtk gtk+3 iso-codes ca-certificates avahi libnotify gcr libwnck3 \
 	   gsettings-desktop-schemas gnome-desktop3"
 
-inherit gnomebase gsettings
+inherit gnomebase gsettings distro_features_check
+# libwnck3 is x11 only
+REQUIRED_DISTRO_FEATURES = "x11"
+
 SRC_URI += "file://0001-yelp.m4-drop-the-check-for-itstool.patch"
 SRC_URI[archive.md5sum] = "3296af4532b8019775f4b40d21a341ae"
 SRC_URI[archive.sha256sum] = "d527f1770779ec22d955aeb13b148a846a26144e433ff0480c981af80e2390b1"
diff --git a/yocto-poky/meta/recipes-gnome/gcr/gcr_3.16.0.bb b/yocto-poky/meta/recipes-gnome/gcr/gcr_3.16.0.bb
index 8b5b6e4..e50b3a8 100644
--- a/yocto-poky/meta/recipes-gnome/gcr/gcr_3.16.0.bb
+++ b/yocto-poky/meta/recipes-gnome/gcr/gcr_3.16.0.bb
@@ -7,7 +7,9 @@
 
 DEPENDS = "gtk+3 p11-kit glib-2.0 libgcrypt vala"
 
-inherit autotools gnomebase gtk-icon-cache gtk-doc
+inherit autotools gnomebase gtk-icon-cache gtk-doc distro_features_check
+# depends on gtk+3, but also x11 through gtk+-x11
+REQUIRED_DISTRO_FEATURES = "x11"
 
 SRC_URI[archive.md5sum] = "d5835680be0b6a838e02a528d5378d9c"
 SRC_URI[archive.sha256sum] = "ecfe8df41cc88158364bb15addc670b11e539fe844742983629ba2323888d075"
@@ -16,3 +18,6 @@
     ${datadir}/dbus-1 \
     ${datadir}/gcr-3 \
 "
+
+# http://errors.yoctoproject.org/Errors/Details/20229/
+ARM_INSTRUCTION_SET = "arm"
diff --git a/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/CVE-2015-7674.patch b/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/CVE-2015-7674.patch
new file mode 100644
index 0000000..d516e88
--- /dev/null
+++ b/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/CVE-2015-7674.patch
@@ -0,0 +1,39 @@
+From e9a5704edaa9aee9498f1fbf6e1b70fcce2e55aa Mon Sep 17 00:00:00 2001
+From: Benjamin Otte <otte@redhat.com>
+Date: Tue, 22 Sep 2015 22:44:51 +0200
+Subject: [PATCH] pixops: Don't overflow variables when shifting them
+
+If we shift by 16 bits we need to be sure those 16 bits actually exist.
+They do now.
+
+Upstream-status: Backport
+https://git.gnome.org/browse/gdk-pixbuf/commit/?id=e9a5704edaa9aee9498f1fbf6e1b70fcce2e55aa
+
+CVE:  CVE-2015-7674
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ gdk-pixbuf/pixops/pixops.c | 10 +++++-----
+ 1 file changed, 5 insertions(+), 5 deletions(-)
+
+Index: gdk-pixbuf-2.30.8/gdk-pixbuf/pixops/pixops.c
+===================================================================
+--- gdk-pixbuf-2.30.8.orig/gdk-pixbuf/pixops/pixops.c
++++ gdk-pixbuf-2.30.8/gdk-pixbuf/pixops/pixops.c
+@@ -264,11 +264,11 @@ pixops_scale_nearest (guchar        *des
+ 		      double         scale_x,
+ 		      double         scale_y)
+ {
+-  int i;
+-  int x;
+-  int x_step = (1 << SCALE_SHIFT) / scale_x;
+-  int y_step = (1 << SCALE_SHIFT) / scale_y;
+-  int xmax, xstart, xstop, x_pos, y_pos;
++  gint64 i;
++  gint64 x;
++  gint64 x_step = (1 << SCALE_SHIFT) / scale_x;
++  gint64 y_step = (1 << SCALE_SHIFT) / scale_y;
++  gint64 xmax, xstart, xstop, x_pos, y_pos;
+   const guchar *p;
+ 
+ #define INNER_LOOP(SRC_CHANNELS,DEST_CHANNELS,ASSIGN_PIXEL)     \
diff --git a/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb b/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
index 07c2dce..dcd01b1 100644
--- a/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
+++ b/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
@@ -9,7 +9,6 @@
 SECTION = "libs"
 
 DEPENDS = "glib-2.0"
-DEPENDS_append_linuxstdbase = " virtual/libx11"
 
 MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
 
@@ -19,6 +18,7 @@
            file://run-ptest \
            file://fatal-loader.patch \
            file://0001-pixops-Be-more-careful-about-integer-overflow.patch \
+           file://CVE-2015-7674.patch \
            "
 
 SRC_URI[md5sum] = "4fed0d54432f1b69fc6e66e608bd5542"
@@ -50,18 +50,19 @@
 FILES_${PN}-xlib = "${libdir}/*pixbuf_xlib*${SOLIBS}"
 ALLOW_EMPTY_${PN}-xlib = "1"
 
-FILES_${PN} = "${bindir}/gdk-pixbuf-query-loaders \
-	${bindir}/gdk-pixbuf-pixdata \
+FILES_${PN} = "${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders \
 	${libdir}/lib*.so.*"
 
 FILES_${PN}-dev += " \
 	${bindir}/gdk-pixbuf-csource \
+	${bindir}/gdk-pixbuf-pixdata \
 	${includedir}/* \
 	${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders/*.la \
 "
 
 FILES_${PN}-dbg += " \
-        ${libdir}/.debug/* \
+	${libdir}/.debug/* \
+	${libdir}/gdk-pixbuf-2.0/.debug/* \
 	${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders/.debug/* \
 "
 
@@ -81,6 +82,12 @@
     d.appendVar("RDEPENDS_gdk-pixbuf-ptest", " " + packages)
 }
 
+do_install_append() {
+	# Move gdk-pixbuf-query-loaders into libdir so it is always available
+	# in multilib builds.
+	mv ${D}/${bindir}/gdk-pixbuf-query-loaders ${D}/${libdir}/gdk-pixbuf-2.0/
+}
+
 do_install_append_class-native() {
 	find ${D}${libdir} -name "libpixbufloader-*.la" -exec rm \{\} \;
 
@@ -90,8 +97,17 @@
 	create_wrapper ${D}/${bindir}/gdk-pixbuf-pixdata \
 		GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache
 
-	create_wrapper ${D}/${bindir}/gdk-pixbuf-query-loaders \
+	create_wrapper ${D}/${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders \
 		GDK_PIXBUF_MODULE_FILE=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders.cache \
 		GDK_PIXBUF_MODULEDIR=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/loaders
 }
 BBCLASSEXTEND = "native"
+
+SSTATEPREINSTFUNCS_append_class-native = " gdkpixbuf_sstate_preinst"
+SYSROOT_PREPROCESS_FUNCS_append_class-native = " gdkpixbuf_sstate_preinst"
+
+gdkpixbuf_sstate_preinst() {
+	if [ "${BB_CURRENTTASK}" = "populate_sysroot" ]; then
+		rm -rf ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/${LIBV}/*
+	fi
+}
diff --git a/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.16.2.bb b/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.16.2.bb
index 1f2f06c..3765697 100644
--- a/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.16.2.bb
+++ b/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.16.2.bb
@@ -10,7 +10,10 @@
 SRC_URI[archive.md5sum] = "ab5bf4cc94ad63639f42adcc1542b1f0"
 SRC_URI[archive.sha256sum] = "3a8f196b46eb9dbd3ba2afb8fb5fef6a8825539d449a02181311242e22227bd0"
 
-DEPENDS += "gsettings-desktop-schemas gconf libxrandr virtual/libx11 gtk+3 glib-2.0 gnome-doc-utils gnome-common startup-notification iso-codes"
+DEPENDS += "gsettings-desktop-schemas gconf libxrandr virtual/libx11 gtk+3 glib-2.0 gnome-doc-utils gnome-common startup-notification xkeyboard-config iso-codes"
+
+inherit distro_features_check
+REQUIRED_DISTRO_FEATURES = "x11"
 
 EXTRA_OECONF = "--disable-desktop-docs"
 
diff --git a/yocto-poky/meta/recipes-gnome/gnome/gnome-doc-utils.inc b/yocto-poky/meta/recipes-gnome/gnome/gnome-doc-utils.inc
index 9587506..8adfac7 100644
--- a/yocto-poky/meta/recipes-gnome/gnome/gnome-doc-utils.inc
+++ b/yocto-poky/meta/recipes-gnome/gnome/gnome-doc-utils.inc
@@ -15,9 +15,6 @@
 EXTRA_OECONF += "--disable-scrollkeeper"
 
 do_install_append() {
-	mkdir -p  ${D}${datadir}/xml/gnome/xslt/
-	cp -pPr ${S}/xslt/* ${D}${datadir}/xml/gnome/xslt/
-
 	chown -R root:root ${D}
 }
 
diff --git a/yocto-poky/meta/recipes-gnome/gtk+/gtk+.inc b/yocto-poky/meta/recipes-gnome/gtk+/gtk+.inc
index be5273d..a197b9d 100644
--- a/yocto-poky/meta/recipes-gnome/gtk+/gtk+.inc
+++ b/yocto-poky/meta/recipes-gnome/gtk+/gtk+.inc
@@ -11,7 +11,7 @@
 SECTION = "libs"
 
 inherit distro_features_check
-ANY_OF_DISTRO_FEATURES = "directfb x11"
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 X11DEPENDS = "virtual/libx11 libxext libxcursor libxrandr libxdamage libxrender libxcomposite"
 DEPENDS = "glib-2.0 pango atk jpeg libpng gdk-pixbuf-native docbook-utils-native \
diff --git a/yocto-poky/meta/recipes-gnome/gtk+/gtk+3.inc b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3.inc
index f29f0d3..22a40d8 100644
--- a/yocto-poky/meta/recipes-gnome/gtk+/gtk+3.inc
+++ b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3.inc
@@ -10,7 +10,8 @@
 
 LICENSE = "LGPLv2 & LGPLv2+ & LGPLv2.1+"
 
-inherit autotools pkgconfig gtk-doc update-alternatives gtk-immodules-cache gsettings
+inherit autotools pkgconfig gtk-doc update-alternatives gtk-immodules-cache gsettings distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}"
 
 # This should be in autotools.bbclass, but until something elses uses it putting
 # it here avoids rebuilding everything.
@@ -31,9 +32,11 @@
 "
 
 PACKAGECONFIG ??= "${@bb.utils.contains("DISTRO_FEATURES", "x11", "x11", "", d)} \
+                   ${@bb.utils.contains("DISTRO_FEATURES", "opengl x11", "glx", "", d)} \
                    ${@bb.utils.contains("DISTRO_FEATURES", "wayland", "wayland", "", d)}"
 
 PACKAGECONFIG[x11] = "--enable-x11-backend,--disable-x11-backend,at-spi2-atk fontconfig libx11 libxext libxcursor libxi libxdamage libxrandr libxrender libxcomposite libxfixes"
+PACKAGECONFIG[glx] = "--enable-glx,--disable-glx,,libgl"
 PACKAGECONFIG[wayland] = "--enable-wayland-backend,--disable-wayland-backend,wayland libxkbcommon virtual/mesa"
 
 do_install_append() {
diff --git a/yocto-poky/meta/recipes-gnome/gtk+/gtk+3/Do-not-try-to-initialize-GL-without-libGL.patch b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3/Do-not-try-to-initialize-GL-without-libGL.patch
new file mode 100644
index 0000000..c8c480c
--- /dev/null
+++ b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3/Do-not-try-to-initialize-GL-without-libGL.patch
@@ -0,0 +1,60 @@
+From fc22058a10db913534f11348f86681fe9e1838e5 Mon Sep 17 00:00:00 2001
+From: Jussi Kukkonen <jussi.kukkonen@intel.com>
+Date: Fri, 16 Oct 2015 16:35:16 +0300
+Subject: [PATCH] Do not try to initialize GL without libGL
+
+_gdk_x11_screen_update_visuals_for_gl() will end up calling epoxys
+GLX api which will exit() if libGL.so.1 is not present. We do not
+want that to happen and we don't want every app to have to set
+"GDK_GL=disabled" environment variable: so use #ifdef set based on
+opengl distro feature.
+
+Upstream is not interested in the fix as it is: Either epoxy should be
+fixed (to not exit) or GTK+ possibly could do some additional probing
+before calling epoxy APIs.
+
+Upstream-Status: Denied
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ configure.ac            | 7 +++++++
+ gdk/x11/gdkvisual-x11.c | 5 +++++
+ 2 files changed, 12 insertions(+)
+
+diff --git a/configure.ac b/configure.ac
+index 729a62e..58cc1ac 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -328,6 +328,13 @@ AC_ARG_ENABLE(mir-backend,
+                               [enable the Mir gdk backend])],
+ 			      [backend_set=yes])
+ 
++AC_ARG_ENABLE(glx,
++              [AS_HELP_STRING([--enable-glx],
++                              [When enabled Gdk will try to initialize GLX])])
++AS_IF([test "x$enable_glx" != "xno"], [
++  AC_DEFINE([HAVE_GLX], [], [GLX will be available at runtime])
++])
++
+ if test -z "$backend_set"; then
+   if test "$platform_win32" = yes; then
+     enable_win32_backend=yes
+diff --git a/gdk/x11/gdkvisual-x11.c b/gdk/x11/gdkvisual-x11.c
+index f3b062d..c8243f4 100644
+--- a/gdk/x11/gdkvisual-x11.c
++++ b/gdk/x11/gdkvisual-x11.c
+@@ -345,7 +345,12 @@ _gdk_x11_screen_init_visuals (GdkScreen *screen)
+   /* If GL is available we want to pick better default/rgba visuals,
+      as we care about glx details such as alpha/depth/stencil depth,
+      stereo and double buffering */
++  /* update_visuals_for_gl() will end up calling epoxy GLX api which
++     will exit if libgl is not there: so only do this if we know GL
++     is available */
++#ifdef HAVE_GLX
+   _gdk_x11_screen_update_visuals_for_gl (screen);
++#endif
+ }
+ 
+ gint
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.16.6.bb b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.16.6.bb
index 1d736a4..381e607 100644
--- a/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.16.6.bb
+++ b/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.16.6.bb
@@ -5,6 +5,7 @@
 SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \
            file://hardcoded_libtool.patch \
            file://Dont-force-csd.patch \
+           file://Do-not-try-to-initialize-GL-without-libGL.patch \
           "
 
 SRC_URI[md5sum] = "fc59e5c8b5a4585b60623dd708df400b"
diff --git a/yocto-poky/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb b/yocto-poky/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
index c30454c..7c3a87e 100644
--- a/yocto-poky/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
+++ b/yocto-poky/meta/recipes-gnome/gtk-engines/gtk-engines_2.20.2.bb
@@ -6,7 +6,7 @@
 LIC_FILES_CHKSUM = "file://COPYING;md5=2d5025d4aa3495befef8f17206a5b0a1"
 
 SECTION = "x11/base"
-DEPENDS = "intltool-native gtk+"
+DEPENDS = "intltool-native gtk+ gettext-native"
 
 PR = "r3"
 
@@ -34,6 +34,9 @@
 inherit gnomebase
 GNOME_COMPRESS_TYPE="bz2"
 
+inherit distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
+
 python populate_packages_prepend() {
     engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
     themes_root = os.path.join(d.getVar('datadir', True), "themes")
diff --git a/yocto-poky/meta/recipes-gnome/gtk-theme-torturer/gtk-theme-torturer_git.bb b/yocto-poky/meta/recipes-gnome/gtk-theme-torturer/gtk-theme-torturer_git.bb
index 889fd89..b67806d 100644
--- a/yocto-poky/meta/recipes-gnome/gtk-theme-torturer/gtk-theme-torturer_git.bb
+++ b/yocto-poky/meta/recipes-gnome/gtk-theme-torturer/gtk-theme-torturer_git.bb
@@ -13,6 +13,9 @@
 
 CFLAGS += "-Wl,-rpath-link,${STAGING_LIBDIR}"
 
+inherit distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
+
 do_install() {
 	install -d ${D}${bindir}
 	install -m 0755 torturer ${D}${bindir}
diff --git a/yocto-poky/meta/recipes-gnome/libglade/libglade_2.6.4.bb b/yocto-poky/meta/recipes-gnome/libglade/libglade_2.6.4.bb
index 15267ca..553e19c 100644
--- a/yocto-poky/meta/recipes-gnome/libglade/libglade_2.6.4.bb
+++ b/yocto-poky/meta/recipes-gnome/libglade/libglade_2.6.4.bb
@@ -11,7 +11,8 @@
 PR = "r5"
 DEPENDS = "zlib gdk-pixbuf gtk+"
 
-inherit autotools pkgconfig gnomebase gtk-doc
+inherit autotools pkgconfig gnomebase gtk-doc distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 GNOME_COMPRESS_TYPE="bz2"
 
 SRC_URI += "file://glade-cruft.patch file://no-xml2.patch file://python_environment.patch"
diff --git a/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_1.patch b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_1.patch
new file mode 100644
index 0000000..a3ba41f
--- /dev/null
+++ b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_1.patch
@@ -0,0 +1,139 @@
+From d1c9191949747f6dcfd207831d15dd4ba00e31f2 Mon Sep 17 00:00:00 2001
+From: Benjamin Otte <otte@redhat.com>
+Date: Wed, 7 Oct 2015 05:31:08 +0200
+Subject: [PATCH] state: Store mask as reference
+
+Instead of immediately looking up the mask, store the reference and look
+it up on use.
+
+Upstream-Status: Backport
+
+supporting patch
+https://git.gnome.org/browse/librsvg/commit/rsvg-styles.c?id=d1c9191949747f6dcfd207831d15dd4ba00e31f2
+
+CVE: CVE-2015-7558
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ rsvg-cairo-draw.c |  6 +++++-
+ rsvg-mask.c       | 17 -----------------
+ rsvg-mask.h       |  2 --
+ rsvg-styles.c     | 12 ++++++++----
+ rsvg-styles.h     |  2 +-
+ 5 files changed, 14 insertions(+), 25 deletions(-)
+
+Index: librsvg-2.40.10/rsvg-cairo-draw.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-cairo-draw.c
++++ librsvg-2.40.10/rsvg-cairo-draw.c
+@@ -825,7 +825,11 @@ rsvg_cairo_pop_render_stack (RsvgDrawing
+     cairo_set_operator (render->cr, state->comp_op);
+ 
+     if (state->mask) {
+-        rsvg_cairo_generate_mask (render->cr, state->mask, ctx, &render->bbox);
++        RsvgNode *mask;
++
++        mask = rsvg_defs_lookup (ctx->defs, state->mask);
++        if (mask && RSVG_NODE_TYPE (mask) == RSVG_NODE_TYPE_MASK)
++          rsvg_cairo_generate_mask (render->cr, (RsvgMask *) mask, ctx, &render->bbox);
+     } else if (state->opacity != 0xFF)
+         cairo_paint_with_alpha (render->cr, (double) state->opacity / 255.0);
+     else
+Index: librsvg-2.40.10/rsvg-mask.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-mask.c
++++ librsvg-2.40.10/rsvg-mask.c
+@@ -103,23 +103,6 @@ rsvg_get_url_string (const char *str)
+ }
+ 
+ RsvgNode *
+-rsvg_mask_parse (const RsvgDefs * defs, const char *str)
+-{
+-    char *name;
+-
+-    name = rsvg_get_url_string (str);
+-    if (name) {
+-        RsvgNode *val;
+-        val = rsvg_defs_lookup (defs, name);
+-        g_free (name);
+-
+-        if (val && RSVG_NODE_TYPE (val) == RSVG_NODE_TYPE_MASK)
+-            return val;
+-    }
+-    return NULL;
+-}
+-
+-RsvgNode *
+ rsvg_clip_path_parse (const RsvgDefs * defs, const char *str)
+ {
+     char *name;
+Index: librsvg-2.40.10/rsvg-mask.h
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-mask.h
++++ librsvg-2.40.10/rsvg-mask.h
+@@ -48,8 +48,6 @@ struct _RsvgMask {
+ 
+ G_GNUC_INTERNAL
+ RsvgNode *rsvg_new_mask	    (void);
+-G_GNUC_INTERNAL
+-RsvgNode *rsvg_mask_parse   (const RsvgDefs * defs, const char *str);
+ 
+ typedef struct _RsvgClipPath RsvgClipPath;
+ 
+Index: librsvg-2.40.10/rsvg-styles.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-styles.c
++++ librsvg-2.40.10/rsvg-styles.c
+@@ -221,6 +221,7 @@ rsvg_state_clone (RsvgState * dst, const
+ 
+     *dst = *src;
+     dst->parent = parent;
++    dst->mask = g_strdup (src->mask);
+     dst->font_family = g_strdup (src->font_family);
+     dst->lang = g_strdup (src->lang);
+     rsvg_paint_server_ref (dst->fill);
+@@ -356,7 +357,8 @@ rsvg_state_inherit_run (RsvgState * dst,
+ 
+     if (inherituninheritables) {
+         dst->clip_path_ref = src->clip_path_ref;
+-        dst->mask = src->mask;
++        g_free (dst->mask);
++        dst->mask = g_strdup (src->mask);
+         dst->enable_background = src->enable_background;
+         dst->adobe_blend = src->adobe_blend;
+         dst->opacity = src->opacity;
+@@ -444,6 +446,7 @@ rsvg_state_inherit (RsvgState * dst, con
+ void
+ rsvg_state_finalize (RsvgState * state)
+ {
++    g_free (state->mask);
+     g_free (state->font_family);
+     g_free (state->lang);
+     rsvg_paint_server_unref (state->fill);
+@@ -517,9 +520,10 @@ rsvg_parse_style_pair (RsvgHandle * ctx,
+             state->adobe_blend = 11;
+         else
+             state->adobe_blend = 0;
+-    } else if (g_str_equal (name, "mask"))
+-        state->mask = rsvg_mask_parse (ctx->priv->defs, value);
+-    else if (g_str_equal (name, "clip-path")) {
++    } else if (g_str_equal (name, "mask")) {
++        g_free (state->mask);
++        state->mask = rsvg_get_url_string (value);
++    } else if (g_str_equal (name, "clip-path")) {
+         state->clip_path_ref = rsvg_clip_path_parse (ctx->priv->defs, value);
+     } else if (g_str_equal (name, "overflow")) {
+         if (!g_str_equal (value, "inherit")) {
+Index: librsvg-2.40.10/rsvg-styles.h
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-styles.h
++++ librsvg-2.40.10/rsvg-styles.h
+@@ -80,7 +80,7 @@ struct _RsvgState {
+     cairo_matrix_t personal_affine;
+ 
+     RsvgFilter *filter;
+-    void *mask;
++    char *mask;
+     void *clip_path_ref;
+     guint8 adobe_blend;         /* 0..11 */
+     guint8 opacity;             /* 0..255 */
diff --git a/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_2.patch b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_2.patch
new file mode 100644
index 0000000..9f6820e
--- /dev/null
+++ b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_2.patch
@@ -0,0 +1,230 @@
+From 6cfaab12c70cd4a34c4730837f1ecdf792593c90 Mon Sep 17 00:00:00 2001
+From: Benjamin Otte <otte@redhat.com>
+Date: Wed, 7 Oct 2015 07:57:39 +0200
+Subject: [PATCH] state: Look up clip path lazily
+
+Upstream-Status: Backport
+
+supporting patch
+https://git.gnome.org/browse/librsvg/commit/rsvg-styles.c?id=6cfaab12c70cd4a34c4730837f1ecdf792593c90
+
+CVE: CVE-2015-7558
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ rsvg-cairo-draw.c | 56 +++++++++++++++++++++++++++++++++----------------------
+ rsvg-mask.c       | 17 -----------------
+ rsvg-mask.h       |  2 --
+ rsvg-styles.c     | 10 +++++++---
+ rsvg-styles.h     |  2 +-
+ 5 files changed, 42 insertions(+), 45 deletions(-)
+
+Index: librsvg-2.40.10/rsvg-cairo-draw.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-cairo-draw.c
++++ librsvg-2.40.10/rsvg-cairo-draw.c
+@@ -461,7 +461,7 @@ rsvg_cairo_render_path (RsvgDrawingCtx *
+         return;
+ 
+     need_tmpbuf = ((state->fill != NULL) && (state->stroke != NULL) && state->opacity != 0xff)
+-        || state->clip_path_ref || state->mask || state->filter
++        || state->clip_path || state->mask || state->filter
+         || (state->comp_op != CAIRO_OPERATOR_OVER);
+ 
+     if (need_tmpbuf)
+@@ -708,18 +708,6 @@ rsvg_cairo_generate_mask (cairo_t * cr,
+ }
+ 
+ static void
+-rsvg_cairo_push_early_clips (RsvgDrawingCtx * ctx)
+-{
+-    RsvgCairoRender *render = RSVG_CAIRO_RENDER (ctx->render);
+-  
+-    cairo_save (render->cr);
+-    if (rsvg_current_state (ctx)->clip_path_ref)
+-        if (((RsvgClipPath *) rsvg_current_state (ctx)->clip_path_ref)->units == userSpaceOnUse)
+-            rsvg_cairo_clip (ctx, rsvg_current_state (ctx)->clip_path_ref, NULL);
+-
+-}
+-
+-static void
+ rsvg_cairo_push_render_stack (RsvgDrawingCtx * ctx)
+ {
+     /* XXX: Untested, probably needs help wrt filters */
+@@ -731,9 +719,27 @@ rsvg_cairo_push_render_stack (RsvgDrawin
+     RsvgState *state = rsvg_current_state (ctx);
+     gboolean lateclip = FALSE;
+ 
+-    if (rsvg_current_state (ctx)->clip_path_ref)
+-        if (((RsvgClipPath *) rsvg_current_state (ctx)->clip_path_ref)->units == objectBoundingBox)
+-            lateclip = TRUE;
++    if (rsvg_current_state (ctx)->clip_path) {
++        RsvgNode *node;
++        node = rsvg_defs_lookup (ctx->defs, rsvg_current_state (ctx)->clip_path);
++        if (node && RSVG_NODE_TYPE (node) == RSVG_NODE_TYPE_CLIP_PATH) {
++            RsvgClipPath *clip_path = (RsvgClipPath *) node;
++
++            switch (clip_path->units) {
++            case userSpaceOnUse:
++                rsvg_cairo_clip (ctx, clip_path, NULL);
++                break;
++            case objectBoundingBox:
++                lateclip = TRUE;
++                break;
++
++            default:
++                g_assert_not_reached ();
++                break;
++            }
++
++        }
++    }
+ 
+     if (state->opacity == 0xFF
+         && !state->filter && !state->mask && !lateclip && (state->comp_op == CAIRO_OPERATOR_OVER)
+@@ -774,7 +780,9 @@ rsvg_cairo_push_render_stack (RsvgDrawin
+ void
+ rsvg_cairo_push_discrete_layer (RsvgDrawingCtx * ctx)
+ {
+-    rsvg_cairo_push_early_clips (ctx);
++    RsvgCairoRender *render = RSVG_CAIRO_RENDER (ctx->render);
++
++    cairo_save (render->cr);
+     rsvg_cairo_push_render_stack (ctx);
+ }
+ 
+@@ -783,14 +791,18 @@ rsvg_cairo_pop_render_stack (RsvgDrawing
+ {
+     RsvgCairoRender *render = RSVG_CAIRO_RENDER (ctx->render);
+     cairo_t *child_cr = render->cr;
+-    gboolean lateclip = FALSE;
++    RsvgClipPath *lateclip = NULL;
+     cairo_surface_t *surface = NULL;
+     RsvgState *state = rsvg_current_state (ctx);
+     gboolean nest;
+ 
+-    if (rsvg_current_state (ctx)->clip_path_ref)
+-        if (((RsvgClipPath *) rsvg_current_state (ctx)->clip_path_ref)->units == objectBoundingBox)
+-            lateclip = TRUE;
++    if (rsvg_current_state (ctx)->clip_path) {
++        RsvgNode *node;
++        node = rsvg_defs_lookup (ctx->defs, rsvg_current_state (ctx)->clip_path);
++        if (node && RSVG_NODE_TYPE (node) == RSVG_NODE_TYPE_CLIP_PATH
++            && ((RsvgClipPath *) node)->units == objectBoundingBox)
++            lateclip = (RsvgClipPath *) node;
++    }
+ 
+     if (state->opacity == 0xFF
+         && !state->filter && !state->mask && !lateclip && (state->comp_op == CAIRO_OPERATOR_OVER)
+@@ -820,7 +832,7 @@ rsvg_cairo_pop_render_stack (RsvgDrawing
+                               nest ? 0 : render->offset_y);
+ 
+     if (lateclip)
+-        rsvg_cairo_clip (ctx, rsvg_current_state (ctx)->clip_path_ref, &render->bbox);
++        rsvg_cairo_clip (ctx, lateclip, &render->bbox);
+ 
+     cairo_set_operator (render->cr, state->comp_op);
+ 
+Index: librsvg-2.40.10/rsvg-mask.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-mask.c
++++ librsvg-2.40.10/rsvg-mask.c
+@@ -102,23 +102,6 @@ rsvg_get_url_string (const char *str)
+     return NULL;
+ }
+ 
+-RsvgNode *
+-rsvg_clip_path_parse (const RsvgDefs * defs, const char *str)
+-{
+-    char *name;
+-
+-    name = rsvg_get_url_string (str);
+-    if (name) {
+-        RsvgNode *val;
+-        val = rsvg_defs_lookup (defs, name);
+-        g_free (name);
+-
+-        if (val && RSVG_NODE_TYPE (val) == RSVG_NODE_TYPE_CLIP_PATH)
+-            return val;
+-    }
+-    return NULL;
+-}
+-
+ static void
+ rsvg_clip_path_set_atts (RsvgNode * self, RsvgHandle * ctx, RsvgPropertyBag * atts)
+ {
+Index: librsvg-2.40.10/rsvg-mask.h
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-mask.h
++++ librsvg-2.40.10/rsvg-mask.h
+@@ -58,8 +58,6 @@ struct _RsvgClipPath {
+ 
+ G_GNUC_INTERNAL
+ RsvgNode *rsvg_new_clip_path	(void);
+-G_GNUC_INTERNAL
+-RsvgNode *rsvg_clip_path_parse	(const RsvgDefs * defs, const char *str);
+ 
+ G_END_DECLS
+ #endif
+Index: librsvg-2.40.10/rsvg-styles.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-styles.c
++++ librsvg-2.40.10/rsvg-styles.c
+@@ -149,7 +149,7 @@ rsvg_state_init (RsvgState * state)
+     state->visible = TRUE;
+     state->cond_true = TRUE;
+     state->filter = NULL;
+-    state->clip_path_ref = NULL;
++    state->clip_path = NULL;
+     state->startMarker = NULL;
+     state->middleMarker = NULL;
+     state->endMarker = NULL;
+@@ -222,6 +222,7 @@ rsvg_state_clone (RsvgState * dst, const
+     *dst = *src;
+     dst->parent = parent;
+     dst->mask = g_strdup (src->mask);
++    dst->clip_path = g_strdup (src->clip_path);
+     dst->font_family = g_strdup (src->font_family);
+     dst->lang = g_strdup (src->lang);
+     rsvg_paint_server_ref (dst->fill);
+@@ -356,7 +357,8 @@ rsvg_state_inherit_run (RsvgState * dst,
+     }
+ 
+     if (inherituninheritables) {
+-        dst->clip_path_ref = src->clip_path_ref;
++        g_free (dst->clip_path);
++        dst->clip_path = g_strdup (src->clip_path);
+         g_free (dst->mask);
+         dst->mask = g_strdup (src->mask);
+         dst->enable_background = src->enable_background;
+@@ -447,6 +449,7 @@ void
+ rsvg_state_finalize (RsvgState * state)
+ {
+     g_free (state->mask);
++    g_free (state->clip_path);
+     g_free (state->font_family);
+     g_free (state->lang);
+     rsvg_paint_server_unref (state->fill);
+@@ -524,7 +527,8 @@ rsvg_parse_style_pair (RsvgHandle * ctx,
+         g_free (state->mask);
+         state->mask = rsvg_get_url_string (value);
+     } else if (g_str_equal (name, "clip-path")) {
+-        state->clip_path_ref = rsvg_clip_path_parse (ctx->priv->defs, value);
++        g_free (state->clip_path);
++        state->clip_path = rsvg_get_url_string (value);
+     } else if (g_str_equal (name, "overflow")) {
+         if (!g_str_equal (value, "inherit")) {
+             state->overflow = rsvg_css_parse_overflow (value, &state->has_overflow);
+Index: librsvg-2.40.10/rsvg-styles.h
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-styles.h
++++ librsvg-2.40.10/rsvg-styles.h
+@@ -81,7 +81,7 @@ struct _RsvgState {
+ 
+     RsvgFilter *filter;
+     char *mask;
+-    void *clip_path_ref;
++    char *clip_path;
+     guint8 adobe_blend;         /* 0..11 */
+     guint8 opacity;             /* 0..255 */
+ 
diff --git a/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_3.patch b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_3.patch
new file mode 100644
index 0000000..dd67ab7
--- /dev/null
+++ b/yocto-poky/meta/recipes-gnome/librsvg/librsvg/CVE-2015-7558_3.patch
@@ -0,0 +1,223 @@
+From a51919f7e1ca9c535390a746fbf6e28c8402dc61 Mon Sep 17 00:00:00 2001
+From: Benjamin Otte <otte@redhat.com>
+Date: Wed, 7 Oct 2015 08:45:37 +0200
+Subject: [PATCH] rsvg: Add rsvg_acquire_node()
+
+This function does proper recursion checks when looking up resources
+from URLs and thereby helps avoiding infinite loops when cyclic
+references span multiple types of elements.
+
+Upstream-Status: Backport
+
+https://git.gnome.org/browse/librsvg/commit/rsvg-styles.c?id=a51919f7e1ca9c535390a746fbf6e28c8402dc61
+
+CVE: CVE-2015-7558
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ rsvg-base.c         | 55 +++++++++++++++++++++++++++++++++++++++++++++++++++++
+ rsvg-cairo-draw.c   | 15 +++++++++++----
+ rsvg-cairo-render.c |  1 +
+ rsvg-filter.c       |  9 +++++++--
+ rsvg-private.h      |  5 +++++
+ 5 files changed, 79 insertions(+), 6 deletions(-)
+
+Index: librsvg-2.40.10/rsvg-base.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-base.c
++++ librsvg-2.40.10/rsvg-base.c
+@@ -1236,6 +1236,8 @@ rsvg_drawing_ctx_free (RsvgDrawingCtx *
+ 	g_slist_free (handle->drawsub_stack);
+ 
+     g_slist_free (handle->ptrs);
++    g_warn_if_fail (handle->acquired_nodes == NULL);
++    g_slist_free (handle->acquired_nodes);
+ 	
+     if (handle->base_uri)
+         g_free (handle->base_uri);
+@@ -2018,6 +2020,59 @@ rsvg_push_discrete_layer (RsvgDrawingCtx
+     ctx->render->push_discrete_layer (ctx);
+ }
+ 
++/*
++ * rsvg_acquire_node:
++ * @ctx: The drawing context in use
++ * @url: The IRI to lookup
++ *
++ * Use this function when looking up urls to other nodes. This
++ * function does proper recursion checking and thereby avoids
++ * infinite loops.
++ *
++ * Nodes acquired by this function must be released using
++ * rsvg_release_node() in reverse acquiring order.
++ *
++ * Returns: The node referenced by @url or %NULL if the @url
++ *          does not reference a node.
++ */
++RsvgNode *
++rsvg_acquire_node (RsvgDrawingCtx * ctx, const char *url)
++{
++  RsvgNode *node;
++
++  node = rsvg_defs_lookup (ctx->defs, url);
++  if (node == NULL)
++    return NULL;
++
++  if (g_slist_find (ctx->acquired_nodes, node))
++    return NULL;
++
++  ctx->acquired_nodes = g_slist_prepend (ctx->acquired_nodes, node);
++
++  return node;
++}
++
++/*
++ * rsvg_release_node:
++ * @ctx: The drawing context the node was acquired from
++ * @node: Node to release
++ *
++ * Releases a node previously acquired via rsvg_acquire_node().
++ *
++ * if @node is %NULL, this function does nothing.
++ */
++void
++rsvg_release_node (RsvgDrawingCtx * ctx, RsvgNode *node)
++{
++  if (node == NULL)
++    return;
++
++  g_return_if_fail (ctx->acquired_nodes != NULL);
++  g_return_if_fail (ctx->acquired_nodes->data == node);
++
++  ctx->acquired_nodes = g_slist_remove (ctx->acquired_nodes, node);
++}
++
+ void
+ rsvg_render_path (RsvgDrawingCtx * ctx, const cairo_path_t *path)
+ {
+Index: librsvg-2.40.10/rsvg-cairo-draw.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-cairo-draw.c
++++ librsvg-2.40.10/rsvg-cairo-draw.c
+@@ -721,7 +721,7 @@ rsvg_cairo_push_render_stack (RsvgDrawin
+ 
+     if (rsvg_current_state (ctx)->clip_path) {
+         RsvgNode *node;
+-        node = rsvg_defs_lookup (ctx->defs, rsvg_current_state (ctx)->clip_path);
++        node = rsvg_acquire_node (ctx, rsvg_current_state (ctx)->clip_path);
+         if (node && RSVG_NODE_TYPE (node) == RSVG_NODE_TYPE_CLIP_PATH) {
+             RsvgClipPath *clip_path = (RsvgClipPath *) node;
+ 
+@@ -739,6 +739,8 @@ rsvg_cairo_push_render_stack (RsvgDrawin
+             }
+ 
+         }
++        
++        rsvg_release_node (ctx, node);
+     }
+ 
+     if (state->opacity == 0xFF
+@@ -798,10 +800,12 @@ rsvg_cairo_pop_render_stack (RsvgDrawing
+ 
+     if (rsvg_current_state (ctx)->clip_path) {
+         RsvgNode *node;
+-        node = rsvg_defs_lookup (ctx->defs, rsvg_current_state (ctx)->clip_path);
++        node = rsvg_acquire_node (ctx, rsvg_current_state (ctx)->clip_path);
+         if (node && RSVG_NODE_TYPE (node) == RSVG_NODE_TYPE_CLIP_PATH
+             && ((RsvgClipPath *) node)->units == objectBoundingBox)
+             lateclip = (RsvgClipPath *) node;
++        else
++            rsvg_release_node (ctx, node);
+     }
+ 
+     if (state->opacity == 0xFF
+@@ -831,17 +835,20 @@ rsvg_cairo_pop_render_stack (RsvgDrawing
+                               nest ? 0 : render->offset_x,
+                               nest ? 0 : render->offset_y);
+ 
+-    if (lateclip)
++    if (lateclip) {
+         rsvg_cairo_clip (ctx, lateclip, &render->bbox);
++        rsvg_release_node (ctx, (RsvgNode *) lateclip);
++    }
+ 
+     cairo_set_operator (render->cr, state->comp_op);
+ 
+     if (state->mask) {
+         RsvgNode *mask;
+ 
+-        mask = rsvg_defs_lookup (ctx->defs, state->mask);
++        mask = rsvg_acquire_node (ctx, state->mask);
+         if (mask && RSVG_NODE_TYPE (mask) == RSVG_NODE_TYPE_MASK)
+           rsvg_cairo_generate_mask (render->cr, (RsvgMask *) mask, ctx, &render->bbox);
++        rsvg_release_node (ctx, mask);
+     } else if (state->opacity != 0xFF)
+         cairo_paint_with_alpha (render->cr, (double) state->opacity / 255.0);
+     else
+Index: librsvg-2.40.10/rsvg-cairo-render.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-cairo-render.c
++++ librsvg-2.40.10/rsvg-cairo-render.c
+@@ -155,6 +155,7 @@ rsvg_cairo_new_drawing_ctx (cairo_t * cr
+     draw->pango_context = NULL;
+     draw->drawsub_stack = NULL;
+     draw->ptrs = NULL;
++    draw->acquired_nodes = NULL;
+ 
+     rsvg_state_push (draw);
+     state = rsvg_current_state (draw);
+Index: librsvg-2.40.10/rsvg-filter.c
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-filter.c
++++ librsvg-2.40.10/rsvg-filter.c
+@@ -3921,6 +3921,7 @@ rsvg_filter_primitive_image_render_in (R
+     RsvgDrawingCtx *ctx;
+     RsvgFilterPrimitiveImage *upself;
+     RsvgNode *drawable;
++    cairo_surface_t *result;
+ 
+     ctx = context->ctx;
+ 
+@@ -3929,13 +3930,17 @@ rsvg_filter_primitive_image_render_in (R
+     if (!upself->href)
+         return NULL;
+ 
+-    drawable = rsvg_defs_lookup (ctx->defs, upself->href->str);
++    drawable = rsvg_acquire_node (ctx, upself->href->str);
+     if (!drawable)
+         return NULL;
+ 
+     rsvg_current_state (ctx)->affine = context->paffine;
+ 
+-    return rsvg_get_surface_of_node (ctx, drawable, context->width, context->height);
++    result = rsvg_get_surface_of_node (ctx, drawable, context->width, context->height);
++
++    rsvg_release_node (ctx, drawable);
++
++    return result;
+ }
+ 
+ static cairo_surface_t *
+Index: librsvg-2.40.10/rsvg-private.h
+===================================================================
+--- librsvg-2.40.10.orig/rsvg-private.h
++++ librsvg-2.40.10/rsvg-private.h
+@@ -200,6 +200,7 @@ struct RsvgDrawingCtx {
+     GSList *vb_stack;
+     GSList *drawsub_stack;
+     GSList *ptrs;
++    GSList *acquired_nodes;
+ };
+ 
+ /*Abstract base class for context for our backends (one as yet)*/
+@@ -360,6 +361,10 @@ void rsvg_pop_discrete_layer    (RsvgDra
+ G_GNUC_INTERNAL
+ void rsvg_push_discrete_layer   (RsvgDrawingCtx * ctx);
+ G_GNUC_INTERNAL
++RsvgNode *rsvg_acquire_node     (RsvgDrawingCtx * ctx, const char *url);
++G_GNUC_INTERNAL
++void rsvg_release_node          (RsvgDrawingCtx * ctx, RsvgNode *node);
++G_GNUC_INTERNAL
+ void rsvg_render_path           (RsvgDrawingCtx * ctx, const cairo_path_t *path);
+ G_GNUC_INTERNAL
+ void rsvg_render_surface        (RsvgDrawingCtx * ctx, cairo_surface_t *surface,
diff --git a/yocto-poky/meta/recipes-gnome/librsvg/librsvg_2.40.10.bb b/yocto-poky/meta/recipes-gnome/librsvg/librsvg_2.40.10.bb
index a8b0e4f..cb8a73c 100644
--- a/yocto-poky/meta/recipes-gnome/librsvg/librsvg_2.40.10.bb
+++ b/yocto-poky/meta/recipes-gnome/librsvg/librsvg_2.40.10.bb
@@ -12,11 +12,17 @@
 
 inherit autotools pkgconfig gnomebase gtk-doc pixbufcache
 
-SRC_URI += "file://gtk-option.patch"
+SRC_URI += "file://gtk-option.patch \
+            file://CVE-2015-7558_1.patch \
+            file://CVE-2015-7558_2.patch \
+            file://CVE-2015-7558_3.patch \
+            "
 
 SRC_URI[archive.md5sum] = "fadebe2e799ab159169ee3198415ff85"
 SRC_URI[archive.sha256sum] = "965c807438ce90b204e930ff80c92eba1606a2f6fd5ccfd09335c99896dd3479"
 
+CACHED_CONFIGUREVARS = "ac_cv_path_GDK_PIXBUF_QUERYLOADERS=${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders"
+
 EXTRA_OECONF = "--disable-introspection --disable-vala"
 
 # The older ld (2.22) on the host (Centos 6.5) doesn't have the
diff --git a/yocto-poky/meta/recipes-gnome/libsecret/libsecret_0.18.2.bb b/yocto-poky/meta/recipes-gnome/libsecret/libsecret_0.18.2.bb
index cebc83b..8fc0018 100644
--- a/yocto-poky/meta/recipes-gnome/libsecret/libsecret_0.18.2.bb
+++ b/yocto-poky/meta/recipes-gnome/libsecret/libsecret_0.18.2.bb
@@ -4,9 +4,12 @@
 
 inherit gnomebase gtk-doc
 
-DEPENDS = "glib-2.0 libgcrypt"
+DEPENDS = "glib-2.0 libgcrypt gettext-native"
 
 EXTRA_OECONF += "--disable-manpages"
 
 SRC_URI[archive.md5sum] = "23cdf8267d11a26f88f0dbec1e2022ad"
 SRC_URI[archive.sha256sum] = "12fd288b012e1b2b1b54d586cd4c6507885715534644b4534b7ef7d7079ba443"
+
+# http://errors.yoctoproject.org/Errors/Details/20228/
+ARM_INSTRUCTION_SET = "arm"
diff --git a/yocto-poky/meta/recipes-gnome/libwnck/libwnck3_3.14.0.bb b/yocto-poky/meta/recipes-gnome/libwnck/libwnck3_3.14.0.bb
index d0f5175..3ee1ae9 100644
--- a/yocto-poky/meta/recipes-gnome/libwnck/libwnck3_3.14.0.bb
+++ b/yocto-poky/meta/recipes-gnome/libwnck/libwnck3_3.14.0.bb
@@ -13,3 +13,8 @@
 inherit gnomebase
 SRC_URI[archive.md5sum] = "4538672e0d775fadedf10abeb8020047"
 SRC_URI[archive.sha256sum] = "f5080076346609b4c36394b879f3a86b92ced3b90a37cb54c8e9a14f00e7921c"
+
+inherit distro_features_check
+# libxres means x11 only
+REQUIRED_DISTRO_FEATURES = "x11"
+
diff --git a/yocto-poky/meta/recipes-graphics/cairo/cairo.inc b/yocto-poky/meta/recipes-graphics/cairo/cairo.inc
index 1e45318..45651ba 100644
--- a/yocto-poky/meta/recipes-graphics/cairo/cairo.inc
+++ b/yocto-poky/meta/recipes-graphics/cairo/cairo.inc
@@ -9,11 +9,14 @@
 HOMEPAGE = "http://cairographics.org"
 BUGTRACKER = "http://bugs.freedesktop.org"
 SECTION = "libs"
+
 LICENSE = "MPL-1 & LGPLv2.1 & GPLv3+"
 LICENSE_${PN} = "MPL-1 & LGPLv2.1"
 LICENSE_${PN}-dev = "MPL-1 & LGPLv2.1"
 LICENSE_${PN}-gobject = "MPL-1 & LGPLv2.1"
+LICENSE_${PN}-script-interpreter = "MPL-1 & LGPLv2.1"
 LICENSE_${PN}-perf-utils = "GPLv3+"
+
 X11DEPENDS = "virtual/libx11 libsm libxrender libxext"
 DEPENDS = "libpng fontconfig pixman glib-2.0 zlib"
 
diff --git a/yocto-poky/meta/recipes-graphics/cairo/cairo/Manually-transpose-the-matrix-in-_cairo_gl_shader_bi.patch b/yocto-poky/meta/recipes-graphics/cairo/cairo/Manually-transpose-the-matrix-in-_cairo_gl_shader_bi.patch
new file mode 100644
index 0000000..955b7d4
--- /dev/null
+++ b/yocto-poky/meta/recipes-graphics/cairo/cairo/Manually-transpose-the-matrix-in-_cairo_gl_shader_bi.patch
@@ -0,0 +1,49 @@
+Upstream-Status: Backport
+
+  http://lists.cairographics.org/archives/cairo/2015-May/026253.html
+  http://cgit.freedesktop.org/cairo/commit/?id=f52f0e2feb1ad0a4de23c475a8c020d41a1764a8
+
+Signed-off-by: Andre McCurdy <armccurdy@gmail.com>
+
+
+From f52f0e2feb1ad0a4de23c475a8c020d41a1764a8 Mon Sep 17 00:00:00 2001
+From: Zan Dobersek <zdobersek@igalia.com>
+Date: Fri, 8 May 2015 01:50:25 -0700
+Subject: [PATCH] Manually transpose the matrix in _cairo_gl_shader_bind_matrix()
+
+To maintain compatibility with OpenGL ES 2.0, the matrix in
+_cairo_gl_shader_bind_matrix() should be manually transposed,
+and GL_FALSE passed as the transpose argument to the
+glUniformMatrix3fv() call as it is the only valid value for
+that parameter in OpenGL ES 2.0.
+
+Reviewed-by: Bryce Harrington <bryce@osg.samsung.com>
+Acked-by: "Henry (Yu) Song" <henry.song@samsung.com>
+---
+ src/cairo-gl-shaders.c | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/src/cairo-gl-shaders.c b/src/cairo-gl-shaders.c
+index 2710606..fe975d2 100644
+--- a/src/cairo-gl-shaders.c
++++ b/src/cairo-gl-shaders.c
+@@ -973,12 +973,12 @@ _cairo_gl_shader_bind_matrix (cairo_gl_context_t *ctx,
+ {
+     cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
+     float gl_m[9] = {
+-	m->xx, m->xy, m->x0,
+-	m->yx, m->yy, m->y0,
+-	0,     0,     1
++	m->xx, m->yx, 0,
++	m->xy, m->yy, 0,
++	m->x0, m->y0, 1
+     };
+     assert (location != -1);
+-    dispatch->UniformMatrix3fv (location, 1, GL_TRUE, gl_m);
++    dispatch->UniformMatrix3fv (location, 1, GL_FALSE, gl_m);
+ }
+ 
+ void
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.2.bb b/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.2.bb
index 3817dbf..75cde0a 100644
--- a/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.2.bb
+++ b/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.2.bb
@@ -3,6 +3,7 @@
 LIC_FILES_CHKSUM = "file://COPYING;md5=e73e999e0c72b5ac9012424fa157ad77"
 
 SRC_URI = "http://cairographics.org/releases/cairo-${PV}.tar.xz"
+SRC_URI += "file://Manually-transpose-the-matrix-in-_cairo_gl_shader_bi.patch"
 
 SRC_URI[md5sum] = "e1cdfaf1c6c995c4d4c54e07215b0118"
 SRC_URI[sha256sum] = "c919d999ddb1bbbecd4bbe65299ca2abd2079c7e13d224577895afa7005ecceb"
diff --git a/yocto-poky/meta/recipes-graphics/directfb/directfb.inc b/yocto-poky/meta/recipes-graphics/directfb/directfb.inc
index 446aaead..cbd4014 100644
--- a/yocto-poky/meta/recipes-graphics/directfb/directfb.inc
+++ b/yocto-poky/meta/recipes-graphics/directfb/directfb.inc
@@ -22,6 +22,9 @@
 
 LDFLAGS_append =" -lts -lm"
 
+# Workaround for linking issues seen with armv7a + gold
+LDFLAGS_append_armv7a = "${@base_contains('DISTRO_FEATURES', 'ld-is-gold', ' -fuse-ld=bfd ', '', d)}"
+
 BINCONFIG = "${bindir}/directfb-config"
 
 inherit autotools binconfig-disabled pkgconfig
diff --git a/yocto-poky/meta/recipes-graphics/libsdl/libsdl_1.2.15.bb b/yocto-poky/meta/recipes-graphics/libsdl/libsdl_1.2.15.bb
index 266bd42..c0d5c6a 100644
--- a/yocto-poky/meta/recipes-graphics/libsdl/libsdl_1.2.15.bb
+++ b/yocto-poky/meta/recipes-graphics/libsdl/libsdl_1.2.15.bb
@@ -13,8 +13,9 @@
 PROVIDES = "virtual/libsdl"
 
 DEPENDS = "${@bb.utils.contains('DISTRO_FEATURES', 'directfb', 'directfb', '', d)} \
-           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virtual/libgl libglu', '', d)} \
+           ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virtual/libgl', '', d)} \
            ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'virtual/libx11 libxext libxrandr libxrender', '', d)} \
+           ${@bb.utils.contains('DISTRO_FEATURES', 'x11 opengl', 'libglu', '', d)} \
            tslib"
 DEPENDS_class-nativesdk = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'virtual/nativesdk-libx11 nativesdk-libxrandr nativesdk-libxrender nativesdk-libxext', '', d)}"
 
diff --git a/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.3.bb b/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.3.bb
index 97f64f3..f138f97 100644
--- a/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.3.bb
+++ b/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.3.bb
@@ -39,7 +39,7 @@
     ${@bb.utils.contains('DISTRO_FEATURES', 'alsa', 'alsa', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'directfb', 'directfb', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'pulseaudio', 'pulseaudio', '', d)} \
-    ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
+    ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland gles2', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)} \
 "
 PACKAGECONFIG[alsa]       = "--enable-alsa --disable-alsatest,--disable-alsa,alsa-lib,"
diff --git a/yocto-poky/meta/recipes-graphics/mesa/mesa-demos/0010-sharedtex_mt-fix-rendering-thread-hang.patch b/yocto-poky/meta/recipes-graphics/mesa/mesa-demos/0010-sharedtex_mt-fix-rendering-thread-hang.patch
new file mode 100644
index 0000000..04e1b44
--- /dev/null
+++ b/yocto-poky/meta/recipes-graphics/mesa/mesa-demos/0010-sharedtex_mt-fix-rendering-thread-hang.patch
@@ -0,0 +1,43 @@
+From 525fa9ded72d22b53c5eb366f61e2ac1d407a2db Mon Sep 17 00:00:00 2001
+From: Awais Belal <awais_belal@mentor.com>
+Date: Thu, 8 Oct 2015 13:49:31 +0500
+Subject: [PATCH] sharedtex_mt: fix rendering thread hang
+
+XNextEvent is a blocking call which locks up the display mutex
+this causes the rendering threads to hang when they try call
+glXSwapBuffers() as that tries to take the same mutex in
+underlying calls through XCopyArea().
+So we only go to XNextEvent when it has at least one event
+and we wouldn't lock indefinitely.
+
+Signed-off-by: Awais Belal <awais_belal@mentor.com>
+Upstream-Status: Backport (2b304e765695d385fd3bf414e6e444020bedb0a8)
+
+---
+ src/xdemos/sharedtex_mt.c | 9 +++++++--
+ 1 file changed, 7 insertions(+), 2 deletions(-)
+
+diff --git a/src/xdemos/sharedtex_mt.c b/src/xdemos/sharedtex_mt.c
+index a90903a..1d503c4 100644
+--- a/src/xdemos/sharedtex_mt.c
++++ b/src/xdemos/sharedtex_mt.c
+@@ -420,9 +420,14 @@ Resize(struct window *h, unsigned int width, unsigned int height)
+ static void
+ EventLoop(void)
+ {
++   int i;
++   XEvent event;
+    while (1) {
+-      int i;
+-      XEvent event;
++      /* Do we have an event? */
++      if (XPending(gDpy) == 0) {
++         usleep(10000);
++         continue;
++      }
+       XNextEvent(gDpy, &event);
+       for (i = 0; i < NumWindows; i++) {
+ 	 struct window *h = &Windows[i];
+-- 
+1.9.1
+
diff --git a/yocto-poky/meta/recipes-graphics/mesa/mesa-demos_8.2.0.bb b/yocto-poky/meta/recipes-graphics/mesa/mesa-demos_8.2.0.bb
index e451642..0094f55 100644
--- a/yocto-poky/meta/recipes-graphics/mesa/mesa-demos_8.2.0.bb
+++ b/yocto-poky/meta/recipes-graphics/mesa/mesa-demos_8.2.0.bb
@@ -19,6 +19,7 @@
     file://0007-Install-few-more-test-programs.patch \
     file://0008-glsl-perf-Add-few-missing-.glsl-.vert-.frag-files-to.patch \
     file://0009-glsl-perf-Install-.glsl-.vert-.frag-files.patch \
+    file://0010-sharedtex_mt-fix-rendering-thread-hang.patch \
 "
 SRC_URI[md5sum] = "72613a2c8c013716db02e3ff59d29061"
 SRC_URI[sha256sum] = "e4bfecb5816ddd4b7b37c1bc876b63f1f7f06fda5879221a9774d0952f90ba92"
diff --git a/yocto-poky/meta/recipes-graphics/piglit/piglit_git.bb b/yocto-poky/meta/recipes-graphics/piglit/piglit_git.bb
index 0d825c9..55ad78c 100644
--- a/yocto-poky/meta/recipes-graphics/piglit/piglit_git.bb
+++ b/yocto-poky/meta/recipes-graphics/piglit/piglit_git.bb
@@ -18,6 +18,12 @@
 # depends on virtual/libx11
 REQUIRED_DISTRO_FEATURES = "x11"
 
+# The built scripts go into the temporary directory according to tempfile
+# (typically /tmp) which can race if multiple builds happen on the same machine,
+# so tell it to use a directory in ${B} to avoid overwriting.
+export TEMP = "${B}/temp/"
+do_compile[dirs] =+ "${B}/temp/"
+
 PACKAGECONFIG ??= ""
 PACKAGECONFIG[freeglut] = "-DPIGLIT_USE_GLUT=1,-DPIGLIT_USE_GLUT=0,freeglut,"
 
diff --git a/yocto-poky/meta/recipes-graphics/waffle/waffle/0001-third_party-threads-Use-PTHREAD_MUTEX_RECURSIVE-by-d.patch b/yocto-poky/meta/recipes-graphics/waffle/waffle/0001-third_party-threads-Use-PTHREAD_MUTEX_RECURSIVE-by-d.patch
new file mode 100644
index 0000000..a0c826e
--- /dev/null
+++ b/yocto-poky/meta/recipes-graphics/waffle/waffle/0001-third_party-threads-Use-PTHREAD_MUTEX_RECURSIVE-by-d.patch
@@ -0,0 +1,54 @@
+From 3b9b8f5f6d1b99af43e95ec0868404e552a85b73 Mon Sep 17 00:00:00 2001
+From: Emil Velikov <emil.l.velikov@gmail.com>
+Date: Thu, 19 Mar 2015 22:26:11 +0000
+Subject: [PATCH] third_party/threads: Use PTHREAD_MUTEX_RECURSIVE by default
+
+PTHREAD_MUTEX_RECURSIVE_NP was used for compatibility with old glibc.
+Although due to the_GNU_SOURCES define the portable,
+PTHREAD_MUTEX_RECURSIVE will be available for Linuxes since at least
+1998. Simplify things giving us compatibility with musl which
+apparently does not provide the non-portable define.
+
+Inspired by almost identical commit in mesa aead7fe2e2b(c11/threads: Use
+PTHREAD_MUTEX_RECURSIVE by default) by Felix Janda.
+
+Signed-off-by: Emil Velikov <emil.l.velikov@gmail.com>
+Reviewed-by: Chad Versace <chad.versace@intel.com>
+---
+Upstream-Status: Backport
+
+ third_party/threads/threads_posix.c | 10 ++++------
+ 1 file changed, 4 insertions(+), 6 deletions(-)
+
+diff --git a/third_party/threads/threads_posix.c b/third_party/threads/threads_posix.c
+index 5835e43..e122bf9 100644
+--- a/third_party/threads/threads_posix.c
++++ b/third_party/threads/threads_posix.c
+@@ -26,6 +26,9 @@
+  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+  * DEALINGS IN THE SOFTWARE.
+  */
++
++#define _GNU_SOURCE
++
+ #include <stdlib.h>
+ #ifndef assert
+ #include <assert.h>
+@@ -150,13 +153,8 @@ int mtx_init(mtx_t *mtx, int type)
+       && type != (mtx_try|mtx_recursive))
+         return thrd_error;
+     pthread_mutexattr_init(&attr);
+-    if ((type & mtx_recursive) != 0) {
+-#if defined(__linux__) || defined(__linux)
+-        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE_NP);
+-#else
++    if ((type & mtx_recursive) != 0)
+         pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+-#endif
+-    }
+     pthread_mutex_init(mtx, &attr);
+     pthread_mutexattr_destroy(&attr);
+     return thrd_success;
+-- 
+2.5.2
+
diff --git a/yocto-poky/meta/recipes-graphics/waffle/waffle_1.5.1.bb b/yocto-poky/meta/recipes-graphics/waffle/waffle_1.5.1.bb
index b8aa05a..af84020 100644
--- a/yocto-poky/meta/recipes-graphics/waffle/waffle_1.5.1.bb
+++ b/yocto-poky/meta/recipes-graphics/waffle/waffle_1.5.1.bb
@@ -3,7 +3,9 @@
 LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=4c5154407c2490750dd461c50ad94797 \
                     file://include/waffle/waffle.h;endline=24;md5=61dbf8697f61c78645e75a93c585b1bf"
 
-SRC_URI = "http://waffle-gl.org/files/release/${BPN}-${PV}/${BPN}-${PV}.tar.xz"
+SRC_URI = "http://waffle-gl.org/files/release/${BPN}-${PV}/${BPN}-${PV}.tar.xz \
+           file://0001-third_party-threads-Use-PTHREAD_MUTEX_RECURSIVE-by-d.patch \
+          "
 SRC_URI[md5sum] = "c0d802bc3d0aba87c51e423a3a8bdd69"
 SRC_URI[sha256sum] = "cbab0e926515064e818bf089a5af04be33307e5f40d07659fb40d59b2bfe20aa"
 
diff --git a/yocto-poky/meta/recipes-graphics/xorg-app/xwininfo_1.1.3.bb b/yocto-poky/meta/recipes-graphics/xorg-app/xwininfo_1.1.3.bb
index 7a45241..0c23d19 100644
--- a/yocto-poky/meta/recipes-graphics/xorg-app/xwininfo_1.1.3.bb
+++ b/yocto-poky/meta/recipes-graphics/xorg-app/xwininfo_1.1.3.bb
@@ -7,7 +7,7 @@
 and a number of other items."
 
 LIC_FILES_CHKSUM = "file://COPYING;md5=78976cd3115f6faf615accc4e094d90e"
-DEPENDS += "libxext libxmu"
+DEPENDS += "libxext libxmu gettext-native"
 
 PE = "0"
 
diff --git a/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb.inc b/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb.inc
index fe31f20..e40ae77 100644
--- a/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb.inc
+++ b/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb.inc
@@ -14,7 +14,9 @@
 
 SRC_URI = "http://xcb.freedesktop.org/dist/libxcb-${PV}.tar.bz2 \
            file://xcbincludedir.patch \
-           file://disable-check.patch"
+           file://disable-check.patch \
+           file://gcc-mips-pr68302-mips-workaround.patch \
+          "
 
 PACKAGES_DYNAMIC = "^libxcb-.*"
 
diff --git a/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb/gcc-mips-pr68302-mips-workaround.patch b/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb/gcc-mips-pr68302-mips-workaround.patch
new file mode 100644
index 0000000..698d038
--- /dev/null
+++ b/yocto-poky/meta/recipes-graphics/xorg-lib/libxcb/gcc-mips-pr68302-mips-workaround.patch
@@ -0,0 +1,22 @@
+Reduce debug info for xcb.c since on mips we run into a gcc5 bug
+
+https://gcc.gnu.org/bugzilla/show_bug.cgi?id=68302
+
+This patch is a workaround to get past the gcc bug until its resolved.
+it should have minimal impact on libxcb while make it work.
+
+Upstream-Status: Inappropriate [OE-Specific]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Index: libxcb-1.11.1/src/Makefile.am
+===================================================================
+--- libxcb-1.11.1.orig/src/Makefile.am
++++ libxcb-1.11.1/src/Makefile.am
+@@ -188,6 +188,7 @@ EXTSOURCES += xkb.c
+ if BUILD_XKB
+ lib_LTLIBRARIES += libxcb-xkb.la
+ libxcb_xkb_la_LDFLAGS = -version-info 1:0:0 -no-undefined
++CFLAGS += -g1
+ libxcb_xkb_la_LIBADD = $(XCB_LIBS)
+ nodist_libxcb_xkb_la_SOURCES = xkb.c xkb.h
+ endif
diff --git a/yocto-poky/meta/recipes-graphics/xorg-lib/pixman/0001-v3-test-add-a-check-for-FE_DIVBYZERO.patch b/yocto-poky/meta/recipes-graphics/xorg-lib/pixman/0001-v3-test-add-a-check-for-FE_DIVBYZERO.patch
new file mode 100644
index 0000000..a60df5f
--- /dev/null
+++ b/yocto-poky/meta/recipes-graphics/xorg-lib/pixman/0001-v3-test-add-a-check-for-FE_DIVBYZERO.patch
@@ -0,0 +1,65 @@
+From fcd5eb9bd0e8674a6f4987a8fce7dc1ba8f9320c Mon Sep 17 00:00:00 2001
+From: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
+Date: Thu, 17 Sep 2015 03:08:36 +0200
+Subject: [PATCH] [v3] test: add a check for FE_DIVBYZERO
+
+Some architectures, such as Microblaze and Nios2, currently do not
+implement FE_DIVBYZERO, even though they have <fenv.h> and
+feenableexcept(). This commit adds a configure.ac check to verify
+whether FE_DIVBYZERO is defined or not, and if not, disables the
+problematic code in test/utils.c.
+
+Signed-off-by: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
+Signed-off-by: Marek Vasut <marex@denx.de>
+Upstream-Status: Submitted
+---
+Changes v1 -> v2:
+
+ * Use the ac_cv_have_decl_FE_DIVBYZERO variable, which is
+   automatically set by AC_CHECK_DECL, to decide whether or not
+   HAVE_FEDIVBYZERO should be defined.
+
+Changes v2 -> v3:
+
+ * Use action-if-yes of AC_CHECK_DECL as suggested in
+   http://lists.freedesktop.org/archives/pixman/2014-February/003176.html
+---
+ configure.ac | 5 +++++
+ test/utils.c | 2 ++
+ 2 files changed, 7 insertions(+)
+
+diff --git a/configure.ac b/configure.ac
+index f93cc30..424bfd3 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -891,6 +891,11 @@ if test x$have_feenableexcept = xyes; then
+    AC_DEFINE(HAVE_FEENABLEEXCEPT, 1, [Whether we have feenableexcept()])
+ fi
+ 
++AC_CHECK_DECL([FE_DIVBYZERO],
++	[AC_DEFINE(HAVE_FEDIVBYZERO, 1, [Whether we have FE_DIVBYZERO])],
++	[],
++	[[#include <fenv.h>]])
++
+ AC_CHECK_FUNC(gettimeofday, have_gettimeofday=yes, have_gettimeofday=no)
+ AC_CHECK_HEADER(sys/time.h, have_sys_time_h=yes, have_sys_time_h=no)
+ if test x$have_gettimeofday = xyes && test x$have_sys_time_h = xyes; then
+diff --git a/test/utils.c b/test/utils.c
+index 222d4d5..8657966 100644
+--- a/test/utils.c
++++ b/test/utils.c
+@@ -966,9 +966,11 @@ enable_divbyzero_exceptions (void)
+ {
+ #ifdef HAVE_FENV_H
+ #ifdef HAVE_FEENABLEEXCEPT
++#ifdef HAVE_FEDIVBYZERO
+     feenableexcept (FE_DIVBYZERO);
++#endif
+ #endif
+ #endif
+ }
+ 
+ void
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb b/yocto-poky/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb
index eae59b6..317a568 100644
--- a/yocto-poky/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb
+++ b/yocto-poky/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb
@@ -31,6 +31,7 @@
             file://0001-ARM-qemu-related-workarounds-in-cpu-features-detecti.patch \
             file://mips-export-revert.patch \
 	    file://asm_include.patch \
+	    file://0001-v3-test-add-a-check-for-FE_DIVBYZERO.patch \
 "
 
 SRC_URI[md5sum] = "8a9e8f14743a39cf303803f369c1f344"
diff --git a/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc b/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
index cc1c02b..9881c94 100644
--- a/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
+++ b/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
@@ -23,6 +23,9 @@
 
 inherit autotools pkgconfig
 
+inherit distro_features_check
+REQUIRED_DISTRO_FEATURES = "x11"
+
 PROTO_DEPS = "randrproto renderproto fixesproto damageproto xextproto xproto xf86dgaproto xf86miscproto xf86vidmodeproto compositeproto recordproto resourceproto videoproto scrnsaverproto  xineramaproto fontsproto kbproto inputproto bigreqsproto xcmiscproto presentproto"
 LIB_DEPS = "pixman libxfont xtrans libxau libxext libxdmcp libdrm libxkbfile libpciaccess openssl libgcrypt"
 DEPENDS = "${PROTO_DEPS} ${LIB_DEPS} font-util"
diff --git a/yocto-poky/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb b/yocto-poky/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
index 4753094..445d03a 100644
--- a/yocto-poky/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
+++ b/yocto-poky/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
@@ -4,7 +4,7 @@
 
 DEPENDS = "git-native"
 
-SRCREV = "bd144d43ca5b1eaf9e727bced4ce3b61b642297c"
+SRCREV = "17d89d1861b532bbf1a81c1f024953e440db8de7"
 PR = "r12"
 PV = "0.2+git${SRCPV}"
 
diff --git a/yocto-poky/meta/recipes-kernel/kmod/kmod.inc b/yocto-poky/meta/recipes-kernel/kmod/kmod.inc
index e9aa67d..71ffdf8 100644
--- a/yocto-poky/meta/recipes-kernel/kmod/kmod.inc
+++ b/yocto-poky/meta/recipes-kernel/kmod/kmod.inc
@@ -16,7 +16,7 @@
                    "
 inherit autotools gtk-doc
 
-SRCREV = "0d833715eaa65636dda2705b89359a1e0154dc58"
+SRCREV = "114ec87c85c35a2bd3682f9f891e494127be6fb5"
 # Lookout for PV bump too when SRCREV is changed
 PV = "21+git${SRCPV}"
 
diff --git a/yocto-poky/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb b/yocto-poky/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
index 4939ca6..0878ab1 100644
--- a/yocto-poky/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
+++ b/yocto-poky/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
@@ -260,10 +260,10 @@
   /lib/firmware/LICENCE.rtlwifi_firmware.txt \
 "
 FILES_${PN}-rtl8192cu = " \
-  /lib/firmware/rtlwifi/rtl8192cufw.bin \
+  /lib/firmware/rtlwifi/rtl8192cufw*.bin \
 "
 FILES_${PN}-rtl8192ce = " \
-  /lib/firmware/rtlwifi/rtl8192cfw.bin \
+  /lib/firmware/rtlwifi/rtl8192cfw*.bin \
 "
 FILES_${PN}-rtl8192su = " \
   /lib/firmware/rtlwifi/rtl8712u.bin \
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
index 4d3d5c8..bfeabbe 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
@@ -5,7 +5,7 @@
 
 SRCREV_machine ?= "8281915527ba8d79e59906c02f28e7aa11424723"
 SRCREV_machine_qemuppc ?= "5e7d372ebc327f28656fc972fab55605eea8aec3"
-SRCREV_meta ?= "3a09b38a9f5015c56d99d17aa7c2f200c566249b"
+SRCREV_meta ?= "060fa80b7996250001ee90c50a4978c8fdb87fc4"
 
 SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.14.git;branch=${KBRANCH};name=machine \
            git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-3.14;destsuffix=${KMETA}"
@@ -24,5 +24,6 @@
 # Functionality flags
 KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemuall=" cfg/virtio.scc"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc"
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
index 06483c3..b441bf6 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.1.bb
@@ -1,14 +1,14 @@
-KBRANCH ?= "standard/preempt-rt"
+KBRANCH ?= "standard/preempt-rt/base"
 
 require recipes-kernel/linux/linux-yocto.inc
 
-SRCREV_machine ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_meta ?= "429f9e2ff0649b8c9341345622545d874d5e303a"
+SRCREV_machine ?= "3188436876d5eaff8d48f82064367d4a65c3aa97"
+SRCREV_meta ?= "46bb64d605fd336d99fa05bab566b9553b40b4b4"
 
 SRC_URI = "git://git.yoctoproject.org/linux-yocto-4.1.git;branch=${KBRANCH};name=machine \
            git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-4.1;destsuffix=${KMETA}"
 
-LINUX_VERSION ?= "4.1.6"
+LINUX_VERSION ?= "4.1.15"
 
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
@@ -22,5 +22,6 @@
 # Functionality flags
 KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc features/taskstats/taskstats.scc"
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemuall=" cfg/virtio.scc"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc"
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
index 412c817..e13cb80 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
@@ -10,7 +10,7 @@
 KCONF_BSP_AUDIT_LEVEL = "2"
 
 SRCREV_machine ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
-SRCREV_meta ?= "3a09b38a9f5015c56d99d17aa7c2f200c566249b"
+SRCREV_meta ?= "060fa80b7996250001ee90c50a4978c8fdb87fc4"
 
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.1.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.1.bb
index 061205e..4caa252 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.1.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.1.bb
@@ -4,13 +4,13 @@
 
 require recipes-kernel/linux/linux-yocto.inc
 
-LINUX_VERSION ?= "4.1.6"
+LINUX_VERSION ?= "4.1.15"
 
 KMETA = "kernel-meta"
 KCONF_BSP_AUDIT_LEVEL = "2"
 
-SRCREV_machine ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_meta ?= "429f9e2ff0649b8c9341345622545d874d5e303a"
+SRCREV_machine ?= "788dfc9859321c09f1c58696bf8998f90ccb4f51"
+SRCREV_meta ?= "46bb64d605fd336d99fa05bab566b9553b40b4b4"
 
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto.inc b/yocto-poky/meta/recipes-kernel/linux/linux-yocto.inc
index 3b41a61..81ffa24 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto.inc
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto.inc
@@ -11,6 +11,10 @@
 KERNEL_CC_append_aarch64 = " ${TOOLCHAIN_OPTIONS}"
 KERNEL_LD_append_aarch64 = " ${TOOLCHAIN_OPTIONS}"
 
+DEPENDS_append_nios2 = " libgcc"
+KERNEL_CC_append_nios2 = " ${TOOLCHAIN_OPTIONS}"
+KERNEL_LD_append_nios2 = " ${TOOLCHAIN_OPTIONS}"
+
 # A KMACHINE is the mapping of a yocto $MACHINE to what is built
 # by the kernel. This is typically the branch that should be built,
 # and it can be specific to the machine or shared
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.14.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.14.bb
index b6b2e5a..db93d23 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.14.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.14.bb
@@ -15,16 +15,18 @@
 SRCREV_machine_qemuarm64 ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
 SRCREV_machine_qemumips ?= "6ed76ec26b120f65f8547c8612b7334bd2745ec9"
 SRCREV_machine_qemuppc ?= "a86ade84b2e142c0fd7536d96477107b6d07db5c"
-SRCREV_machine_qemux86 ?= "af1f7f586bd32d39c057f17606991b887eadb389"
-SRCREV_machine_qemux86-64 ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
+SRCREV_machine_qemux86 ?= "d9bf859dfae6f88b88b157119c20ae4d5e51420a"
+SRCREV_machine_qemux86-64 ?= "93b2b800d85c1565af7d96f3776dc38c85ae1902"
 SRCREV_machine_qemumips64 ?= "a63d40b860a6d255005a541894d53729090b40ea"
 SRCREV_machine ?= "578602a722dbfb260801f3b37c6eafd2abb2340d"
-SRCREV_meta ?= "3a09b38a9f5015c56d99d17aa7c2f200c566249b"
+SRCREV_meta ?= "060fa80b7996250001ee90c50a4978c8fdb87fc4"
 
 SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.14.git;branch=${KBRANCH};name=machine; \
            git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-3.14;destsuffix=${KMETA}"
 
 LINUX_VERSION ?= "3.14.36"
+LINUX_VERSION_qemux86 ?= "3.14.39"
+LINUX_VERSION_qemux86-64 ?= "3.14.39"
 
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
@@ -36,6 +38,7 @@
 # Functionality flags
 KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemuall=" cfg/virtio.scc"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.19.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.19.bb
index e8c1640..baa575b 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.19.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_3.19.bb
@@ -11,14 +11,14 @@
 KBRANCH_qemux86-64 ?= "standard/common-pc-64/base"
 KBRANCH_qemumips64 ?= "standard/mti-malta64"
 
-SRCREV_machine_qemuarm ?= "963b4df663dba2584ac864e0c016825de0046558"
+SRCREV_machine_qemuarm ?= "857048f10bfe7089ca6007e72431f1c098b07115"
 SRCREV_machine_qemuarm64 ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
 SRCREV_machine_qemumips ?= "cedbbc7b5e72df2e820bb9e7885f12132c5e2fff"
 SRCREV_machine_qemuppc ?= "23a83386e10986a63e6cef712a045445499d002b"
-SRCREV_machine_qemux86 ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
-SRCREV_machine_qemux86-64 ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
+SRCREV_machine_qemux86 ?= "1583bf79b946cd5581d84d8c369b819a5ecb94b4"
+SRCREV_machine_qemux86-64 ?= "1583bf79b946cd5581d84d8c369b819a5ecb94b4"
 SRCREV_machine_qemumips64 ?= "3eb70cea3532e22ab1b6da9864446621229e6616"
-SRCREV_machine ?= "e152349de59b43b2a75f2c332b44171df461d5a0"
+SRCREV_machine ?= "151571a39785218a57c3ae3355cd63694890cc8d"
 SRCREV_meta ?= "1016714868249d64fc16692fd7679672b1efa17b"
 
 SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.19.git;name=machine;branch=${KBRANCH}; \
@@ -36,6 +36,7 @@
 # Functionality flags
 KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemuall=" cfg/virtio.scc"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.1.bb b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.1.bb
index 46d4616..1bb7336 100644
--- a/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.1.bb
+++ b/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.1.bb
@@ -11,20 +11,22 @@
 KBRANCH_qemux86-64 ?= "standard/base"
 KBRANCH_qemumips64 ?= "standard/mti-malta64"
 
-SRCREV_machine_qemuarm ?= "3c1245d162ccb55de1af42bcf3dbf690457bf9e4"
-SRCREV_machine_qemuarm64 ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_machine_qemumips ?= "4132a691d0908d10b8f07ce7ece02e6dc94e17bc"
-SRCREV_machine_qemuppc ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_machine_qemux86 ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_machine_qemux86-64 ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_machine_qemumips64 ?= "033e1aa633465449edf544eb81adda0caf16ec60"
-SRCREV_machine ?= "59b8c4f5e8ddb9c33c62fff22204fe2b0d8c703e"
-SRCREV_meta ?= "429f9e2ff0649b8c9341345622545d874d5e303a"
+SRCREV_machine_qemuarm ?= "cf760f381c5e1e58d0c3372d66f4dfdc33f0984c"
+SRCREV_machine_qemuarm64 ?= "788dfc9859321c09f1c58696bf8998f90ccb4f51"
+SRCREV_machine_qemumips ?= "aa46295ab927bd5c960930c377855dbc4e57b195"
+SRCREV_machine_qemuppc ?= "788dfc9859321c09f1c58696bf8998f90ccb4f51"
+SRCREV_machine_qemux86 ?= "2e0ac7b6c4e3ada23a84756287e9b7051ace939a"
+SRCREV_machine_qemux86-64 ?= "2e0ac7b6c4e3ada23a84756287e9b7051ace939a"
+SRCREV_machine_qemumips64 ?= "949c0f2cbb4cf902478d009a7d38b6e4fb29e7c4"
+SRCREV_machine ?= "788dfc9859321c09f1c58696bf8998f90ccb4f51"
+SRCREV_meta ?= "46bb64d605fd336d99fa05bab566b9553b40b4b4"
 
 SRC_URI = "git://git.yoctoproject.org/linux-yocto-4.1.git;name=machine;branch=${KBRANCH}; \
            git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-4.1;destsuffix=${KMETA}"
 
-LINUX_VERSION ?= "4.1.6"
+LINUX_VERSION ?= "4.1.15"
+LINUX_VERSION_qemux86 ?= "4.1.17"
+LINUX_VERSION_qemux86-64 ?= "4.1.17"
 
 PV = "${LINUX_VERSION}+git${SRCPV}"
 
@@ -36,6 +38,7 @@
 # Functionality flags
 KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
 KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemuall=" cfg/virtio.scc"
 KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
 KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/yocto-poky/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-sessiond-disable-match-app-event-by-name.patch b/yocto-poky/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-sessiond-disable-match-app-event-by-name.patch
new file mode 100644
index 0000000..ac1f34b
--- /dev/null
+++ b/yocto-poky/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-sessiond-disable-match-app-event-by-name.patch
@@ -0,0 +1,58 @@
+From 700c5a9d4dc7b552926b8ddcbba91cc13312aba0 Mon Sep 17 00:00:00 2001
+From: Jonathan Rajotte <jonathan.rajotte-julien@efficios.com>
+Date: Wed, 9 Sep 2015 17:08:20 -0400
+Subject: [PATCH] Fix: sessiond: disable: match app event by name
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+The use of a simple lookup and match on event name is insufficient
+to identify the corresponding ust app event.
+
+Fixes #914
+
+Signed-off-by: Jonathan Rajotte <jonathan.rajotte-julien@efficios.com>
+Signed-off-by: Jérémie Galarneau <jeremie.galarneau@efficios.com>
+
+Upstream-Status: Backport
+
+Signed-off-by: Li Zhou <li.zhou@windriver.com>
+---
+ src/bin/lttng-sessiond/ust-app.c |   10 +++++-----
+ 1 file changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/src/bin/lttng-sessiond/ust-app.c b/src/bin/lttng-sessiond/ust-app.c
+index 4066b06..53a6f93 100644
+--- a/src/bin/lttng-sessiond/ust-app.c
++++ b/src/bin/lttng-sessiond/ust-app.c
+@@ -3873,7 +3873,7 @@ int ust_app_disable_event_glb(struct ltt_ust_session *usess,
+ {
+ 	int ret = 0;
+ 	struct lttng_ht_iter iter, uiter;
+-	struct lttng_ht_node_str *ua_chan_node, *ua_event_node;
++	struct lttng_ht_node_str *ua_chan_node;
+ 	struct ust_app *app;
+ 	struct ust_app_session *ua_sess;
+ 	struct ust_app_channel *ua_chan;
+@@ -3910,14 +3910,14 @@ int ust_app_disable_event_glb(struct ltt_ust_session *usess,
+ 		}
+ 		ua_chan = caa_container_of(ua_chan_node, struct ust_app_channel, node);
+ 
+-		lttng_ht_lookup(ua_chan->events, (void *)uevent->attr.name, &uiter);
+-		ua_event_node = lttng_ht_iter_get_node_str(&uiter);
+-		if (ua_event_node == NULL) {
++		ua_event = find_ust_app_event(ua_chan->events, uevent->attr.name,
++				uevent->filter, uevent->attr.loglevel,
++				uevent->exclusion);
++		if (ua_event == NULL) {
+ 			DBG2("Event %s not found in channel %s for app pid %d."
+ 					"Skipping", uevent->attr.name, uchan->name, app->pid);
+ 			continue;
+ 		}
+-		ua_event = caa_container_of(ua_event_node, struct ust_app_event, node);
+ 
+ 		ret = disable_ust_app_event(ua_sess, ua_event, app);
+ 		if (ret < 0) {
+-- 
+1.7.9.5
+
diff --git a/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_2.6.0.bb b/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_2.6.0.bb
index 6397a98..909acc3 100644
--- a/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_2.6.0.bb
+++ b/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_2.6.0.bb
@@ -28,6 +28,7 @@
            file://extern-decls.patch \
            file://run-ptest \
            file://lttng-tools-Fix-live-timer-calculation-error.patch \
+           file://0001-Fix-sessiond-disable-match-app-event-by-name.patch \
 	  "
 
 S = "${WORKDIR}/git"
@@ -37,8 +38,6 @@
 USERADD_PACKAGES = "${PN}"
 GROUPADD_PARAM_${PN} = "tracing"
 
-export KERNELDIR="${STAGING_KERNEL_DIR}"
-
 FILES_${PN} += "${libdir}/lttng/libexec/* ${datadir}/xml/lttng \
                 ${libdir}/python${PYTHON_BASEVERSION}/site-packages/*"
 FILES_${PN}-dbg += "${libdir}/lttng/libexec/.debug \
diff --git a/yocto-poky/meta/recipes-kernel/oprofile/oprofileui-server_git.bb b/yocto-poky/meta/recipes-kernel/oprofile/oprofileui-server_git.bb
index eb3b78b..cc3477b 100644
--- a/yocto-poky/meta/recipes-kernel/oprofile/oprofileui-server_git.bb
+++ b/yocto-poky/meta/recipes-kernel/oprofile/oprofileui-server_git.bb
@@ -9,7 +9,7 @@
            file://init \
            file://oprofileui-server.service "
 
-DEPENDS += "intltool-native"
+DEPENDS += "intltool-native gettext-native"
 
 EXTRA_OECONF += "--disable-client --enable-server"
 
diff --git a/yocto-poky/meta/recipes-kernel/oprofile/oprofileui_git.bb b/yocto-poky/meta/recipes-kernel/oprofile/oprofileui_git.bb
index bb69d54..86f3d8e 100644
--- a/yocto-poky/meta/recipes-kernel/oprofile/oprofileui_git.bb
+++ b/yocto-poky/meta/recipes-kernel/oprofile/oprofileui_git.bb
@@ -1,6 +1,9 @@
 require oprofileui.inc
 
-DEPENDS += "gtk+ libglade libxml2 avahi-ui gconf"
+DEPENDS += "gtk+ libglade libxml2 avahi-ui gconf gettext-native"
+
+inherit distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 SRCREV = "389e1875af4721d52c7e65cf9cfffb69b0ed6a59"
 PV = "0.0+git${SRCPV}"
diff --git a/yocto-poky/meta/recipes-kernel/sysprof/sysprof_git.bb b/yocto-poky/meta/recipes-kernel/sysprof/sysprof_git.bb
index 19c3e10..7d87efe 100644
--- a/yocto-poky/meta/recipes-kernel/sysprof/sysprof_git.bb
+++ b/yocto-poky/meta/recipes-kernel/sysprof/sysprof_git.bb
@@ -19,7 +19,8 @@
 
 S = "${WORKDIR}/git"
 
-inherit autotools pkgconfig
+inherit autotools pkgconfig distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 # We do not yet work for aarch64.
 #
diff --git a/yocto-poky/meta/recipes-kernel/trace-cmd/kernelshark_git.bb b/yocto-poky/meta/recipes-kernel/trace-cmd/kernelshark_git.bb
index 9deccae..563182c 100644
--- a/yocto-poky/meta/recipes-kernel/trace-cmd/kernelshark_git.bb
+++ b/yocto-poky/meta/recipes-kernel/trace-cmd/kernelshark_git.bb
@@ -9,6 +9,9 @@
 DEPENDS = "gtk+ libxml2"
 RDEPENDS_${PN} = "trace-cmd"
 
+inherit distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
+
 EXTRA_OEMAKE = "\
     'prefix=${prefix}' \
     'bindir_relative=${@oe.path.relative(prefix, bindir)}' \
diff --git a/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.53.bb b/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.53.bb
index bf861a7..24f6aff 100644
--- a/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.53.bb
+++ b/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.53.bb
@@ -9,7 +9,7 @@
 PN = "libpng12"
 S = "${WORKDIR}/libpng-${PV}"
 
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng12/${PV}/libpng-${PV}.tar.xz"
+SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng12/older-releases/${PV}/libpng-${PV}.tar.xz"
 
 SRC_URI[md5sum] = "7d18a74e6fd2029aee76ccd00e00a9e6"
 SRC_URI[sha256sum] = "b45e49f689e7451bd576569e6a344f7e0d11c02ecbb797f4da0e431526765c0a"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player_git.bb b/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player_git.bb
index 54cfbbc..9850242 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player_git.bb
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player_git.bb
@@ -16,7 +16,9 @@
 
 S = "${WORKDIR}/git"
 
-inherit autotools gtk-doc lib_package pkgconfig
+inherit autotools gtk-doc lib_package pkgconfig distro_features_check
+
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 do_configure_prepend() {
 	touch ${S}/ChangeLog
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.5.bb b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.5.bb
index 97fc7ec..5d74a2e 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.5.bb
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.5.bb
@@ -25,3 +25,6 @@
 
 S = "${WORKDIR}/gst-libav-${PV}"
 
+# http://errors.yoctoproject.org/Errors/Details/20493/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
index d698904..26c1336 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
@@ -20,11 +20,10 @@
 python __anonymous () {
     omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET", True)
     if omx_target in ['generic', 'bellagio']:
-        srcdir = d.getVar("S", True)
         # Bellagio headers are incomplete (they are missing the OMX_VERSION_MAJOR,#
         # OMX_VERSION_MINOR, OMX_VERSION_REVISION, and OMX_VERSION_STEP macros);
         # appending a directory path to gst-omx' internal OpenMAX IL headers fixes this
-        d.appendVar("CFLAGS", " -I%s/omx/openmax" % srcdir)
+        d.appendVar("CFLAGS", " -I${S}/omx/openmax")
     elif omx_target == "rpi":
         # Dedicated Raspberry Pi OpenMAX IL support makes this package machine specific
         d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH", True))
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
index cdedb60..b4f01af 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
@@ -20,7 +20,7 @@
     ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'directfb', 'directfb', '', d)} \
-    orc curl uvch264 neon sndfile \
+    orc curl neon sndfile \
     hls sbc dash bz2 smoothstreaming \
     "
 
@@ -38,7 +38,7 @@
 PACKAGECONFIG[flite]           = "--enable-flite,--disable-flite,flite-alsa"
 PACKAGECONFIG[opencv]          = "--enable-opencv,--disable-opencv,opencv"
 PACKAGECONFIG[wayland]         = "--enable-wayland --enable-egl,--disable-wayland --disable-egl,wayland virtual/egl"
-PACKAGECONFIG[uvch264]         = "--enable-uvch264,--disable-uvch264,libusb1 udev"
+PACKAGECONFIG[uvch264]         = "--enable-uvch264,--disable-uvch264,libusb1 libgudev"
 PACKAGECONFIG[directfb]        = "--enable-directfb,--disable-directfb,directfb"
 PACKAGECONFIG[neon]            = "--enable-neon,--disable-neon,neon"
 PACKAGECONFIG[openal]          = "--enable-openal,--disable-openal,openal-soft"
@@ -55,6 +55,9 @@
 PACKAGECONFIG[webp]            = "--enable-webp,--disable-webp,libwebp"
 PACKAGECONFIG[rtmp]            = "--enable-rtmp,--disable-rtmp,rtmpdump"
 PACKAGECONFIG[libssh2]         = "--enable-libssh2,--disable-libssh2,libssh2"
+PACKAGECONFIG[voamrwbenc]      = "--enable-voamrwbenc,--disable-voamrwbenc,vo-amrwbenc"
+PACKAGECONFIG[voaacenc]        = "--enable-voaacenc,--disable-voaacenc,vo-aacenc"
+PACKAGECONFIG[resindvd]        = "--enable-resindvd,--disable-resindvd,libdvdnav libdvdread"
 
 # these plugins have not been ported to 1.0 (yet):
 #   directdraw vcd apexsink dc1394 lv2 linsys musepack mythtv
@@ -66,8 +69,8 @@
 # these plugins have no corresponding library in OE-core or meta-openembedded:
 #   openni2 winks direct3d directdraw directsound winscreencap osx_video
 #   apple_media android_media avc chromaprint daala dts gme gsm kate ladspa mimic
-#   mpeg2enc mplex ofa openjpeg opensles pvr resindvd rtmp soundtouch spandsp spc
-#   srtp vdpau voaacenc voamrwbenc wasapi zbar
+#   mpeg2enc mplex ofa openjpeg opensles pvr rtmp soundtouch spandsp spc
+#   srtp vdpau wasapi zbar
 
 EXTRA_OECONF += " \
     --enable-dvb \
@@ -106,7 +109,6 @@
     --disable-osx_video \
     --disable-pvr \
     --disable-quicktime \
-    --disable-resindvd \
     --disable-sdl \
     --disable-sdltest \
     --disable-sndio \
@@ -118,8 +120,6 @@
     --disable-timidity \
     --disable-vcd \
     --disable-vdpau \
-    --disable-voaacenc \
-    --disable-voamrwbenc \
     --disable-wasapi \
     --disable-wildmidi \
     --disable-wininet \
@@ -133,3 +133,4 @@
 
 FILES_gstreamer1.0-plugins-bad-opencv += "${datadir}/gst-plugins-bad/1.0/opencv*"
 
+FILES_${PN}-voamrwbenc += "${datadir}/gstreamer-${LIBV}/presets/GstVoAmrwbEnc.prs"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-glimagesink-Downrank-to-marginal.patch b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-glimagesink-Downrank-to-marginal.patch
new file mode 100644
index 0000000..f677603
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-glimagesink-Downrank-to-marginal.patch
@@ -0,0 +1,32 @@
+From c6b37a80806f9128de47f1ccc3f2354f8d436bb6 Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex.kanavin@gmail.com>
+Date: Thu, 24 Sep 2015 19:47:32 +0300
+Subject: [PATCH] glimagesink: Downrank to marginal
+
+On desktop, where there is good OpenGL, xvimagesink will come up first,
+on other platforms, OpenGL can't be trusted because it's either software (like
+in a VM) or broken (like on embedded), so let ximagesink come above.
+
+Upstream-Status: Submitted [https://bugzilla.gnome.org/show_bug.cgi?id=751684]
+
+Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
+---
+ ext/gl/gstopengl.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/ext/gl/gstopengl.c b/ext/gl/gstopengl.c
+index a4b2540..0ccaacd 100644
+--- a/ext/gl/gstopengl.c
++++ b/ext/gl/gstopengl.c
+@@ -101,7 +101,7 @@ plugin_init (GstPlugin * plugin)
+ #endif
+ 
+   if (!gst_element_register (plugin, "glimagesink",
+-          GST_RANK_SECONDARY, GST_TYPE_GLIMAGE_SINK)) {
++          GST_RANK_MARGINAL, GST_TYPE_GLIMAGE_SINK)) {
+     return FALSE;
+   }
+ 
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.5.bb b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.5.bb
index 59065de..6873669 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.5.bb
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.5.bb
@@ -5,7 +5,9 @@
                     file://COPYING.LIB;md5=21682e4e8fea52413fd26c60acb907e5 \
                     file://gst/tta/crc32.h;beginline=12;endline=29;md5=27db269c575d1e5317fffca2d33b3b50"
 
-SRC_URI += "file://0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch"
+SRC_URI += "file://0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch \
+           file://0001-glimagesink-Downrank-to-marginal.patch \
+           "
 
 SRC_URI[md5sum] = "e0bb39412cf4a48fe0397bcf3a7cd451"
 SRC_URI[sha256sum] = "152fad7250683d72f9deb36c5685428338365fe4a4c87ffe15e38783b14f983c"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base.inc b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base.inc
index 47f3f40..4909b10 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base.inc
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base.inc
@@ -25,13 +25,12 @@
 PACKAGECONFIG[theora]  = "--enable-theora,--disable-theora,libtheora"
 PACKAGECONFIG[vorbis]  = "--enable-vorbis,--disable-vorbis,libvorbis"
 PACKAGECONFIG[pango]   = "--enable-pango,--disable-pango,pango"
+# libvisual do not seem to exist anywhere in OE
+PACKAGECONFIG[visual]  = "--enable-libvisual,--disable-libvisual,libvisual"
+PACKAGECONFIG[cdparanoia] = "--enable-cdparanoia,--disable-cdparanoia,cdparanoia"
 
-
-# cdparanoia and libvisual do not seem to exist anywhere in OE
 EXTRA_OECONF += " \
     --disable-freetypetest \
-    --disable-cdparanoia \
-    --disable-libvisual \
 "
 
 FILES_${MLPREFIX}libgsttag-1.0 += "${datadir}/gst-plugins-base/1.0/license-translations.dict"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good.inc b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good.inc
index 6e316de..edaafe8 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good.inc
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good.inc
@@ -11,7 +11,7 @@
 PACKAGECONFIG ??= " \
     ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)} \
     ${@bb.utils.contains('DISTRO_FEATURES', 'pulseaudio', 'pulseaudio', '', d)} \
-    orc cairo flac gdk-pixbuf gudev jpeg libpng soup speex taglib \
+    orc cairo flac gdk-pixbuf jpeg libpng soup speex taglib v4l2\
     "
 
 X11DEPENDS = "virtual/libx11 libsm libxrender libxfixes libxdamage"
@@ -22,7 +22,8 @@
 PACKAGECONFIG[cairo]      = "--enable-cairo,--disable-cairo,cairo"
 PACKAGECONFIG[flac]       = "--enable-flac,--disable-flac,flac"
 PACKAGECONFIG[gdk-pixbuf] = "--enable-gdk_pixbuf,--disable-gdk_pixbuf,gdk-pixbuf"
-PACKAGECONFIG[gudev]      = "--with-gudev,--without-gudev,udev"
+PACKAGECONFIG[gudev]      = "--with-gudev,--without-gudev,libgudev"
+PACKAGECONFIG[libv4l2]    = "--with-libv4l2,--without-libv4l2,libv4l2"
 PACKAGECONFIG[jack]       = "--enable-jack,--disable-jack,jack"
 PACKAGECONFIG[jpeg]       = "--enable-jpeg,--disable-jpeg,jpeg"
 PACKAGECONFIG[libpng]     = "--enable-libpng,--disable-libpng,libpng"
@@ -31,15 +32,11 @@
 PACKAGECONFIG[taglib]     = "--enable-taglib,--disable-taglib,taglib"
 PACKAGECONFIG[vpx]        = "--enable-vpx,--disable-vpx,libvpx"
 PACKAGECONFIG[wavpack]    = "--enable-wavpack,--disable-wavpack,wavpack"
-
-# the 1394 plugins require both libraw1394 and libiec61883
-# the former is included in meta-oe, the latter isn't
-# -> disabled
+PACKAGECONFIG[dv1394]     = "--enable-dv1394,--disable-dv1394,libraw1394 libiec61883 libavc1394"
+PACKAGECONFIG[v4l2]       = "--enable-gst_v4l2,--disable-gst_v4l2"
 
 EXTRA_OECONF += " \
     --enable-oss \
-    --enable-gst_v4l2 \
-    --without-libv4l2 \
     --disable-directsound \
     --disable-waveform \
     --disable-oss4 \
@@ -51,7 +48,6 @@
     --disable-libdv \
     --disable-shout2 \
     --disable-examples \
-    --disable-dv1394 \
 "
 
 FILES_${PN}-equalizer += "${datadir}/gstreamer-1.0/presets/*.prs"
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-basesink-Shouldn-t-drop-buffer-when-sync-false.patch b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-basesink-Shouldn-t-drop-buffer-when-sync-false.patch
new file mode 100755
index 0000000..d682ee6
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-basesink-Shouldn-t-drop-buffer-when-sync-false.patch
@@ -0,0 +1,30 @@
+From 73df2b5c0aea58015788f5a94a3ec65296a688d3 Mon Sep 17 00:00:00 2001
+From: Song Bing <b06498@freescale.com>
+Date: Thu, 2 Jul 2015 14:32:21 +0800
+Subject: [PATCH] basesink: Shouldn't drop buffer when sync=false
+
+Shouldn't drop buffer when sync=false
+
+Upstream-Status: Accepted
+
+https://bugzilla.gnome.org/show_bug.cgi?id=751819
+---
+ libs/gst/base/gstbasesink.c |    2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/libs/gst/base/gstbasesink.c b/libs/gst/base/gstbasesink.c
+index d44e8fc..cd759ac 100644
+--- a/libs/gst/base/gstbasesink.c
++++ b/libs/gst/base/gstbasesink.c
+@@ -3423,7 +3423,7 @@ gst_base_sink_chain_unlocked (GstBaseSink * basesink, GstPad * pad,
+     if (G_UNLIKELY (stepped))
+       goto dropped;
+ 
+-    if (syncable && do_sync) {
++    if (syncable && do_sync && gst_base_sink_get_sync (basesink)) {
+       GstClock *clock;
+ 
+       GST_OBJECT_LOCK (basesink);
+-- 
+1.7.9.5
+
diff --git a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.5.bb b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.5.bb
index db58754..73a4a99 100644
--- a/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.5.bb
+++ b/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.5.bb
@@ -9,6 +9,7 @@
     file://0001-gstinfo-Shorten-__FILE__-on-all-platforms.patch \
     file://inputselector-sticky-events-haven-t-send-out-when-ac-1-4-1.patch \
     file://0002-basesink-Fix-QoS-lateness-checking-if-subclass-imple.patch \
+    file://0003-basesink-Shouldn-t-drop-buffer-when-sync-false.patch \
 "
 SRC_URI[md5sum] = "88a9289c64a4950ebb4f544980234289"
 SRC_URI[sha256sum] = "40801aa7f979024526258a0e94707ba42b8ab6f7d2206e56adbc4433155cb0ae"
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_1.patch b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_1.patch
new file mode 100644
index 0000000..25fe136
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_1.patch
@@ -0,0 +1,91 @@
+From 81f44665cce4cb1373f049a76f3904e981b7a766 Mon Sep 17 00:00:00 2001
+From: Glenn Randers-Pehrson <glennrp at users.sourceforge.net>
+Date: Thu, 29 Oct 2015 09:26:41 -0500
+Subject: [PATCH] [libpng16] Reject attempt to write over-length PLTE chunk
+
+Upstream-Status: Backport
+https://github.com/glennrp/libpng/commit/81f44665cce4cb1373f049a76f3904e981b7a766
+
+CVE: CVE-2015-8126 patch #1
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ libpng-manual.txt | 5 +++++
+ libpng.3          | 5 +++++
+ pngwrite.c        | 4 ++--
+ pngwutil.c        | 7 +++++--
+ 4 files changed, 17 insertions(+), 4 deletions(-)
+
+Index: libpng-1.6.17/libpng-manual.txt
+===================================================================
+--- libpng-1.6.17.orig/libpng-manual.txt
++++ libpng-1.6.17/libpng-manual.txt
+@@ -5109,6 +5109,11 @@ length, which resulted in PNG files that
+ chunk.  This error was fixed in libpng-1.6.3, and a tool (called
+ contrib/tools/png-fix-itxt) has been added to the libpng distribution.
+ 
++Starting with libpng-1.6.19, attempting to write an over-length PLTE chunk
++is an error. Previously this requirement of the PNG specification was not
++enforced. Libpng continues to accept over-length PLTE chunks when reading,
++but does not make any use of the extra entries.
++
+ XIII.  Detecting libpng
+ 
+ The png_get_io_ptr() function has been present since libpng-0.88, has never
+Index: libpng-1.6.17/libpng.3
+===================================================================
+--- libpng-1.6.17.orig/libpng.3
++++ libpng-1.6.17/libpng.3
+@@ -5613,6 +5613,11 @@ length, which resulted in PNG files that
+ chunk.  This error was fixed in libpng-1.6.3, and a tool (called
+ contrib/tools/png-fix-itxt) has been added to the libpng distribution.
+ 
++Starting with libpng-1.6.19, attempting to write an over-length PLTE chunk
++is an error. Previously this requirement of the PNG specification was not
++enforced. Libpng continues to accept over-length PLTE chunks when reading,
++but does not make any use of the extra entries.
++
+ .SH XIII.  Detecting libpng
+ 
+ The png_get_io_ptr() function has been present since libpng-0.88, has never
+Index: libpng-1.6.17/pngwrite.c
+===================================================================
+--- libpng-1.6.17.orig/pngwrite.c
++++ libpng-1.6.17/pngwrite.c
+@@ -205,7 +205,7 @@ png_write_info(png_structrp png_ptr, png
+       png_write_PLTE(png_ptr, info_ptr->palette,
+           (png_uint_32)info_ptr->num_palette);
+ 
+-   else if ((info_ptr->color_type == PNG_COLOR_TYPE_PALETTE) !=0)
++   else if (info_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+       png_error(png_ptr, "Valid palette required for paletted images");
+ 
+ #ifdef PNG_WRITE_tRNS_SUPPORTED
+Index: libpng-1.6.17/pngwutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngwutil.c
++++ libpng-1.6.17/pngwutil.c
+@@ -922,17 +922,20 @@ void /* PRIVATE */
+ png_write_PLTE(png_structrp png_ptr, png_const_colorp palette,
+     png_uint_32 num_pal)
+ {
+-   png_uint_32 i;
++   png_uint_32 max_num_pal, i;
+    png_const_colorp pal_ptr;
+    png_byte buf[3];
+ 
+    png_debug(1, "in png_write_PLTE");
+ 
++   max_num_pal = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
++      (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
++
+    if ((
+ #ifdef PNG_MNG_FEATURES_SUPPORTED
+        (png_ptr->mng_features_permitted & PNG_FLAG_MNG_EMPTY_PLTE) == 0 &&
+ #endif
+-       num_pal == 0) || num_pal > 256)
++       num_pal == 0) || num_pal > max_num_pal)
+    {
+       if (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+       {
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_2.patch b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_2.patch
new file mode 100644
index 0000000..4aa9170
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_2.patch
@@ -0,0 +1,134 @@
+From a901eb3ce6087e0afeef988247f1a1aa208cb54d Mon Sep 17 00:00:00 2001
+From: Glenn Randers-Pehrson <glennrp at users.sourceforge.net>
+Date: Fri, 30 Oct 2015 07:57:49 -0500
+Subject: [PATCH] [libpng16] Prevent reading over-length PLTE chunk (Cosmin
+ Truta).
+
+Upstream-Status: Backport
+https://github.com/glennrp/libpng/commit/a901eb3ce6087e0afeef988247f1a1aa208cb54d
+
+Many changes involved date and version updates which don't apply in this case.
+
+CVE: CVE-2015-8126 patch #2
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ANNOUNCE          |  6 +++---
+ CHANGES           |  4 ++--
+ libpng-manual.txt | 11 +++++------
+ libpng.3          | 19 +++++++++----------
+ pngrutil.c        |  3 +++
+ pngset.c          | 13 +++++++++----
+ pngwutil.c        |  6 +++---
+ 7 files changed, 34 insertions(+), 28 deletions(-)
+
+Index: libpng-1.6.17/libpng-manual.txt
+===================================================================
+--- libpng-1.6.17.orig/libpng-manual.txt
++++ libpng-1.6.17/libpng-manual.txt
+@@ -5109,10 +5109,9 @@ length, which resulted in PNG files that
+ chunk.  This error was fixed in libpng-1.6.3, and a tool (called
+ contrib/tools/png-fix-itxt) has been added to the libpng distribution.
+ 
+-Starting with libpng-1.6.19, attempting to write an over-length PLTE chunk
++Starting with libpng-1.6.19, attempting to set an over-length PLTE chunk
+ is an error. Previously this requirement of the PNG specification was not
+-enforced. Libpng continues to accept over-length PLTE chunks when reading,
+-but does not make any use of the extra entries.
++enforced, and the palette was always limited to 256 entries.
+ 
+ XIII.  Detecting libpng
+ 
+Index: libpng-1.6.17/libpng.3
+===================================================================
+--- libpng-1.6.17.orig/libpng.3
++++ libpng-1.6.17/libpng.3
+@@ -5613,10 +5613,9 @@ length, which resulted in PNG files that
+ chunk.  This error was fixed in libpng-1.6.3, and a tool (called
+ contrib/tools/png-fix-itxt) has been added to the libpng distribution.
+ 
+-Starting with libpng-1.6.19, attempting to write an over-length PLTE chunk
++Starting with libpng-1.6.19, attempting to set an over-length PLTE chunk
+ is an error. Previously this requirement of the PNG specification was not
+-enforced. Libpng continues to accept over-length PLTE chunks when reading,
+-but does not make any use of the extra entries.
++enforced, and the palette was always limited to 256 entries.
+ 
+ .SH XIII.  Detecting libpng
+ 
+Index: libpng-1.6.17/pngrutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngrutil.c
++++ libpng-1.6.17/pngrutil.c
+@@ -997,6 +997,9 @@ png_handle_PLTE(png_structrp png_ptr, pn
+     * confusing.
+     *
+     * Fix this by not sharing the palette in this way.
++    *
++    * Starting with libpng-1.6.19, png_set_PLTE() also issues a png_error() when
++    * it attempts to set a palette length that is too large for the bit depth.
+     */
+    png_set_PLTE(png_ptr, info_ptr, palette, num);
+ 
+Index: libpng-1.6.17/pngset.c
+===================================================================
+--- libpng-1.6.17.orig/pngset.c
++++ libpng-1.6.17/pngset.c
+@@ -513,12 +513,17 @@ png_set_PLTE(png_structrp png_ptr, png_i
+     png_const_colorp palette, int num_palette)
+ {
+ 
++   png_uint_32 max_palette_length;
++
+    png_debug1(1, "in %s storage function", "PLTE");
+ 
+    if (png_ptr == NULL || info_ptr == NULL)
+       return;
+ 
+-   if (num_palette < 0 || num_palette > PNG_MAX_PALETTE_LENGTH)
++   max_palette_length = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
++      (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
++
++   if (num_palette < 0 || num_palette > max_palette_length)
+    {
+       if (info_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+          png_error(png_ptr, "Invalid palette length");
+@@ -551,8 +556,8 @@ png_set_PLTE(png_structrp png_ptr, png_i
+    png_free_data(png_ptr, info_ptr, PNG_FREE_PLTE, 0);
+ 
+    /* Changed in libpng-1.2.1 to allocate PNG_MAX_PALETTE_LENGTH instead
+-    * of num_palette entries, in case of an invalid PNG file that has
+-    * too-large sample values.
++    * of num_palette entries, in case of an invalid PNG file or incorrect
++    * call to png_set_PLTE() with too-large sample values.
+     */
+    png_ptr->palette = png_voidcast(png_colorp, png_calloc(png_ptr,
+        PNG_MAX_PALETTE_LENGTH * (sizeof (png_color))));
+Index: libpng-1.6.17/pngwutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngwutil.c
++++ libpng-1.6.17/pngwutil.c
+@@ -922,20 +922,20 @@ void /* PRIVATE */
+ png_write_PLTE(png_structrp png_ptr, png_const_colorp palette,
+     png_uint_32 num_pal)
+ {
+-   png_uint_32 max_num_pal, i;
++   png_uint_32 max_palette_length, i;
+    png_const_colorp pal_ptr;
+    png_byte buf[3];
+ 
+    png_debug(1, "in png_write_PLTE");
+ 
+-   max_num_pal = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
++   max_palette_length = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
+       (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
+ 
+    if ((
+ #ifdef PNG_MNG_FEATURES_SUPPORTED
+        (png_ptr->mng_features_permitted & PNG_FLAG_MNG_EMPTY_PLTE) == 0 &&
+ #endif
+-       num_pal == 0) || num_pal > max_num_pal)
++       num_pal == 0) || num_pal > max_palette_length)
+    {
+       if (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+       {
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_3.patch b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_3.patch
new file mode 100644
index 0000000..0e0ad23
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_3.patch
@@ -0,0 +1,79 @@
+From 1bef8e97995c33123665582e57d3ed40b57d5978 Mon Sep 17 00:00:00 2001
+From: Glenn Randers-Pehrson <glennrp at users.sourceforge.net>
+Date: Fri, 30 Oct 2015 11:34:37 -0500
+Subject: [PATCH] [libpng16] Silently truncate over-length PLTE chunk while
+ reading.
+
+Upstream-Status: Backport
+https://github.com/glennrp/libpng/commit/1bef8e97995c33123665582e57d3ed40b57d5978
+
+The usual issues are date and version conflicts, which were not applied.
+
+CVE: CVE-2015-8126 patch #3
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+
+---
+ ANNOUNCE   |  3 ++-
+ CHANGES    |  3 ++-
+ pngrutil.c | 15 +++++++++++----
+ pngset.c   |  2 +-
+ 4 files changed, 16 insertions(+), 7 deletions(-)
+
+Index: libpng-1.6.17/pngrutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngrutil.c
++++ libpng-1.6.17/pngrutil.c
+@@ -867,7 +867,7 @@ void /* PRIVATE */
+ png_handle_PLTE(png_structrp png_ptr, png_inforp info_ptr, png_uint_32 length)
+ {
+    png_color palette[PNG_MAX_PALETTE_LENGTH];
+-   int num, i;
++   int max_palette_length, num, i;
+ #ifdef PNG_POINTER_INDEXING_SUPPORTED
+    png_colorp pal_ptr;
+ #endif
+@@ -925,9 +925,19 @@ png_handle_PLTE(png_structrp png_ptr, pn
+       return;
+    }
+ 
++   max_palette_length = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
++      (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
++
+    /* The cast is safe because 'length' is less than 3*PNG_MAX_PALETTE_LENGTH */
+    num = (int)length / 3;
+ 
++   /* If the palette has 256 or fewer entries but is too large for the bit depth,
++    * we don't issue an error, to preserve the behavior of previous libpng versions.
++    * We silently truncate the unused extra palette entries here.
++    */
++   if (num > max_palette_length)
++     num = max_palette_length;
++
+ #ifdef PNG_POINTER_INDEXING_SUPPORTED
+    for (i = 0, pal_ptr = palette; i < num; i++, pal_ptr++)
+    {
+@@ -997,9 +1007,6 @@ png_handle_PLTE(png_structrp png_ptr, pn
+     * confusing.
+     *
+     * Fix this by not sharing the palette in this way.
+-    *
+-    * Starting with libpng-1.6.19, png_set_PLTE() also issues a png_error() when
+-    * it attempts to set a palette length that is too large for the bit depth.
+     */
+    png_set_PLTE(png_ptr, info_ptr, palette, num);
+ 
+Index: libpng-1.6.17/pngset.c
+===================================================================
+--- libpng-1.6.17.orig/pngset.c
++++ libpng-1.6.17/pngset.c
+@@ -523,7 +523,7 @@ png_set_PLTE(png_structrp png_ptr, png_i
+    max_palette_length = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
+       (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
+ 
+-   if (num_palette < 0 || num_palette > max_palette_length)
++   if (num_palette < 0 || num_palette > (int) max_palette_length)
+    {
+       if (info_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+          png_error(png_ptr, "Invalid palette length");
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_4.patch b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_4.patch
new file mode 100644
index 0000000..2622630
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8126_4.patch
@@ -0,0 +1,48 @@
+From 83f4c735c88e7f451541c1528d8043c31ba3b466 Mon Sep 17 00:00:00 2001
+From: Glenn Randers-Pehrson <glennrp at users.sourceforge.net>
+Date: Thu, 5 Nov 2015 11:18:44 -0600
+Subject: [PATCH] [libpng16] Clean up coding style in png_handle_PLTE()
+
+Upstream-Status: Backport
+https://github.com/glennrp/libpng/commit/83f4c735c88e7f451541c1528d8043c31ba3b466
+
+CVE:  CVE-2015-8126 patch #4
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ pngrutil.c | 17 ++++++++++-------
+ 1 file changed, 10 insertions(+), 7 deletions(-)
+
+Index: libpng-1.6.17/pngrutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngrutil.c
++++ libpng-1.6.17/pngrutil.c
+@@ -925,18 +925,21 @@ png_handle_PLTE(png_structrp png_ptr, pn
+       return;
+    }
+ 
+-   max_palette_length = (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE) ?
+-      (1 << png_ptr->bit_depth) : PNG_MAX_PALETTE_LENGTH;
+-
+    /* The cast is safe because 'length' is less than 3*PNG_MAX_PALETTE_LENGTH */
+    num = (int)length / 3;
+ 
+-   /* If the palette has 256 or fewer entries but is too large for the bit depth,
+-    * we don't issue an error, to preserve the behavior of previous libpng versions.
+-    * We silently truncate the unused extra palette entries here.
++   /* If the palette has 256 or fewer entries but is too large for the bit
++    * depth, we don't issue an error, to preserve the behavior of previous
++    * libpng versions. We silently truncate the unused extra palette entries
++    * here.
+     */
++   if (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
++      max_palette_length = (1 << png_ptr->bit_depth);
++   else
++      max_palette_length = PNG_MAX_PALETTE_LENGTH;
++
+    if (num > max_palette_length)
+-     num = max_palette_length;
++      num = max_palette_length;
+ 
+ #ifdef PNG_POINTER_INDEXING_SUPPORTED
+    for (i = 0, pal_ptr = palette; i < num; i++, pal_ptr++)
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8472.patch b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8472.patch
new file mode 100644
index 0000000..404f012
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng-1.6.17/CVE-2015-8472.patch
@@ -0,0 +1,29 @@
+From 9f2ad4928e47036cf1ac9b8fe45a491f15be2324 Mon Sep 17 00:00:00 2001
+From: Glenn Randers-Pehrson <glennrp at users.sourceforge.net>
+Date: Wed, 4 Nov 2015 23:47:42 -0600
+Subject: [PATCH] [libpng16] Fixed new bug with CRC error after reading an
+ over-length palette.
+
+Upstream-Status: Backport
+CVE: CVE-2015-8472 
+
+https://github.com/glennrp/libpng/commit/9f2ad4928e47036cf1ac9b8fe45a491f15be2324
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ pngrutil.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+Index: libpng-1.6.17/pngrutil.c
+===================================================================
+--- libpng-1.6.17.orig/pngrutil.c
++++ libpng-1.6.17/pngrutil.c
+@@ -973,7 +973,7 @@ png_handle_PLTE(png_structrp png_ptr, pn
+    if (png_ptr->color_type == PNG_COLOR_TYPE_PALETTE)
+ #endif
+    {
+-      png_crc_finish(png_ptr, 0);
++      png_crc_finish(png_ptr, (int) length - num * 3);
+    }
+ 
+ #ifndef PNG_READ_OPT_PLTE_SUPPORTED
diff --git a/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.17.bb b/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.17.bb
index 00e5808..cc288c7 100644
--- a/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.17.bb
+++ b/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.17.bb
@@ -8,8 +8,16 @@
 DEPENDS = "zlib"
 LIBV = "16"
 
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng${LIBV}/${PV}/libpng-${PV}.tar.xz \
+SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng${LIBV}/older-releases/${PV}/libpng-${PV}.tar.xz \
           "
+SRC_URI += "\
+            file://CVE-2015-8126_1.patch \
+            file://CVE-2015-8126_2.patch \
+            file://CVE-2015-8126_3.patch \
+            file://CVE-2015-8126_4.patch \
+            file://CVE-2015-8472.patch \
+            "
+
 SRC_URI[md5sum] = "430a9b76b78533235cd4b9b26ce75c7e"
 SRC_URI[sha256sum] = "98507b55fbe5cd43c51981f2924e4671fd81fe35d52dc53357e20f2c77fa5dfd"
 
diff --git a/yocto-poky/meta/recipes-multimedia/libsndfile/files/libsndfile-fix-CVE-2014-9756.patch b/yocto-poky/meta/recipes-multimedia/libsndfile/files/libsndfile-fix-CVE-2014-9756.patch
new file mode 100644
index 0000000..b54b3ba
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libsndfile/files/libsndfile-fix-CVE-2014-9756.patch
@@ -0,0 +1,24 @@
+src/file_io.c : Prevent potential divide-by-zero.
+
+Closes: https://github.com/erikd/libsndfile/issues/92
+
+Upstream-Status: Backport
+
+Fixes CVE-2014-9756
+
+Signed-off-by: Erik de Castro Lopo <erikd@mega-nerd.com>
+Signed-off-by: Maxin B. John <maxin.john@intel.com>
+---
+diff -Naur libsndfile-1.0.25-orig/src/file_io.c libsndfile-1.0.25/src/file_io.c
+--- libsndfile-1.0.25-orig/src/file_io.c	2011-01-19 12:12:28.000000000 +0200
++++ libsndfile-1.0.25/src/file_io.c	2015-11-04 15:02:04.337395618 +0200
+@@ -358,6 +358,9 @@
+ {	sf_count_t total = 0 ;
+ 	ssize_t	count ;
+ 
++    if (bytes == 0 || items == 0)
++        return 0 ;
++
+ 	if (psf->virtual_io)
+ 		return psf->vio.write (ptr, bytes*items, psf->vio_user_data) / bytes ;
+ 
diff --git a/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1_1.0.25.bb b/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1_1.0.25.bb
index 3e02f4e..be875c2 100644
--- a/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1_1.0.25.bb
+++ b/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1_1.0.25.bb
@@ -9,6 +9,7 @@
 SRC_URI = "http://www.mega-nerd.com/libsndfile/files/libsndfile-${PV}.tar.gz \
            file://0001-src-sd2.c-Fix-segfault-in-SD2-RSRC-parser.patch \
            file://0001-src-sd2.c-Fix-two-potential-buffer-read-overflows.patch \
+           file://libsndfile-fix-CVE-2014-9756.patch \
 "
 
 SRC_URI[md5sum] = "e2b7bb637e01022c7d20f95f9c3990a2"
diff --git a/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8781.patch b/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8781.patch
new file mode 100644
index 0000000..bdbe696
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8781.patch
@@ -0,0 +1,196 @@
+From aaab5c3c9d2a2c6984f23ccbc79702610439bc65 Mon Sep 17 00:00:00 2001
+From: erouault <erouault>
+Date: Sun, 27 Dec 2015 16:25:11 +0000
+Subject: [PATCH] * libtiff/tif_luv.c: fix potential out-of-bound writes in
+ decode functions in non debug builds by replacing assert()s by regular if
+ checks (bugzilla #2522). Fix potential out-of-bound reads in case of short
+ input data.
+
+Upstream-Status: Backport
+
+https://github.com/vadz/libtiff/commit/aaab5c3c9d2a2c6984f23ccbc79702610439bc65
+hand applied Changelog changes
+
+CVE: CVE-2015-8781
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+---
+ ChangeLog         |  7 +++++++
+ libtiff/tif_luv.c | 55 ++++++++++++++++++++++++++++++++++++++++++++-----------
+ 2 files changed, 51 insertions(+), 11 deletions(-)
+
+Index: tiff-4.0.4/ChangeLog
+===================================================================
+--- tiff-4.0.4.orig/ChangeLog
++++ tiff-4.0.4/ChangeLog
+@@ -1,3 +1,11 @@
++2015-12-27  Even Rouault <even.rouault at spatialys.com>
++
++   * libtiff/tif_luv.c: fix potential out-of-bound writes in decode
++   functions in non debug builds by replacing assert()s by regular if
++   checks (bugzilla #2522).
++   Fix potential out-of-bound reads in case of short input data.
++
++
+ 2015-06-21  Bob Friesenhahn  <bfriesen@simple.dallas.tx.us>
+ 
+ 	* libtiff 4.0.4 released.
+Index: tiff-4.0.4/libtiff/tif_luv.c
+===================================================================
+--- tiff-4.0.4.orig/libtiff/tif_luv.c
++++ tiff-4.0.4/libtiff/tif_luv.c
+@@ -202,7 +202,11 @@ LogL16Decode(TIFF* tif, uint8* op, tmsiz
+ 	if (sp->user_datafmt == SGILOGDATAFMT_16BIT)
+ 		tp = (int16*) op;
+ 	else {
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		tp = (int16*) sp->tbuf;
+ 	}
+ 	_TIFFmemset((void*) tp, 0, npixels*sizeof (tp[0]));
+@@ -211,9 +215,11 @@ LogL16Decode(TIFF* tif, uint8* op, tmsiz
+ 	cc = tif->tif_rawcc;
+ 	/* get each byte string */
+ 	for (shft = 2*8; (shft -= 8) >= 0; ) {
+-		for (i = 0; i < npixels && cc > 0; )
++		for (i = 0; i < npixels && cc > 0; ) {
+ 			if (*bp >= 128) {		/* run */
+-				rc = *bp++ + (2-128);   /* TODO: potential input buffer overrun when decoding corrupt or truncated data */
++				if( cc < 2 )
++					break;
++				rc = *bp++ + (2-128);
+ 				b = (int16)(*bp++ << shft);
+ 				cc -= 2;
+ 				while (rc-- && i < npixels)
+@@ -223,6 +229,7 @@ LogL16Decode(TIFF* tif, uint8* op, tmsiz
+ 				while (--cc && rc-- && i < npixels)
+ 					tp[i++] |= (int16)*bp++ << shft;
+ 			}
++		}
+ 		if (i != npixels) {
+ #if defined(__WIN32__) && (defined(_MSC_VER) || defined(__MINGW32__))
+ 			TIFFErrorExt(tif->tif_clientdata, module,
+@@ -268,13 +275,17 @@ LogLuvDecode24(TIFF* tif, uint8* op, tms
+ 	if (sp->user_datafmt == SGILOGDATAFMT_RAW)
+ 		tp = (uint32 *)op;
+ 	else {
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		tp = (uint32 *) sp->tbuf;
+ 	}
+ 	/* copy to array of uint32 */
+ 	bp = (unsigned char*) tif->tif_rawcp;
+ 	cc = tif->tif_rawcc;
+-	for (i = 0; i < npixels && cc > 0; i++) {
++	for (i = 0; i < npixels && cc >= 3; i++) {
+ 		tp[i] = bp[0] << 16 | bp[1] << 8 | bp[2];
+ 		bp += 3;
+ 		cc -= 3;
+@@ -325,7 +336,11 @@ LogLuvDecode32(TIFF* tif, uint8* op, tms
+ 	if (sp->user_datafmt == SGILOGDATAFMT_RAW)
+ 		tp = (uint32*) op;
+ 	else {
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		tp = (uint32*) sp->tbuf;
+ 	}
+ 	_TIFFmemset((void*) tp, 0, npixels*sizeof (tp[0]));
+@@ -334,11 +349,13 @@ LogLuvDecode32(TIFF* tif, uint8* op, tms
+ 	cc = tif->tif_rawcc;
+ 	/* get each byte string */
+ 	for (shft = 4*8; (shft -= 8) >= 0; ) {
+-		for (i = 0; i < npixels && cc > 0; )
++		for (i = 0; i < npixels && cc > 0; ) {
+ 			if (*bp >= 128) {		/* run */
++				if( cc < 2 )
++					break;
+ 				rc = *bp++ + (2-128);
+ 				b = (uint32)*bp++ << shft;
+-				cc -= 2;                /* TODO: potential input buffer overrun when decoding corrupt or truncated data */
++				cc -= 2;
+ 				while (rc-- && i < npixels)
+ 					tp[i++] |= b;
+ 			} else {			/* non-run */
+@@ -346,6 +363,7 @@ LogLuvDecode32(TIFF* tif, uint8* op, tms
+ 				while (--cc && rc-- && i < npixels)
+ 					tp[i++] |= (uint32)*bp++ << shft;
+ 			}
++		}
+ 		if (i != npixels) {
+ #if defined(__WIN32__) && (defined(_MSC_VER) || defined(__MINGW32__))
+ 			TIFFErrorExt(tif->tif_clientdata, module,
+@@ -413,6 +431,7 @@ LogLuvDecodeTile(TIFF* tif, uint8* bp, t
+ static int
+ LogL16Encode(TIFF* tif, uint8* bp, tmsize_t cc, uint16 s)
+ {
++	static const char module[] = "LogL16Encode";
+ 	LogLuvState* sp = EncoderState(tif);
+ 	int shft;
+ 	tmsize_t i;
+@@ -433,7 +452,11 @@ LogL16Encode(TIFF* tif, uint8* bp, tmsiz
+ 		tp = (int16*) bp;
+ 	else {
+ 		tp = (int16*) sp->tbuf;
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		(*sp->tfunc)(sp, bp, npixels);
+ 	}
+ 	/* compress each byte string */
+@@ -506,6 +529,7 @@ LogL16Encode(TIFF* tif, uint8* bp, tmsiz
+ static int
+ LogLuvEncode24(TIFF* tif, uint8* bp, tmsize_t cc, uint16 s)
+ {
++	static const char module[] = "LogLuvEncode24";
+ 	LogLuvState* sp = EncoderState(tif);
+ 	tmsize_t i;
+ 	tmsize_t npixels;
+@@ -521,7 +545,11 @@ LogLuvEncode24(TIFF* tif, uint8* bp, tms
+ 		tp = (uint32*) bp;
+ 	else {
+ 		tp = (uint32*) sp->tbuf;
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		(*sp->tfunc)(sp, bp, npixels);
+ 	}
+ 	/* write out encoded pixels */
+@@ -553,6 +581,7 @@ LogLuvEncode24(TIFF* tif, uint8* bp, tms
+ static int
+ LogLuvEncode32(TIFF* tif, uint8* bp, tmsize_t cc, uint16 s)
+ {
++	static const char module[] = "LogLuvEncode32";
+ 	LogLuvState* sp = EncoderState(tif);
+ 	int shft;
+ 	tmsize_t i;
+@@ -574,7 +603,11 @@ LogLuvEncode32(TIFF* tif, uint8* bp, tms
+ 		tp = (uint32*) bp;
+ 	else {
+ 		tp = (uint32*) sp->tbuf;
+-		assert(sp->tbuflen >= npixels);
++		if(sp->tbuflen < npixels) {
++			TIFFErrorExt(tif->tif_clientdata, module,
++						 "Translation buffer too short");
++			return (0);
++		}
+ 		(*sp->tfunc)(sp, bp, npixels);
+ 	}
+ 	/* compress each byte string */
diff --git a/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8784.patch b/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8784.patch
new file mode 100644
index 0000000..cf37fd3
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2015-8784.patch
@@ -0,0 +1,73 @@
+From b18012dae552f85dcc5c57d3bf4e997a15b1cc1c Mon Sep 17 00:00:00 2001
+From: erouault <erouault>
+Date: Sun, 27 Dec 2015 16:55:20 +0000
+Subject: [PATCH] * libtiff/tif_next.c: fix potential out-of-bound write in
+ NeXTDecode() triggered by http://lcamtuf.coredump.cx/afl/vulns/libtiff5.tif
+ (bugzilla #2508)
+
+Upstream-Status: Backport
+https://github.com/vadz/libtiff/commit/b18012dae552f85dcc5c57d3bf4e997a15b1cc1c
+hand-applied ChangeLog changes
+
+CVE: CVE-2015-8784
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ ChangeLog          |  6 ++++++
+ libtiff/tif_next.c | 10 ++++++++--
+ 2 files changed, 14 insertions(+), 2 deletions(-)
+
+Index: tiff-4.0.4/ChangeLog
+===================================================================
+--- tiff-4.0.4.orig/ChangeLog
++++ tiff-4.0.4/ChangeLog
+@@ -1,5 +1,11 @@
+ 2015-12-27  Even Rouault <even.rouault at spatialys.com>
+ 
++   * libtiff/tif_next.c: fix potential out-of-bound write in NeXTDecode()
++   triggered by http://lcamtuf.coredump.cx/afl/vulns/libtiff5.tif
++   (bugzilla #2508)
++
++2015-12-27  Even Rouault <even.rouault at spatialys.com>
++
+    * libtiff/tif_luv.c: fix potential out-of-bound writes in decode
+    functions in non debug builds by replacing assert()s by regular if
+    checks (bugzilla #2522).
+Index: tiff-4.0.4/libtiff/tif_next.c
+===================================================================
+--- tiff-4.0.4.orig/libtiff/tif_next.c
++++ tiff-4.0.4/libtiff/tif_next.c
+@@ -37,7 +37,7 @@
+ 	case 0:	op[0]  = (unsigned char) ((v) << 6); break;	\
+ 	case 1:	op[0] |= (v) << 4; break;	\
+ 	case 2:	op[0] |= (v) << 2; break;	\
+-	case 3:	*op++ |= (v);	   break;	\
++	case 3:	*op++ |= (v);	   op_offset++; break;	\
+ 	}					\
+ }
+ 
+@@ -106,6 +106,7 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize
+ 			uint32 imagewidth = tif->tif_dir.td_imagewidth;
+             if( isTiled(tif) )
+                 imagewidth = tif->tif_dir.td_tilewidth;
++            tmsize_t op_offset = 0;
+ 
+ 			/*
+ 			 * The scanline is composed of a sequence of constant
+@@ -122,10 +123,15 @@ NeXTDecode(TIFF* tif, uint8* buf, tmsize
+ 				 * bounds, potentially resulting in a security
+ 				 * issue.
+ 				 */
+-				while (n-- > 0 && npixels < imagewidth)
++				while (n-- > 0 && npixels < imagewidth && op_offset < scanline)
+ 					SETPIXEL(op, grey);
+ 				if (npixels >= imagewidth)
+ 					break;
++                if (op_offset >= scanline ) {
++                    TIFFErrorExt(tif->tif_clientdata, module, "Invalid data for scanline %ld",
++                        (long) tif->tif_row);
++                    return (0);
++                }
+ 				if (cc == 0)
+ 					goto bad;
+ 				n = *bp++, cc--;
diff --git a/yocto-poky/meta/recipes-multimedia/libtiff/tiff_4.0.4.bb b/yocto-poky/meta/recipes-multimedia/libtiff/tiff_4.0.4.bb
index cf3a5f0..f1f5a7e 100644
--- a/yocto-poky/meta/recipes-multimedia/libtiff/tiff_4.0.4.bb
+++ b/yocto-poky/meta/recipes-multimedia/libtiff/tiff_4.0.4.bb
@@ -5,6 +5,8 @@
 
 SRC_URI = "ftp://ftp.remotesensing.org/pub/libtiff/tiff-${PV}.tar.gz \
            file://libtool2.patch \
+           file://CVE-2015-8781.patch \
+           file://CVE-2015-8784.patch \
           "
 
 SRC_URI[md5sum] = "9aee7107408a128c0c7b24286c0db900"
diff --git a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-card-add-pa_card_profile.ports.patch b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-card-add-pa_card_profile.ports.patch
new file mode 100644
index 0000000..97b2e40
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-card-add-pa_card_profile.ports.patch
@@ -0,0 +1,245 @@
+From 6f814b40a01d03f93b36184c19339033949de472 Mon Sep 17 00:00:00 2001
+From: Tanu Kaskinen <tanuk@iki.fi>
+Date: Fri, 23 Oct 2015 12:23:13 +0300
+Subject: [PATCH 1/4] card: add pa_card_profile.ports
+
+Having ports accessible from pa_card_profile allows checking whether all ports
+of a profile are unavailable, and therefore helps with managing the profile
+availability (implemented in a later patch).
+
+http://bugzilla.yoctoproject.org/show_bug.cgi?id=8448
+
+Upstream-Status: Submitted [http://lists.freedesktop.org/archives/pulseaudio-discuss/2015-October/024614.html]
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ src/modules/alsa/alsa-mixer.c                |  4 +++-
+ src/modules/alsa/alsa-ucm.c                  |  1 +
+ src/modules/bluetooth/module-bluez4-device.c |  6 ++++++
+ src/modules/bluetooth/module-bluez5-device.c |  6 ++++++
+ src/pulsecore/card.c                         | 16 ++++++++++++++++
+ src/pulsecore/card.h                         | 18 ++++++++++++------
+ src/pulsecore/device-port.c                  |  7 ++++++-
+ 7 files changed, 50 insertions(+), 8 deletions(-)
+
+diff --git a/src/modules/alsa/alsa-mixer.c b/src/modules/alsa/alsa-mixer.c
+index 47cbd14..c5b82b0 100644
+--- a/src/modules/alsa/alsa-mixer.c
++++ b/src/modules/alsa/alsa-mixer.c
+@@ -4654,8 +4654,10 @@ static pa_device_port* device_port_alsa_init(pa_hashmap *ports, /* card ports */
+         path->port = p;
+     }
+ 
+-    if (cp)
++    if (cp) {
+         pa_hashmap_put(p->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, p);
++    }
+ 
+     if (extra) {
+         pa_hashmap_put(extra, p->name, p);
+diff --git a/src/modules/alsa/alsa-ucm.c b/src/modules/alsa/alsa-ucm.c
+index aa2d601..c8199d6 100644
+--- a/src/modules/alsa/alsa-ucm.c
++++ b/src/modules/alsa/alsa-ucm.c
+@@ -761,6 +761,7 @@ static void ucm_add_port_combination(
+     if (cp) {
+         pa_log_debug("Adding profile %s to port %s.", cp->name, port->name);
+         pa_hashmap_put(port->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, port);
+     }
+ 
+     if (hash) {
+diff --git a/src/modules/bluetooth/module-bluez4-device.c b/src/modules/bluetooth/module-bluez4-device.c
+index db69d34..b40c6a0 100644
+--- a/src/modules/bluetooth/module-bluez4-device.c
++++ b/src/modules/bluetooth/module-bluez4-device.c
+@@ -2183,6 +2183,7 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         p->max_sink_channels = 2;
+         p->max_source_channels = 0;
+         pa_hashmap_put(output_port->profiles, p->name, p);
++        pa_card_profile_add_port(p, output_port);
+ 
+         d = PA_CARD_PROFILE_DATA(p);
+         *d = PA_BLUEZ4_PROFILE_A2DP;
+@@ -2194,6 +2195,7 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         p->max_sink_channels = 0;
+         p->max_source_channels = 2;
+         pa_hashmap_put(input_port->profiles, p->name, p);
++        pa_card_profile_add_port(p, input_port);
+ 
+         d = PA_CARD_PROFILE_DATA(p);
+         *d = PA_BLUEZ4_PROFILE_A2DP_SOURCE;
+@@ -2206,6 +2208,8 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         p->max_source_channels = 1;
+         pa_hashmap_put(input_port->profiles, p->name, p);
+         pa_hashmap_put(output_port->profiles, p->name, p);
++        pa_card_profile_add_port(p, input_port);
++        pa_card_profile_add_port(p, output_port);
+ 
+         d = PA_CARD_PROFILE_DATA(p);
+         *d = PA_BLUEZ4_PROFILE_HSP;
+@@ -2218,6 +2222,8 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         p->max_source_channels = 1;
+         pa_hashmap_put(input_port->profiles, p->name, p);
+         pa_hashmap_put(output_port->profiles, p->name, p);
++        pa_card_profile_add_port(p, input_port);
++        pa_card_profile_add_port(p, output_port);
+ 
+         d = PA_CARD_PROFILE_DATA(p);
+         *d = PA_BLUEZ4_PROFILE_HFGW;
+diff --git a/src/modules/bluetooth/module-bluez5-device.c b/src/modules/bluetooth/module-bluez5-device.c
+index 7238e6f..3321785 100644
+--- a/src/modules/bluetooth/module-bluez5-device.c
++++ b/src/modules/bluetooth/module-bluez5-device.c
+@@ -1790,6 +1790,7 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         cp->max_sink_channels = 2;
+         cp->max_source_channels = 0;
+         pa_hashmap_put(output_port->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, output_port);
+ 
+         p = PA_CARD_PROFILE_DATA(cp);
+         *p = PA_BLUETOOTH_PROFILE_A2DP_SINK;
+@@ -1801,6 +1802,7 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         cp->max_sink_channels = 0;
+         cp->max_source_channels = 2;
+         pa_hashmap_put(input_port->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, input_port);
+ 
+         p = PA_CARD_PROFILE_DATA(cp);
+         *p = PA_BLUETOOTH_PROFILE_A2DP_SOURCE;
+@@ -1813,6 +1815,8 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         cp->max_source_channels = 1;
+         pa_hashmap_put(input_port->profiles, cp->name, cp);
+         pa_hashmap_put(output_port->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, input_port);
++        pa_card_profile_add_port(cp, output_port);
+ 
+         p = PA_CARD_PROFILE_DATA(cp);
+         *p = PA_BLUETOOTH_PROFILE_HEADSET_HEAD_UNIT;
+@@ -1825,6 +1829,8 @@ static pa_card_profile *create_card_profile(struct userdata *u, const char *uuid
+         cp->max_source_channels = 1;
+         pa_hashmap_put(input_port->profiles, cp->name, cp);
+         pa_hashmap_put(output_port->profiles, cp->name, cp);
++        pa_card_profile_add_port(cp, input_port);
++        pa_card_profile_add_port(cp, output_port);
+ 
+         p = PA_CARD_PROFILE_DATA(cp);
+         *p = PA_BLUETOOTH_PROFILE_HEADSET_AUDIO_GATEWAY;
+diff --git a/src/pulsecore/card.c b/src/pulsecore/card.c
+index 6f9391e..cc4c784 100644
+--- a/src/pulsecore/card.c
++++ b/src/pulsecore/card.c
+@@ -50,6 +50,7 @@ pa_card_profile *pa_card_profile_new(const char *name, const char *description,
+     c->n_sinks = c->n_sources = 0;
+     c->max_sink_channels = c->max_source_channels = 0;
+     c->available = PA_AVAILABLE_UNKNOWN;
++    c->ports = pa_hashmap_new(pa_idxset_string_hash_func, pa_idxset_string_compare_func);
+ 
+     return c;
+ }
+@@ -57,11 +58,25 @@ pa_card_profile *pa_card_profile_new(const char *name, const char *description,
+ void pa_card_profile_free(pa_card_profile *c) {
+     pa_assert(c);
+ 
++    if (c->ports) {
++        pa_device_port *port;
++        void *state;
++        PA_HASHMAP_FOREACH(port, c->ports, state)
++            pa_hashmap_remove (port->profiles, c->name);
++        pa_hashmap_free(c->ports);
++    }
++
+     pa_xfree(c->name);
+     pa_xfree(c->description);
+     pa_xfree(c);
+ }
+ 
++void pa_card_profile_add_port(pa_card_profile *profile, pa_device_port *port) {
++    pa_assert(profile);
++
++    pa_hashmap_put(profile->ports, port->name, port);
++}
++
+ void pa_card_profile_set_available(pa_card_profile *c, pa_available_t available) {
+     pa_core *core;
+ 
+@@ -198,6 +213,7 @@ pa_card *pa_card_new(pa_core *core, pa_card_new_data *data) {
+ 
+     c->userdata = NULL;
+     c->set_profile = NULL;
++    c->active_profile = NULL;
+ 
+     pa_device_init_description(c->proplist, c);
+     pa_device_init_icon(c->proplist, true);
+diff --git a/src/pulsecore/card.h b/src/pulsecore/card.h
+index 3e2c004..1c33958 100644
+--- a/src/pulsecore/card.h
++++ b/src/pulsecore/card.h
+@@ -22,19 +22,21 @@
+ 
+ typedef struct pa_card pa_card;
+ 
+-#include <pulse/proplist.h>
+-#include <pulsecore/core.h>
+-#include <pulsecore/module.h>
+-#include <pulsecore/idxset.h>
+-
+ /* This enum replaces pa_port_available_t (defined in pulse/def.h) for
+- * internal use, so make sure both enum types stay in sync. */
++ * internal use, so make sure both enum types stay in sync. This is defined
++ * before the #includes, because device-port.h depends on this enum. */
+ typedef enum pa_available {
+     PA_AVAILABLE_UNKNOWN = 0,
+     PA_AVAILABLE_NO = 1,
+     PA_AVAILABLE_YES = 2,
+ } pa_available_t;
+ 
++#include <pulse/proplist.h>
++#include <pulsecore/core.h>
++#include <pulsecore/device-port.h>
++#include <pulsecore/module.h>
++#include <pulsecore/idxset.h>
++
+ typedef struct pa_card_profile {
+     pa_card *card;
+     char *name;
+@@ -43,6 +45,8 @@ typedef struct pa_card_profile {
+     unsigned priority;
+     pa_available_t available; /* PA_AVAILABLE_UNKNOWN, PA_AVAILABLE_NO or PA_AVAILABLE_YES */
+ 
++    pa_hashmap *ports; /* port name -> pa_device_port */
++
+     /* We probably want to have different properties later on here */
+     unsigned n_sinks;
+     unsigned n_sources;
+@@ -100,6 +104,8 @@ typedef struct pa_card_new_data {
+ pa_card_profile *pa_card_profile_new(const char *name, const char *description, size_t extra);
+ void pa_card_profile_free(pa_card_profile *c);
+ 
++void pa_card_profile_add_port(pa_card_profile *profile, pa_device_port *port);
++
+ /* The profile's available status has changed */
+ void pa_card_profile_set_available(pa_card_profile *c, pa_available_t available);
+ 
+diff --git a/src/pulsecore/device-port.c b/src/pulsecore/device-port.c
+index cfe2a80..f16ecef 100644
+--- a/src/pulsecore/device-port.c
++++ b/src/pulsecore/device-port.c
+@@ -95,8 +95,13 @@ static void device_port_free(pa_object *o) {
+     if (p->proplist)
+         pa_proplist_free(p->proplist);
+ 
+-    if (p->profiles)
++    if (p->profiles) {
++        pa_card_profile *profile;
++        void *state;
++        PA_HASHMAP_FOREACH(profile, p->profiles, state)
++            pa_hashmap_remove (profile->ports, p->name);
+         pa_hashmap_free(p->profiles);
++    }
+ 
+     pa_xfree(p->name);
+     pa_xfree(p->description);
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0002-alsa-bluetooth-fail-if-user-requested-profile-doesn-.patch b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0002-alsa-bluetooth-fail-if-user-requested-profile-doesn-.patch
new file mode 100644
index 0000000..c3f217b
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0002-alsa-bluetooth-fail-if-user-requested-profile-doesn-.patch
@@ -0,0 +1,60 @@
+From 339eb179baa7810113f6456accc05b3a32c1cdba Mon Sep 17 00:00:00 2001
+From: Tanu Kaskinen <tanuk@iki.fi>
+Date: Fri, 23 Oct 2015 12:36:34 +0300
+Subject: [PATCH 2/4] alsa, bluetooth: fail if user-requested profile doesn't
+ exist
+
+If we can't fulfill the user request fully, I think we shouldn't
+fulfill it at all, to make it clear that the requested operation
+didn't succeed.
+
+http://bugzilla.yoctoproject.org/show_bug.cgi?id=8448
+
+Upstream-Status: Submitted [http://lists.freedesktop.org/archives/pulseaudio-discuss/2015-October/024614.html]
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ src/modules/alsa/module-alsa-card.c          | 10 ++++++++--
+ src/modules/bluetooth/module-bluez4-device.c |  6 ++++--
+ 2 files changed, 12 insertions(+), 4 deletions(-)
+
+diff --git a/src/modules/alsa/module-alsa-card.c b/src/modules/alsa/module-alsa-card.c
+index a7fec04..32f517e 100644
+--- a/src/modules/alsa/module-alsa-card.c
++++ b/src/modules/alsa/module-alsa-card.c
+@@ -754,8 +754,14 @@ int pa__init(pa_module *m) {
+         goto fail;
+     }
+ 
+-    if ((profile = pa_modargs_get_value(u->modargs, "profile", NULL)))
+-        pa_card_new_data_set_profile(&data, profile);
++    if ((profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
++        if (pa_hashmap_get(data.profiles, profile))
++            pa_card_new_data_set_profile(&data, profile);
++        else {
++            pa_log("No such profile: %s", profile);
++            goto fail;
++        }
++    }
+ 
+     u->card = pa_card_new(m->core, &data);
+     pa_card_new_data_done(&data);
+diff --git a/src/modules/bluetooth/module-bluez4-device.c b/src/modules/bluetooth/module-bluez4-device.c
+index b40c6a0..94e6988 100644
+--- a/src/modules/bluetooth/module-bluez4-device.c
++++ b/src/modules/bluetooth/module-bluez4-device.c
+@@ -2310,8 +2310,10 @@ static int add_card(struct userdata *u) {
+     if ((default_profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
+         if (pa_hashmap_get(data.profiles, default_profile))
+             pa_card_new_data_set_profile(&data, default_profile);
+-        else
+-            pa_log_warn("Profile '%s' not valid or not supported by device.", default_profile);
++        else {
++            pa_log("Profile '%s' not valid or not supported by device.", default_profile);
++            return -1;
++        }
+     }
+ 
+     u->card = pa_card_new(u->core, &data);
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0003-card-move-profile-selection-after-pa_card_new.patch b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0003-card-move-profile-selection-after-pa_card_new.patch
new file mode 100644
index 0000000..9585f3d
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0003-card-move-profile-selection-after-pa_card_new.patch
@@ -0,0 +1,363 @@
+From cc41c8a3149ef04d4aa2db3d15032605a5504658 Mon Sep 17 00:00:00 2001
+From: Tanu Kaskinen <tanuk@iki.fi>
+Date: Fri, 23 Oct 2015 12:59:53 +0300
+Subject: [PATCH 3/4] card: move profile selection after pa_card_new()
+
+I want module-alsa-card to set the availability of unavailable
+profiles before the initial card profile gets selected, so that the
+selection logic can use correct availability information.
+module-alsa-card initializes the jack state after calling
+pa_card_new(), however, and the profile selection happens in
+pa_card_new(). This patch solves that by introducing pa_card_put() and
+moving the profile selection code there.
+
+An alternative solution would have been to move the jack
+initialization to happen before pa_card_new() and use pa_card_new_data
+instead of pa_card in the jack initialization code, but I disliked
+that idea (I want to get rid of the "new data" pattern eventually).
+
+The CARD_NEW hook is used when applying the initial profile policy, so
+that was moved to pa_card_put(). That required changing the hook data
+from pa_card_new_data to pa_card. module-card-restore now uses
+pa_card_set_profile() instead of pa_card_new_data_set_profile(). That
+required adding a state variable to pa_card, because
+pa_card_set_profile() needs to distinguish between setting the initial
+profile and setting the profile in other situations.
+
+The order in which the initial profile policy is applied is reversed
+in this patch. Previously the first one to set it won, now the last
+one to set it wins. I think this is better, because if you have N
+parties that want to set the profile, we avoid checking N times
+whether someone else has already set the profile.
+
+http://bugzilla.yoctoproject.org/show_bug.cgi?id=8448
+
+Upstream-Status: Submitted [http://lists.freedesktop.org/archives/pulseaudio-discuss/2015-October/024614.html]
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ src/modules/alsa/module-alsa-card.c          | 19 +++---
+ src/modules/bluetooth/module-bluez4-device.c | 18 +++---
+ src/modules/bluetooth/module-bluez5-device.c |  1 +
+ src/modules/macosx/module-coreaudio-device.c |  1 +
+ src/modules/module-card-restore.c            | 24 ++++----
+ src/pulsecore/card.c                         | 86 +++++++++++++++-------------
+ src/pulsecore/card.h                         |  7 +++
+ 7 files changed, 87 insertions(+), 69 deletions(-)
+
+diff --git a/src/modules/alsa/module-alsa-card.c b/src/modules/alsa/module-alsa-card.c
+index 32f517e..5b39654 100644
+--- a/src/modules/alsa/module-alsa-card.c
++++ b/src/modules/alsa/module-alsa-card.c
+@@ -754,15 +754,6 @@ int pa__init(pa_module *m) {
+         goto fail;
+     }
+ 
+-    if ((profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
+-        if (pa_hashmap_get(data.profiles, profile))
+-            pa_card_new_data_set_profile(&data, profile);
+-        else {
+-            pa_log("No such profile: %s", profile);
+-            goto fail;
+-        }
+-    }
+-
+     u->card = pa_card_new(m->core, &data);
+     pa_card_new_data_done(&data);
+ 
+@@ -773,6 +764,16 @@ int pa__init(pa_module *m) {
+     u->card->set_profile = card_set_profile;
+ 
+     init_jacks(u);
++    pa_card_put(u->card);
++
++    if ((profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
++        u->card->active_profile = pa_hashmap_get(u->card->profiles, profile);
++        if (!u->card->active_profile) {
++            pa_log("No such profile: %s", profile);
++            goto fail;
++        }
++    }
++
+     init_profile(u);
+     init_eld_ctls(u);
+ 
+diff --git a/src/modules/bluetooth/module-bluez4-device.c b/src/modules/bluetooth/module-bluez4-device.c
+index 94e6988..5efc5dc 100644
+--- a/src/modules/bluetooth/module-bluez4-device.c
++++ b/src/modules/bluetooth/module-bluez4-device.c
+@@ -2307,15 +2307,6 @@ static int add_card(struct userdata *u) {
+     *d = PA_BLUEZ4_PROFILE_OFF;
+     pa_hashmap_put(data.profiles, p->name, p);
+ 
+-    if ((default_profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
+-        if (pa_hashmap_get(data.profiles, default_profile))
+-            pa_card_new_data_set_profile(&data, default_profile);
+-        else {
+-            pa_log("Profile '%s' not valid or not supported by device.", default_profile);
+-            return -1;
+-        }
+-    }
+-
+     u->card = pa_card_new(u->core, &data);
+     pa_card_new_data_done(&data);
+ 
+@@ -2326,6 +2317,15 @@ static int add_card(struct userdata *u) {
+ 
+     u->card->userdata = u;
+     u->card->set_profile = card_set_profile;
++    pa_card_put(u->card);
++
++    if ((default_profile = pa_modargs_get_value(u->modargs, "profile", NULL))) {
++        u->card->active_profile = pa_hashmap_get(u->card->profiles, default_profile);
++        if (!u->card->active_profile) {
++            pa_log("Profile '%s' not valid or not supported by device.", default_profile);
++            return -1;
++        }
++    }
+ 
+     d = PA_CARD_PROFILE_DATA(u->card->active_profile);
+ 
+diff --git a/src/modules/bluetooth/module-bluez5-device.c b/src/modules/bluetooth/module-bluez5-device.c
+index 3321785..0081a21 100644
+--- a/src/modules/bluetooth/module-bluez5-device.c
++++ b/src/modules/bluetooth/module-bluez5-device.c
+@@ -1959,6 +1959,7 @@ static int add_card(struct userdata *u) {
+ 
+     u->card->userdata = u;
+     u->card->set_profile = set_profile_cb;
++    pa_card_put(u->card);
+ 
+     p = PA_CARD_PROFILE_DATA(u->card->active_profile);
+     u->profile = *p;
+diff --git a/src/modules/macosx/module-coreaudio-device.c b/src/modules/macosx/module-coreaudio-device.c
+index 4bbb5d5..41f151f 100644
+--- a/src/modules/macosx/module-coreaudio-device.c
++++ b/src/modules/macosx/module-coreaudio-device.c
+@@ -764,6 +764,7 @@ int pa__init(pa_module *m) {
+     pa_card_new_data_done(&card_new_data);
+     u->card->userdata = u;
+     u->card->set_profile = card_set_profile;
++    pa_card_put(u->card);
+ 
+     u->rtpoll = pa_rtpoll_new();
+     pa_thread_mq_init(&u->thread_mq, m->core->mainloop, u->rtpoll);
+diff --git a/src/modules/module-card-restore.c b/src/modules/module-card-restore.c
+index baa2f4f..0501ac8 100644
+--- a/src/modules/module-card-restore.c
++++ b/src/modules/module-card-restore.c
+@@ -485,34 +485,38 @@ static pa_hook_result_t port_offset_change_callback(pa_core *c, pa_device_port *
+     return PA_HOOK_OK;
+ }
+ 
+-static pa_hook_result_t card_new_hook_callback(pa_core *c, pa_card_new_data *new_data, struct userdata *u) {
++static pa_hook_result_t card_new_hook_callback(pa_core *c, pa_card *card, struct userdata *u) {
+     struct entry *e;
+     void *state;
+     pa_device_port *p;
+     struct port_info *p_info;
+ 
+-    pa_assert(new_data);
++    pa_assert(c);
++    pa_assert(card);
++    pa_assert(u);
+ 
+-    if (!(e = entry_read(u, new_data->name)))
++    if (!(e = entry_read(u, card->name)))
+         return PA_HOOK_OK;
+ 
+     if (e->profile[0]) {
+-        if (!new_data->active_profile) {
+-            pa_card_new_data_set_profile(new_data, e->profile);
+-            pa_log_info("Restored profile '%s' for card %s.", new_data->active_profile, new_data->name);
+-            new_data->save_profile = true;
++        pa_card_profile *profile;
+ 
++        profile = pa_hashmap_get(card->profiles, e->profile);
++        if (profile) {
++            pa_card_set_profile(card, profile, true);
++            pa_log_info("Restored profile '%s' for card %s.", card->active_profile->name, card->name);
+         } else
+-            pa_log_debug("Not restoring profile for card %s, because already set.", new_data->name);
++            pa_log_debug("Tried to restore profile %s for card %s, but the card doesn't have such profile.",
++                         e->profile, card->name);
+     }
+ 
+     /* Always restore the latency offsets because their
+      * initial value is always 0 */
+ 
+-    pa_log_info("Restoring port latency offsets for card %s.", new_data->name);
++    pa_log_info("Restoring port latency offsets for card %s.", card->name);
+ 
+     PA_HASHMAP_FOREACH(p_info, e->ports, state)
+-        if ((p = pa_hashmap_get(new_data->ports, p_info->name)))
++        if ((p = pa_hashmap_get(card->ports, p_info->name)))
+             p->latency_offset = p_info->offset;
+ 
+     entry_free(e);
+diff --git a/src/pulsecore/card.c b/src/pulsecore/card.c
+index cc4c784..1b7f71b 100644
+--- a/src/pulsecore/card.c
++++ b/src/pulsecore/card.c
+@@ -151,6 +151,7 @@ pa_card *pa_card_new(pa_core *core, pa_card_new_data *data) {
+     pa_assert(!pa_hashmap_isempty(data->profiles));
+ 
+     c = pa_xnew(pa_card, 1);
++    c->state = PA_CARD_STATE_INIT;
+ 
+     if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_CARD, c, data->namereg_fail))) {
+         pa_xfree(c);
+@@ -159,12 +160,6 @@ pa_card *pa_card_new(pa_core *core, pa_card_new_data *data) {
+ 
+     pa_card_new_data_set_name(data, name);
+ 
+-    if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_CARD_NEW], data) < 0) {
+-        pa_xfree(c);
+-        pa_namereg_unregister(core, name);
+-        return NULL;
+-    }
+-
+     c->core = core;
+     c->name = pa_xstrdup(data->name);
+     c->proplist = pa_proplist_copy(data->proplist);
+@@ -187,30 +182,6 @@ pa_card *pa_card_new(pa_core *core, pa_card_new_data *data) {
+     PA_HASHMAP_FOREACH(port, c->ports, state)
+         port->card = c;
+ 
+-    c->active_profile = NULL;
+-    c->save_profile = false;
+-
+-    if (data->active_profile)
+-        if ((c->active_profile = pa_hashmap_get(c->profiles, data->active_profile)))
+-            c->save_profile = data->save_profile;
+-
+-    if (!c->active_profile) {
+-        PA_HASHMAP_FOREACH(profile, c->profiles, state) {
+-            if (profile->available == PA_AVAILABLE_NO)
+-                continue;
+-
+-            if (!c->active_profile || profile->priority > c->active_profile->priority)
+-                c->active_profile = profile;
+-        }
+-        /* If all profiles are not available, then we still need to pick one */
+-        if (!c->active_profile) {
+-            PA_HASHMAP_FOREACH(profile, c->profiles, state)
+-                if (!c->active_profile || profile->priority > c->active_profile->priority)
+-                    c->active_profile = profile;
+-        }
+-        pa_assert(c->active_profile);
+-    }
+-
+     c->userdata = NULL;
+     c->set_profile = NULL;
+     c->active_profile = NULL;
+@@ -219,13 +190,39 @@ pa_card *pa_card_new(pa_core *core, pa_card_new_data *data) {
+     pa_device_init_icon(c->proplist, true);
+     pa_device_init_intended_roles(c->proplist);
+ 
+-    pa_assert_se(pa_idxset_put(core->cards, c, &c->index) >= 0);
++    return c;
++}
+ 
+-    pa_log_info("Created %u \"%s\"", c->index, c->name);
+-    pa_subscription_post(core, PA_SUBSCRIPTION_EVENT_CARD|PA_SUBSCRIPTION_EVENT_NEW, c->index);
++void pa_card_put(pa_card *card) {
++    pa_card_profile *profile;
++    void *state;
+ 
+-    pa_hook_fire(&core->hooks[PA_CORE_HOOK_CARD_PUT], c);
+-    return c;
++    pa_assert(card);
++
++    PA_HASHMAP_FOREACH(profile, card->profiles, state) {
++        if (profile->available == PA_AVAILABLE_NO)
++            continue;
++
++        if (!card->active_profile || profile->priority > card->active_profile->priority)
++            card->active_profile = profile;
++    }
++
++    /* If all profiles are unavailable, then we still need to pick one */
++    if (!card->active_profile) {
++        PA_HASHMAP_FOREACH(profile, card->profiles, state)
++            if (!card->active_profile || profile->priority > card->active_profile->priority)
++                card->active_profile = profile;
++    }
++    pa_assert(card->active_profile);
++
++    pa_hook_fire(&card->core->hooks[PA_CORE_HOOK_CARD_NEW], card);
++
++    pa_assert_se(pa_idxset_put(card->core->cards, card, &card->index) >= 0);
++    card->state = PA_CARD_STATE_LINKED;
++
++    pa_log_info("Created %u \"%s\"", card->index, card->name);
++    pa_hook_fire(&card->core->hooks[PA_CORE_HOOK_CARD_PUT], card);
++    pa_subscription_post(card->core, PA_SUBSCRIPTION_EVENT_CARD|PA_SUBSCRIPTION_EVENT_NEW, card->index);
+ }
+ 
+ void pa_card_free(pa_card *c) {
+@@ -292,17 +289,24 @@ int pa_card_set_profile(pa_card *c, pa_card_profile *profile, bool save) {
+         return 0;
+     }
+ 
+-    if ((r = c->set_profile(c, profile)) < 0)
++    /* If we're setting the initial profile, we shouldn't call set_profile(),
++     * because the implementations don't expect that (for historical reasons).
++     * We should just set c->active_profile, and the implementations will
++     * properly set up that profile after pa_card_put() has returned. It would
++     * be probably good to change this so that also the initial profile can be
++     * set up in set_profile(), but if set_profile() fails, that would need
++     * some better handling than what we do here currently. */
++    if (c->state != PA_CARD_STATE_INIT && (r = c->set_profile(c, profile)) < 0)
+         return r;
+ 
+-    pa_subscription_post(c->core, PA_SUBSCRIPTION_EVENT_CARD|PA_SUBSCRIPTION_EVENT_CHANGE, c->index);
+-
+-    pa_log_info("Changed profile of card %u \"%s\" to %s", c->index, c->name, profile->name);
+-
+     c->active_profile = profile;
+     c->save_profile = save;
+ 
+-    pa_hook_fire(&c->core->hooks[PA_CORE_HOOK_CARD_PROFILE_CHANGED], c);
++    if (c->state != PA_CARD_STATE_INIT) {
++        pa_log_info("Changed profile of card %u \"%s\" to %s", c->index, c->name, profile->name);
++        pa_hook_fire(&c->core->hooks[PA_CORE_HOOK_CARD_PROFILE_CHANGED], c);
++        pa_subscription_post(c->core, PA_SUBSCRIPTION_EVENT_CARD|PA_SUBSCRIPTION_EVENT_CHANGE, c->index);
++    }
+ 
+     return 0;
+ }
+diff --git a/src/pulsecore/card.h b/src/pulsecore/card.h
+index 1c33958..dbbc1c2 100644
+--- a/src/pulsecore/card.h
++++ b/src/pulsecore/card.h
+@@ -37,6 +37,11 @@ typedef enum pa_available {
+ #include <pulsecore/module.h>
+ #include <pulsecore/idxset.h>
+ 
++typedef enum pa_card_state {
++    PA_CARD_STATE_INIT,
++    PA_CARD_STATE_LINKED,
++} pa_card_state_t;
++
+ typedef struct pa_card_profile {
+     pa_card *card;
+     char *name;
+@@ -61,6 +66,7 @@ typedef struct pa_card_profile {
+ 
+ struct pa_card {
+     uint32_t index;
++    pa_card_state_t state;
+     pa_core *core;
+ 
+     char *name;
+@@ -115,6 +121,7 @@ void pa_card_new_data_set_profile(pa_card_new_data *data, const char *profile);
+ void pa_card_new_data_done(pa_card_new_data *data);
+ 
+ pa_card *pa_card_new(pa_core *c, pa_card_new_data *data);
++void pa_card_put(pa_card *c);
+ void pa_card_free(pa_card *c);
+ 
+ void pa_card_add_profile(pa_card *c, pa_card_profile *profile);
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0004-alsa-set-availability-for-some-unavailable-profiles.patch b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0004-alsa-set-availability-for-some-unavailable-profiles.patch
new file mode 100644
index 0000000..bb318aa
--- /dev/null
+++ b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0004-alsa-set-availability-for-some-unavailable-profiles.patch
@@ -0,0 +1,75 @@
+From 0136b73158f60d5dc630ae348b18df3b59a2a5c2 Mon Sep 17 00:00:00 2001
+From: Tanu Kaskinen <tanuk@iki.fi>
+Date: Fri, 23 Oct 2015 13:37:11 +0300
+Subject: [PATCH 4/4] alsa: set availability for (some) unavailable profiles
+
+The alsa card hasn't so far set any availability for profiles. That
+caused an issue with some HDMI hardware: the sound card has two HDMI
+outputs, but only the second of them is actually usable. The
+unavailable port is marked as unavailable and the available port is
+marked as available, but this information isn't propagated to the
+profile availability. Without profile availability information, the
+initial profile policy picks the unavailable one, since it has a
+higher priority value.
+
+This patch adds simple logic for marking some profiles unavailable:
+if the profile only contains unavailable ports, the profile is
+unavailable too. This can be improved in the future so that if a
+profile contains sinks or sources that only contain unavailable ports,
+the profile should be marked as unavailable. Implementing that
+requires adding more information about the sinks and sources to
+pa_card_profile, however.
+
+BugLink: https://bugzilla.yoctoproject.org/show_bug.cgi?id=8448
+
+Upstream-Status: Submitted [http://lists.freedesktop.org/archives/pulseaudio-discuss/2015-October/024614.html]
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ src/modules/alsa/module-alsa-card.c | 24 ++++++++++++++++++++++++
+ 1 file changed, 24 insertions(+)
+
+diff --git a/src/modules/alsa/module-alsa-card.c b/src/modules/alsa/module-alsa-card.c
+index 5b39654..73a846c 100644
+--- a/src/modules/alsa/module-alsa-card.c
++++ b/src/modules/alsa/module-alsa-card.c
+@@ -366,6 +366,7 @@ static int report_jack_state(snd_mixer_elem_t *melem, unsigned int mask) {
+     void *state;
+     pa_alsa_jack *jack;
+     pa_device_port *port;
++    pa_card_profile *profile;
+ 
+     pa_assert(u);
+ 
+@@ -396,6 +397,29 @@ static int report_jack_state(snd_mixer_elem_t *melem, unsigned int mask) {
+             }
+             report_port_state(port, u);
+         }
++
++    /* Update profile availabilities. The logic could be improved; for now we
++     * only set obviously unavailable profiles (those that contain only
++     * unavailable ports) to PA_AVAILABLE_NO and all others to
++     * PA_AVAILABLE_UNKNOWN. */
++    PA_HASHMAP_FOREACH(profile, u->card->profiles, state) {
++        void *state2;
++        pa_available_t available = PA_AVAILABLE_NO;
++
++        /* Don't touch the "off" profile. */
++        if (pa_hashmap_size(profile->ports) == 0)
++            continue;
++
++        PA_HASHMAP_FOREACH(port, profile->ports, state2) {
++            if (port->available != PA_AVAILABLE_NO) {
++                available = PA_AVAILABLE_UNKNOWN;
++                break;
++            }
++        }
++
++        pa_card_profile_set_available(profile, available);
++    }
++
+     return 0;
+ }
+ 
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_6.0.bb b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_6.0.bb
index 31e9096..ec629aa 100644
--- a/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_6.0.bb
+++ b/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_6.0.bb
@@ -6,6 +6,10 @@
            file://0001-conf-parser-add-support-for-.d-directories.patch \
            file://fix-git-version-gen.patch \
            file://volatiles.04_pulse \
+           file://0001-card-add-pa_card_profile.ports.patch \
+           file://0002-alsa-bluetooth-fail-if-user-requested-profile-doesn-.patch \
+           file://0003-card-move-profile-selection-after-pa_card_new.patch \
+           file://0004-alsa-set-availability-for-some-unavailable-profiles.patch \
 "
 SRC_URI[md5sum] = "b691e83b7434c678dffacfa3a027750e"
 SRC_URI[sha256sum] = "b50640e0b80b1607600accfad2e45aabb79d379bf6354c9671efa2065477f6f6"
diff --git a/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7.inc b/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7.inc
index 5257e76..d165514 100644
--- a/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7.inc
+++ b/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7.inc
@@ -21,10 +21,10 @@
            file://0018-configure-make-pulseaudio-a-configurable-option.patch \
            file://0019-Fixes-for-gcc-4.7.0-particularly-on-qemux86.patch \
            file://0027-tools.pro-disable-qmeegographicssystemhelper.patch \
-           file://0028-Don-t-crash-on-broken-GIF-images.patch \
            file://0030-aarch64_arm64_qatomic_support.patch \
            file://0031-aarch64_arm64_mkspecs.patch \
            file://0032-aarch64_add_header.patch \
+           file://0034-Fix-kmap2qmap-build-with-clang.patch \
            file://Fix-QWSLock-invalid-argument-logs.patch \
            file://add_check_for_aarch64_32.patch \
            file://g++.conf \
diff --git a/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0028-Don-t-crash-on-broken-GIF-images.patch b/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0028-Don-t-crash-on-broken-GIF-images.patch
deleted file mode 100644
index 906e2fd..0000000
--- a/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0028-Don-t-crash-on-broken-GIF-images.patch
+++ /dev/null
@@ -1,47 +0,0 @@
-From f1b76c126c476c155af8c404b97c42cd1a709333 Mon Sep 17 00:00:00 2001
-From: Lars Knoll <lars.knoll@digia.com>
-Date: Thu, 24 Apr 2014 15:33:27 +0200
-Subject: [PATCH] Don't crash on broken GIF images
-
-Broken GIF images could set invalid width and height
-values inside the image, leading to Qt creating a null
-QImage for it. In that case we need to abort decoding
-the image and return an error.
-
-Initial patch by Rich Moore.
-
-Backport of Id82a4036f478bd6e49c402d6598f57e7e5bb5e1e from Qt 5
-
-Task-number: QTBUG-38367
-Change-Id: I0680740018aaa8356d267b7af3f01fac3697312a
-Security-advisory: CVE-2014-0190
-Reviewed-by: Richard J. Moore <rich@kde.org>
-
-Upstream-Status: Backport
-Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
-
----
- src/gui/image/qgifhandler.cpp | 7 +++++++
- 1 file changed, 7 insertions(+)
-
-diff --git a/src/gui/image/qgifhandler.cpp b/src/gui/image/qgifhandler.cpp
-index 3324f04..5199dd3 100644
---- a/src/gui/image/qgifhandler.cpp
-+++ b/src/gui/image/qgifhandler.cpp
-@@ -359,6 +359,13 @@ int QGIFFormat::decode(QImage *image, const uchar *buffer, int length,
-                     memset(bits, 0, image->byteCount());
-                 }
- 
-+                // Check if the previous attempt to create the image failed. If it
-+                // did then the image is broken and we should give up.
-+                if (image->isNull()) {
-+                    state = Error;
-+                    return -1;
-+                }
-+
-                 disposePrevious(image);
-                 disposed = false;
- 
--- 
-1.9.3
-
diff --git a/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0034-Fix-kmap2qmap-build-with-clang.patch b/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0034-Fix-kmap2qmap-build-with-clang.patch
new file mode 100644
index 0000000..f47a1d9
--- /dev/null
+++ b/yocto-poky/meta/recipes-qt/qt4/qt4-4.8.7/0034-Fix-kmap2qmap-build-with-clang.patch
@@ -0,0 +1,34 @@
+From: Samuel Gaist <samuel.gaist@edeltech.ch>
+Date: Wed, 4 Mar 2015 20:16:50 +0000 (+0100)
+Subject: Fix kmap2qmap build on OS X
+X-Git-Tag: v5.4.2~6
+X-Git-Url: https://codereview.qt-project.org/gitweb?p=qt%2Fqttools.git;a=commitdiff_plain;h=cf196a2565235f649b88fac55b53270bea23458d;hp=3070815a24239bd0f469bfeb8d0a1f091974e28e
+
+Fix kmap2qmap build on OS X
+
+Currently kmap2qmap fails to build on OS X (clang) This patch aims to
+fix this.
+
+Change-Id: I61c985dc7ad1f2486368c39aa976599d274942ab
+Reviewed-by: Friedemann Kleint <Friedemann.Kleint@theqtcompany.com>
+---
+Upstream-Status: Backport
+Index: qt-everywhere-opensource-src-4.8.7/tools/kmap2qmap/main.cpp
+===================================================================
+--- qt-everywhere-opensource-src-4.8.7.orig/tools/kmap2qmap/main.cpp
++++ qt-everywhere-opensource-src-4.8.7/tools/kmap2qmap/main.cpp
+@@ -385,9 +385,11 @@ static const int symbol_synonyms_size =
+ 
+ // makes the generated array in --header mode a bit more human readable
+ QT_BEGIN_NAMESPACE
+-static bool operator<(const QWSKeyboard::Mapping &m1, const QWSKeyboard::Mapping &m2)
+-{
+-    return m1.keycode != m2.keycode ? m1.keycode < m2.keycode : m1.modifiers < m2.modifiers;
++namespace QWSKeyboard {
++    static bool operator<(const Mapping &m1, const Mapping &m2)
++    {
++        return m1.keycode != m2.keycode ? m1.keycode < m2.keycode : m1.modifiers < m2.modifiers;
++    }
+ }
+ QT_END_NAMESPACE
+ 
diff --git a/yocto-poky/meta/recipes-sato/gtk-engines/gtk-sato-engine.inc b/yocto-poky/meta/recipes-sato/gtk-engines/gtk-sato-engine.inc
index 4e37ff2..fa6b2b2 100644
--- a/yocto-poky/meta/recipes-sato/gtk-engines/gtk-sato-engine.inc
+++ b/yocto-poky/meta/recipes-sato/gtk-engines/gtk-sato-engine.inc
@@ -8,6 +8,9 @@
 DEPENDS = "gtk+"
 RDEPENDS_gtk-theme-sato = "gtk-sato-engine"
 
+inherit distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
+
 PACKAGES += "gtk-theme-sato"
 FILES_${PN} = "${libdir}/gtk-2.0/*/engines/*.so "
 FILES_${PN}-dev = "${libdir}/gtk-2.0/*/engines/*.la"
diff --git a/yocto-poky/meta/recipes-sato/leafpad/leafpad_0.8.18.1.bb b/yocto-poky/meta/recipes-sato/leafpad/leafpad_0.8.18.1.bb
index f715149..093b89f 100644
--- a/yocto-poky/meta/recipes-sato/leafpad/leafpad_0.8.18.1.bb
+++ b/yocto-poky/meta/recipes-sato/leafpad/leafpad_0.8.18.1.bb
@@ -6,7 +6,7 @@
                     file://src/leafpad.h;endline=20;md5=d3d6a89f5e61e8b13bdea537511ba1fa \
                     file://src/utils.c;endline=20;md5=0d2cc6584ba3202448bb274f62739571"
 
-DEPENDS = "gtk+ intltool-native libowl"
+DEPENDS = "gtk+ intltool-native libowl gettext-native"
 # The libowl requires x11 in DISTRO_FEATURES
 REQUIRED_DISTRO_FEATURES = "x11"
 
diff --git a/yocto-poky/meta/recipes-sato/matchbox-terminal/matchbox-terminal_git.bb b/yocto-poky/meta/recipes-sato/matchbox-terminal/matchbox-terminal_git.bb
index 91fd150..c8cbd57 100644
--- a/yocto-poky/meta/recipes-sato/matchbox-terminal/matchbox-terminal_git.bb
+++ b/yocto-poky/meta/recipes-sato/matchbox-terminal/matchbox-terminal_git.bb
@@ -15,4 +15,6 @@
 
 S = "${WORKDIR}/git"
 
-inherit autotools pkgconfig
+inherit autotools pkgconfig distro_features_check
+
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
diff --git a/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.3.bb b/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.3.bb
index 4467303..b63db87 100644
--- a/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.3.bb
+++ b/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.3.bb
@@ -7,7 +7,7 @@
                     file://src/gseal-gtk-compat.h;endline=21;md5=46922c8691f58d124f9420fe16149ce2"
 
 SECTION = "x11"
-DEPENDS = "gtk+ startup-notification libfm intltool-native"
+DEPENDS = "gtk+ startup-notification libfm intltool-native gettext-native"
 DEPENDS_append_poky = " libowl"
 
 
diff --git a/yocto-poky/meta/recipes-sato/puzzles/oh-puzzles_git.bb b/yocto-poky/meta/recipes-sato/puzzles/oh-puzzles_git.bb
index 36cd2cc..9876fe0 100644
--- a/yocto-poky/meta/recipes-sato/puzzles/oh-puzzles_git.bb
+++ b/yocto-poky/meta/recipes-sato/puzzles/oh-puzzles_git.bb
@@ -8,7 +8,7 @@
                     file://src/tree234.c;endline=28;md5=b4feb1976feebf8f1379093ed52f2945"
 
 SECTION = "x11"
-DEPENDS = "gtk+ gconf intltool-native librsvg"
+DEPENDS = "gtk+ gconf intltool-native librsvg gettext-native"
 
 # libowl requires x11 in DISTRO_FEATURES
 DEPENDS_append_poky = " ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'libowl', '', d)}"
diff --git a/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.8.5.bb b/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.8.5.bb
index 82d670d..e29666a 100644
--- a/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.8.5.bb
+++ b/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.8.5.bb
@@ -18,7 +18,10 @@
 SRC_URI[md5sum] = "df79991848a5096d3a75289ebce547ae"
 SRC_URI[sha256sum] = "3d1f0c534935f43fd74df90f2648fcee672d60f1f57a30fa557a77891ae04d20"
 
-inherit cmake lib_package pkgconfig perlnative pythonnative
+inherit cmake lib_package pkgconfig perlnative pythonnative distro_features_check
+
+# depends on libxt
+REQUIRED_DISTRO_FEATURES = "x11"
 
 DEPENDS = "zlib enchant libsoup-2.4 curl libxml2 cairo libxslt libxt libidn gnutls \
            gtk+ gtk+3 gstreamer1.0 gstreamer1.0-plugins-base flex-native gperf-native sqlite3 \
@@ -26,33 +29,46 @@
 	   atk udev harfbuzz jpeg libpng pulseaudio librsvg libtheora libvorbis libxcomposite libxtst \
 	   ruby-native libsecret libnotify gstreamer1.0-plugins-bad \
           "
-DEPENDS += " ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'virtual/libgl', '', d)}"
+
+PACKAGECONFIG ??= "${@base_contains('DISTRO_FEATURES', 'x11', 'x11', 'wayland' ,d)} \
+                   ${@base_contains('DISTRO_FEATURES', 'opengl', 'webgl', '' ,d)}"
+
+PACKAGECONFIG[wayland] = "-DENABLE_WAYLAND_TARGET=ON,-DENABLE_WAYLAND_TARGET=OFF,wayland"
+PACKAGECONFIG[x11] = "-DENABLE_X11_TARGET=ON,-DENABLE_X11_TARGET=OFF,virtual/libx11"
+PACKAGECONFIG[geoclue] = "-DENABLE_GEOLOCATION=ON,-DENABLE_GEOLOCATION=OFF,geoclue"
+PACKAGECONFIG[enchant] = "-DENABLE_SPELLCHECK=ON,-DENABLE_SPELLCHECK=OFF,enchant"
+PACKAGECONFIG[gtk2] = "-DENABLE_PLUGIN_PROCESS_GTK2=ON,-DENABLE_PLUGIN_PROCESS_GTK2=OFF,gtk+"
+PACKAGECONFIG[gles2] = "-DENABLE_GLES2=ON,-DENABLE_GLES2=OFF,virtual/libgles2"
+PACKAGECONFIG[webgl] = "-DENABLE_WEBGL=ON,-DENABLE_WEBGL=OFF,virtual/libgl"
+PACKAGECONFIG[libsecret] = "-DENABLE_CREDENTIAL_STORAGE=ON,-DENABLE_CREDENTIAL_STORAGE=OFF,libsecret"
 
 EXTRA_OECMAKE = " \
 		-DPORT=GTK \
 		-DCMAKE_BUILD_TYPE=Release \
-		-DENABLE_INTROSPECTION=False \
-		-DENABLE_MINIBROWSER=True \
-	        ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', '-DENABLE_WEBGL=True', '-DENABLE_WEBGL=False', d)} \
+		-DENABLE_INTROSPECTION=OFF \
+		-DENABLE_GTKDOC=OFF \
+		-DENABLE_MINIBROWSER=ON \
 		"
 
 # Javascript JIT is not supported on powerpc
-EXTRA_OECMAKE_append_powerpc = " -DENABLE_JIT=False "
-EXTRA_OECMAKE_append_powerpc64 = " -DENABLE_JIT=False "
+EXTRA_OECMAKE_append_powerpc = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE_append_powerpc64 = " -DENABLE_JIT=OFF "
 
 # ARM JIT code does not build on ARMv5/6 anymore, apparently they test only on v7 onwards
-EXTRA_OECMAKE_append_armv5 = " -DENABLE_JIT=False "
-EXTRA_OECMAKE_append_armv6 = " -DENABLE_JIT=False "
+EXTRA_OECMAKE_append_armv5 = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE_append_armv6 = " -DENABLE_JIT=OFF "
 
 # binutils 2.25.1 has a bug on aarch64:
 # https://sourceware.org/bugzilla/show_bug.cgi?id=18430
-EXTRA_OECMAKE_append_aarch64 = " -DUSE_LD_GOLD=False "
+EXTRA_OECMAKE_append_aarch64 = " -DUSE_LD_GOLD=OFF "
 
 # JIT not supported on MIPS either
-EXTRA_OECMAKE_append_mips = " -DENABLE_JIT=False "
-EXTRA_OECMAKE_append_mips64 = " -DENABLE_JIT=False "
+EXTRA_OECMAKE_append_mips = " -DENABLE_JIT=OFF "
+EXTRA_OECMAKE_append_mips64 = " -DENABLE_JIT=OFF "
 
 FILES_${PN} += "${libdir}/webkit2gtk-4.0/injected-bundle/libwebkit2gtkinjectedbundle.so"
 FILES_${PN}-dbg += "${libdir}/webkit2gtk-4.0/injected-bundle/.debug/libwebkit2gtkinjectedbundle.so"
 FILES_${PN}-dbg += "${libdir}/webkitgtk/webkit2gtk-4.0/.debug/*"
 
+# http://errors.yoctoproject.org/Errors/Details/20370/
+ARM_INSTRUCTION_SET = "arm"
diff --git a/yocto-poky/meta/recipes-support/apr/apr-util_1.5.4.bb b/yocto-poky/meta/recipes-support/apr/apr-util_1.5.4.bb
index 6a14d14..a36f13d 100644
--- a/yocto-poky/meta/recipes-support/apr/apr-util_1.5.4.bb
+++ b/yocto-poky/meta/recipes-support/apr/apr-util_1.5.4.bb
@@ -60,6 +60,12 @@
 	sed -i "s#\(LIBTOOL=\$(apr_builddir)\).*#\1/libtool#" ${S}/build/rules.mk
 }
 
+do_install_append_class-target() {
+	sed -i -e 's,${STAGING_DIR_HOST},,g' \
+	       -e 's,APU_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
+	       -e 's,APU_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apu-1-config
+}
+
 FILES_${PN}     += "${libdir}/apr-util-1/apr_dbm_gdbm-1.so"
 FILES_${PN}-dev += "${libdir}/aprutil.exp ${libdir}/apr-util-1/apr_dbm_gdbm.so* ${libdir}/apr-util-1/apr_dbm_gdbm.la"
 FILES_${PN}-dbg += "${libdir}/apr-util-1/.debug/*"
diff --git a/yocto-poky/meta/recipes-support/apr/apr_1.5.2.bb b/yocto-poky/meta/recipes-support/apr/apr_1.5.2.bb
index c1f7f38..1c61e84 100644
--- a/yocto-poky/meta/recipes-support/apr/apr_1.5.2.bb
+++ b/yocto-poky/meta/recipes-support/apr/apr_1.5.2.bb
@@ -32,6 +32,11 @@
 #
 CACHED_CONFIGUREVARS += "ac_cv_header_netinet_sctp_h=no ac_cv_header_netinet_sctp_uio_h=no"
 
+# Otherwise libtool fails to compile apr-utils
+# x86_64-linux-libtool: compile: unable to infer tagged configuration
+# x86_64-linux-libtool:   error: specify a tag with '--tag'
+CCACHE = ""
+
 do_configure_prepend() {
 	# Avoid absolute paths for grep since it causes failures
 	# when using sstate between different hosts with different
@@ -55,7 +60,13 @@
 do_install_append() {
 	oe_multilib_header apr.h
 	install -d ${D}${datadir}/apr
-	cp ${S}/${HOST_SYS}-libtool ${D}${datadir}/build-1/libtool
+}
+
+do_install_append_class-target() {
+	sed -i -e 's,${STAGING_DIR_HOST},,g' ${D}${datadir}/build-1/apr_rules.mk
+	sed -i -e 's,${STAGING_DIR_HOST},,g' \
+	       -e 's,APR_SOURCE_DIR=.*,APR_SOURCE_DIR=,g' \
+	       -e 's,APR_BUILD_DIR=.*,APR_BUILD_DIR=,g' ${D}${bindir}/apr-1-config
 }
 
 SSTATE_SCAN_FILES += "apr_rules.mk libtool"
@@ -73,6 +84,7 @@
 	cp ${S}/build/mkdir.sh $d/
 	cp ${S}/build/make_exports.awk $d/
 	cp ${S}/build/make_var_export.awk $d/
+	cp ${S}/${HOST_SYS}-libtool ${SYSROOT_DESTDIR}${datadir}/build-1/libtool
 }
 
 do_compile_ptest() {
diff --git a/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.16.0.bb b/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.16.0.bb
index 933cbe7..7c12b54 100644
--- a/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.16.0.bb
+++ b/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.16.0.bb
@@ -11,7 +11,7 @@
 SRC_URI[md5sum] = "be6eeea370f913b7639b609913b2cf02"
 SRC_URI[sha256sum] = "1c0b77fb8ce81abbf1d80c0afee9858b3f9229f673b7881995fe0fc16b1a74d0"
 
-DEPENDS = "dbus glib-2.0 virtual/libx11 libxi libxtst intltool-native"
+DEPENDS = "dbus glib-2.0 virtual/libx11 libxi libxtst intltool-native gettext-native"
 
 inherit autotools gtk-doc pkgconfig distro_features_check
 # depends on virtual/libx11
diff --git a/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0754.patch b/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0754.patch
new file mode 100644
index 0000000..f0402de
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0754.patch
@@ -0,0 +1,417 @@
+From b1bb4ca6d8777683b6a549fb61dba36759da26f4 Mon Sep 17 00:00:00 2001
+From: Ray Satiro <raysatiro@yahoo.com>
+Date: Tue, 26 Jan 2016 23:23:15 +0100
+Subject: [PATCH] curl: avoid local drive traversal when saving file (Windows)
+
+curl does not sanitize colons in a remote file name that is used as the
+local file name. This may lead to a vulnerability on systems where the
+colon is a special path character. Currently Windows/DOS is the only OS
+where this vulnerability applies.
+
+CVE-2016-0754
+
+Bug: http://curl.haxx.se/docs/adv_20160127B.html
+
+Upstream-Status: Backport
+http://curl.haxx.se/CVE-2016-0754.patch
+
+CVE: CVE-2016-0754
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ src/tool_cb_hdr.c  |  40 ++++++------
+ src/tool_doswin.c  | 174 ++++++++++++++++++++++++++++++++++++++++++++---------
+ src/tool_doswin.h  |   2 +-
+ src/tool_operate.c |  29 ++++++---
+ 4 files changed, 187 insertions(+), 58 deletions(-)
+
+diff --git a/src/tool_cb_hdr.c b/src/tool_cb_hdr.c
+index fd208e8..0fca39f 100644
+--- a/src/tool_cb_hdr.c
++++ b/src/tool_cb_hdr.c
+@@ -26,10 +26,11 @@
+ #define ENABLE_CURLX_PRINTF
+ /* use our own printf() functions */
+ #include "curlx.h"
+ 
+ #include "tool_cfgable.h"
++#include "tool_doswin.h"
+ #include "tool_msgs.h"
+ #include "tool_cb_hdr.h"
+ 
+ #include "memdebug.h" /* keep this as LAST include */
+ 
+@@ -112,22 +113,28 @@ size_t tool_header_cb(void *ptr, size_t size, size_t nmemb, void *userdata)
+       /* this expression below typecasts 'cb' only to avoid
+          warning: signed and unsigned type in conditional expression
+       */
+       len = (ssize_t)cb - (p - str);
+       filename = parse_filename(p, len);
+-      if(filename) {
+-        outs->filename = filename;
+-        outs->alloc_filename = TRUE;
+-        outs->is_cd_filename = TRUE;
+-        outs->s_isreg = TRUE;
+-        outs->fopened = FALSE;
+-        outs->stream = NULL;
+-        hdrcbdata->honor_cd_filename = FALSE;
+-        break;
+-      }
+-      else
++      if(!filename)
++        return failure;
++
++#if defined(MSDOS) || defined(WIN32)
++      if(sanitize_file_name(&filename)) {
++        free(filename);
+         return failure;
++      }
++#endif /* MSDOS || WIN32 */
++
++      outs->filename = filename;
++      outs->alloc_filename = TRUE;
++      outs->is_cd_filename = TRUE;
++      outs->s_isreg = TRUE;
++      outs->fopened = FALSE;
++      outs->stream = NULL;
++      hdrcbdata->honor_cd_filename = FALSE;
++      break;
+     }
+   }
+ 
+   return cb;
+ }
+@@ -179,19 +186,16 @@ static char *parse_filename(const char *ptr, size_t len)
+       return NULL;
+     }
+   }
+ 
+   /* scan for the end letter and stop there */
+-  q = p;
+-  while(*q) {
+-    if(q[1] && (q[0] == '\\'))
+-      q++;
+-    else if(q[0] == stop)
++  for(q = p; *q; ++q) {
++    if(*q == stop) {
++      *q = '\0';
+       break;
+-    q++;
++    }
+   }
+-  *q = '\0';
+ 
+   /* make sure the file name doesn't end in \r or \n */
+   q = strchr(p, '\r');
+   if(q)
+     *q = '\0';
+diff --git a/src/tool_doswin.c b/src/tool_doswin.c
+index dd6e8bb..9c6a7a3 100644
+--- a/src/tool_doswin.c
++++ b/src/tool_doswin.c
+@@ -83,46 +83,110 @@ __pragma(warning(pop))
+ #  define _use_lfn(f) ALWAYS_FALSE  /* long file names never available */
+ #elif defined(__DJGPP__)
+ #  include <fcntl.h>                /* _use_lfn(f) prototype */
+ #endif
+ 
+-static const char *msdosify (const char *file_name);
+-static char *rename_if_dos_device_name (char *file_name);
++static char *msdosify(const char *file_name);
++static char *rename_if_dos_device_name(const char *file_name);
+ 
+-/*
+- * sanitize_dos_name: returns a newly allocated string holding a
+- * valid file name which will be a transformation of given argument
+- * in case this wasn't already a valid file name.
+- *
+- * This function takes ownership of given argument, free'ing it before
+- * returning. Caller is responsible of free'ing returned string. Upon
+- * out of memory condition function returns NULL.
+- */
+ 
+-char *sanitize_dos_name(char *file_name)
++/*
++Sanitize *file_name.
++Success: (CURLE_OK) *file_name points to a sanitized version of the original.
++         This function takes ownership of the original *file_name and frees it.
++Failure: (!= CURLE_OK) *file_name is unchanged.
++*/
++CURLcode sanitize_file_name(char **file_name)
+ {
+-  char new_name[PATH_MAX];
++  size_t len;
++  char *p, *sanitized;
++
++  /* Calculate the maximum length of a filename.
++     FILENAME_MAX is often the same as PATH_MAX, in other words it does not
++     discount the path information. PATH_MAX size is calculated based on:
++     <drive-letter><colon><path-sep><max-filename-len><NULL> */
++  const size_t max_filename_len = PATH_MAX - 3 - 1;
++
++  if(!file_name || !*file_name)
++    return CURLE_BAD_FUNCTION_ARGUMENT;
++
++  len = strlen(*file_name);
++
++  if(len >= max_filename_len)
++    len = max_filename_len - 1;
+ 
+-  if(!file_name)
+-    return NULL;
++  sanitized = malloc(len + 1);
+ 
+-  if(strlen(file_name) >= PATH_MAX)
+-    file_name[PATH_MAX-1] = '\0'; /* truncate it */
++  if(!sanitized)
++    return CURLE_OUT_OF_MEMORY;
+ 
+-  strcpy(new_name, msdosify(file_name));
++  strncpy(sanitized, *file_name, len);
++  sanitized[len] = '\0';
+ 
+-  Curl_safefree(file_name);
++  for(p = sanitized; *p; ++p ) {
++    const char *banned;
++    if(1 <= *p && *p <= 31) {
++      *p = '_';
++      continue;
++    }
++    for(banned = "|<>/\\\":?*"; *banned; ++banned) {
++      if(*p == *banned) {
++        *p = '_';
++        break;
++      }
++    }
++  }
+ 
+-  return strdup(rename_if_dos_device_name(new_name));
++#ifdef MSDOS
++  /* msdosify checks for more banned characters for MSDOS, however it allows
++     for some path information to pass through. since we are sanitizing only a
++     filename and cannot allow a path it's important this call be done in
++     addition to and not instead of the banned character check above. */
++  p = msdosify(sanitized);
++  if(!p) {
++    free(sanitized);
++    return CURLE_BAD_FUNCTION_ARGUMENT;
++  }
++  sanitized = p;
++  len = strlen(sanitized);
++#endif
++
++  p = rename_if_dos_device_name(sanitized);
++  if(!p) {
++    free(sanitized);
++    return CURLE_BAD_FUNCTION_ARGUMENT;
++  }
++  sanitized = p;
++  len = strlen(sanitized);
++
++  /* dos_device_name rename will rename a device name, possibly changing the
++     length. If the length is too long now we can't truncate it because we
++     could end up with a device name. In practice this shouldn't be a problem
++     because device names are short, but you never know. */
++  if(len >= max_filename_len) {
++    free(sanitized);
++    return CURLE_BAD_FUNCTION_ARGUMENT;
++  }
++
++  *file_name = sanitized;
++  return CURLE_OK;
+ }
+ 
+-/* The following functions are taken with modification from the DJGPP
+- * port of tar 1.12. They use algorithms originally from DJTAR. */
++/* The functions msdosify, rename_if_dos_device_name and __crt0_glob_function
++ * were taken with modification from the DJGPP port of tar 1.12. They use
++ * algorithms originally from DJTAR.
++ */
+ 
+-static const char *msdosify (const char *file_name)
++/*
++Extra sanitization MSDOS for file_name.
++Returns a copy of file_name that is sanitized by MSDOS standards.
++Warning: path information may pass through. For sanitizing a filename use
++sanitize_file_name which calls this function after sanitizing path info.
++*/
++static char *msdosify(const char *file_name)
+ {
+-  static char dos_name[PATH_MAX];
++  char dos_name[PATH_MAX];
+   static const char illegal_chars_dos[] = ".+, ;=[]" /* illegal in DOS */
+     "|<>\\\":?*"; /* illegal in DOS & W95 */
+   static const char *illegal_chars_w95 = &illegal_chars_dos[8];
+   int idx, dot_idx;
+   const char *s = file_name;
+@@ -199,39 +263,89 @@ static const char *msdosify (const char *file_name)
+     else
+       idx++;
+   }
+ 
+   *d = '\0';
+-  return dos_name;
++  return strdup(dos_name);
+ }
+ 
+-static char *rename_if_dos_device_name (char *file_name)
++/*
++Rename file_name if it's a representation of a device name.
++Returns a copy of file_name, and the copy will have contents different from the
++original if a device name was found.
++*/
++static char *rename_if_dos_device_name(const char *file_name)
+ {
+   /* We could have a file whose name is a device on MS-DOS.  Trying to
+    * retrieve such a file would fail at best and wedge us at worst.  We need
+    * to rename such files. */
+-  char *base;
++  char *p, *base;
+   struct_stat st_buf;
+   char fname[PATH_MAX];
+ 
+   strncpy(fname, file_name, PATH_MAX-1);
+   fname[PATH_MAX-1] = '\0';
+   base = basename(fname);
+   if(((stat(base, &st_buf)) == 0) && (S_ISCHR(st_buf.st_mode))) {
+     size_t blen = strlen(base);
+ 
+-    if(strlen(fname) >= PATH_MAX-1) {
++    if(strlen(fname) == PATH_MAX-1) {
+       /* Make room for the '_' */
+       blen--;
+       base[blen] = '\0';
+     }
+     /* Prepend a '_'.  */
+     memmove(base + 1, base, blen + 1);
+     base[0] = '_';
+-    strcpy(file_name, fname);
+   }
+-  return file_name;
++
++  /* The above stat check does not identify devices for me in Windows 7. For
++     example a stat on COM1 returns a regular file S_IFREG. According to MSDN
++     stat doc that is the correct behavior, so I assume the above code is
++     legacy, maybe MSDOS or DJGPP specific? */
++
++  /* Rename devices.
++     Examples: CON => _CON, CON.EXT => CON_EXT, CON:ADS => CON_ADS */
++  for(p = fname; p; p = (p == fname && fname != base ? base : NULL)) {
++    size_t p_len;
++    int x = (curl_strnequal(p, "CON", 3) ||
++             curl_strnequal(p, "PRN", 3) ||
++             curl_strnequal(p, "AUX", 3) ||
++             curl_strnequal(p, "NUL", 3)) ? 3 :
++            (curl_strnequal(p, "CLOCK$", 6)) ? 6 :
++            (curl_strnequal(p, "COM", 3) || curl_strnequal(p, "LPT", 3)) ?
++              (('1' <= p[3] && p[3] <= '9') ? 4 : 3) : 0;
++
++    if(!x)
++      continue;
++
++    /* the devices may be accessible with an extension or ADS, for
++       example CON.AIR and CON:AIR both access console */
++    if(p[x] == '.' || p[x] == ':') {
++      p[x] = '_';
++      continue;
++    }
++    else if(p[x]) /* no match */
++      continue;
++
++    p_len = strlen(p);
++
++    if(strlen(fname) == PATH_MAX-1) {
++      /* Make room for the '_' */
++      p_len--;
++      p[p_len] = '\0';
++    }
++    /* Prepend a '_'.  */
++    memmove(p + 1, p, p_len + 1);
++    p[0] = '_';
++
++    /* if fname was just modified then the basename pointer must be updated */
++    if(p == fname)
++      base = basename(fname);
++  }
++
++  return strdup(fname);
+ }
+ 
+ #if defined(MSDOS) && (defined(__DJGPP__) || defined(__GO32__))
+ 
+ /*
+diff --git a/src/tool_doswin.h b/src/tool_doswin.h
+index cd216db..fc83f16 100644
+--- a/src/tool_doswin.h
++++ b/src/tool_doswin.h
+@@ -23,11 +23,11 @@
+  ***************************************************************************/
+ #include "tool_setup.h"
+ 
+ #if defined(MSDOS) || defined(WIN32)
+ 
+-char *sanitize_dos_name(char *file_name);
++CURLcode sanitize_file_name(char **filename);
+ 
+ #if defined(MSDOS) && (defined(__DJGPP__) || defined(__GO32__))
+ 
+ char **__crt0_glob_function(char *arg);
+ 
+diff --git a/src/tool_operate.c b/src/tool_operate.c
+index 30d60cb..272ebd4 100644
+--- a/src/tool_operate.c
++++ b/src/tool_operate.c
+@@ -541,30 +541,41 @@ static CURLcode operate_do(struct GlobalConfig *global,
+           if(!outfile) {
+             /* extract the file name from the URL */
+             result = get_url_file_name(&outfile, this_url);
+             if(result)
+               goto show_error;
++
++#if defined(MSDOS) || defined(WIN32)
++            result = sanitize_file_name(&outfile);
++            if(result) {
++              Curl_safefree(outfile);
++              goto show_error;
++            }
++#endif /* MSDOS || WIN32 */
++
+             if(!*outfile && !config->content_disposition) {
+               helpf(global->errors, "Remote file name has no length!\n");
+               result = CURLE_WRITE_ERROR;
+               goto quit_urls;
+             }
+-#if defined(MSDOS) || defined(WIN32)
+-            /* For DOS and WIN32, we do some major replacing of
+-               bad characters in the file name before using it */
+-            outfile = sanitize_dos_name(outfile);
+-            if(!outfile) {
+-              result = CURLE_OUT_OF_MEMORY;
+-              goto show_error;
+-            }
+-#endif /* MSDOS || WIN32 */
+           }
+           else if(urls) {
+             /* fill '#1' ... '#9' terms from URL pattern */
+             char *storefile = outfile;
+             result = glob_match_url(&outfile, storefile, urls);
+             Curl_safefree(storefile);
++
++#if defined(MSDOS) || defined(WIN32)
++            if(!result) {
++              result = sanitize_file_name(&outfile);
++              if(result) {
++                Curl_safefree(outfile);
++                goto show_error;
++              }
++            }
++#endif /* MSDOS || WIN32 */
++
+             if(result) {
+               /* bad globbing */
+               warnf(config->global, "bad output glob!\n");
+               goto quit_urls;
+             }
+-- 
+2.7.0
+
diff --git a/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0755.patch b/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0755.patch
new file mode 100644
index 0000000..44b9d9a
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/curl/curl/CVE-2016-0755.patch
@@ -0,0 +1,138 @@
+From d41dcba4e9b69d6b761e3460cc6ae7e8fd8f621f Mon Sep 17 00:00:00 2001
+From: Isaac Boukris <iboukris@gmail.com>
+Date: Wed, 13 Jan 2016 11:05:51 +0200
+Subject: [PATCH] NTLM: Fix ConnectionExists to compare Proxy credentials
+
+Proxy NTLM authentication should compare credentials when
+re-using a connection similar to host authentication, as it
+authenticate the connection.
+
+Example:
+curl -v -x http://proxy:port http://host/ -U good_user:good_pwd
+  --proxy-ntlm --next -x http://proxy:port http://host/
+    [-U fake_user:fake_pwd --proxy-ntlm]
+
+CVE-2016-0755
+
+Bug: http://curl.haxx.se/docs/adv_20160127A.html
+
+Upstream-Status: Backport
+http://curl.haxx.se/CVE-2016-0755.patch
+
+CVE: CVE-2016-0755
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ lib/url.c | 62 ++++++++++++++++++++++++++++++++++++++++----------------------
+ 1 file changed, 40 insertions(+), 22 deletions(-)
+
+Index: curl-7.44.0/lib/url.c
+===================================================================
+--- curl-7.44.0.orig/lib/url.c
++++ curl-7.44.0/lib/url.c
+@@ -3107,12 +3107,17 @@ ConnectionExists(struct SessionHandle *d
+   struct connectdata *check;
+   struct connectdata *chosen = 0;
+   bool canPipeline = IsPipeliningPossible(data, needle);
++  struct connectbundle *bundle;
++
+ #ifdef USE_NTLM
+-  bool wantNTLMhttp = ((data->state.authhost.want & CURLAUTH_NTLM) ||
+-                       (data->state.authhost.want & CURLAUTH_NTLM_WB)) &&
+-    (needle->handler->protocol & PROTO_FAMILY_HTTP) ? TRUE : FALSE;
++  bool wantNTLMhttp = ((data->state.authhost.want &
++                      (CURLAUTH_NTLM | CURLAUTH_NTLM_WB)) &&
++                      (needle->handler->protocol & PROTO_FAMILY_HTTP));
++  bool wantProxyNTLMhttp = (needle->bits.proxy_user_passwd &&
++                           ((data->state.authproxy.want &
++                           (CURLAUTH_NTLM | CURLAUTH_NTLM_WB)) &&
++                           (needle->handler->protocol & PROTO_FAMILY_HTTP)));
+ #endif
+-  struct connectbundle *bundle;
+ 
+   *force_reuse = FALSE;
+   *waitpipe = FALSE;
+@@ -3152,9 +3157,6 @@ ConnectionExists(struct SessionHandle *d
+     curr = bundle->conn_list->head;
+     while(curr) {
+       bool match = FALSE;
+-#if defined(USE_NTLM)
+-      bool credentialsMatch = FALSE;
+-#endif
+       size_t pipeLen;
+ 
+       /*
+@@ -3262,21 +3264,14 @@ ConnectionExists(struct SessionHandle *d
+           continue;
+       }
+ 
+-      if((!(needle->handler->flags & PROTOPT_CREDSPERREQUEST))
+-#ifdef USE_NTLM
+-         || (wantNTLMhttp || check->ntlm.state != NTLMSTATE_NONE)
+-#endif
+-        ) {
+-        /* This protocol requires credentials per connection or is HTTP+NTLM,
++      if(!(needle->handler->flags & PROTOPT_CREDSPERREQUEST)) {
++        /* This protocol requires credentials per connection,
+            so verify that we're using the same name and password as well */
+         if(!strequal(needle->user, check->user) ||
+            !strequal(needle->passwd, check->passwd)) {
+           /* one of them was different */
+           continue;
+         }
+-#if defined(USE_NTLM)
+-        credentialsMatch = TRUE;
+-#endif
+       }
+ 
+       if(!needle->bits.httpproxy || needle->handler->flags&PROTOPT_SSL ||
+@@ -3335,20 +3330,43 @@ ConnectionExists(struct SessionHandle *d
+            possible. (Especially we must not reuse the same connection if
+            partway through a handshake!) */
+         if(wantNTLMhttp) {
+-          if(credentialsMatch && check->ntlm.state != NTLMSTATE_NONE) {
+-            chosen = check;
++          if(!strequal(needle->user, check->user) ||
++             !strequal(needle->passwd, check->passwd))
++            continue;
++        }
++        else if(check->ntlm.state != NTLMSTATE_NONE) {
++          /* Connection is using NTLM auth but we don't want NTLM */
++          continue;
++        }
++
++        /* Same for Proxy NTLM authentication */
++        if(wantProxyNTLMhttp) {
++          if(!strequal(needle->proxyuser, check->proxyuser) ||
++             !strequal(needle->proxypasswd, check->proxypasswd))
++            continue;
++        }
++        else if(check->proxyntlm.state != NTLMSTATE_NONE) {
++          /* Proxy connection is using NTLM auth but we don't want NTLM */
++          continue;
++        }
+ 
++        if(wantNTLMhttp || wantProxyNTLMhttp) {
++          /* Credentials are already checked, we can use this connection */
++          chosen = check;
++
++          if((wantNTLMhttp &&
++             (check->ntlm.state != NTLMSTATE_NONE)) ||
++              (wantProxyNTLMhttp &&
++               (check->proxyntlm.state != NTLMSTATE_NONE))) {
+             /* We must use this connection, no other */
+             *force_reuse = TRUE;
+             break;
+           }
+-          else if(credentialsMatch)
+-            /* this is a backup choice */
+-            chosen = check;
++
++          /* Continue look up for a better connection */
+           continue;
+         }
+ #endif
+-
+         if(canPipeline) {
+           /* We can pipeline if we want to. Let's continue looking for
+              the optimal connection to use, i.e the shortest pipe that is not
diff --git a/yocto-poky/meta/recipes-support/curl/curl_7.44.0.bb b/yocto-poky/meta/recipes-support/curl/curl_7.44.0.bb
index b293303..419ed83 100644
--- a/yocto-poky/meta/recipes-support/curl/curl_7.44.0.bb
+++ b/yocto-poky/meta/recipes-support/curl/curl_7.44.0.bb
@@ -12,7 +12,9 @@
 # curl likes to set -g0 in CFLAGS, so we stop it
 # from mucking around with debug options
 #
-SRC_URI += " file://configure_ac.patch"
+SRC_URI += " file://configure_ac.patch \
+             file://CVE-2016-0754.patch \
+             file://CVE-2016-0755.patch"
 
 SRC_URI[md5sum] = "6b952ca00e5473b16a11f05f06aa8dae"
 SRC_URI[sha256sum] = "1e2541bae6582bb697c0fbae49e1d3e6fad5d05d5aa80dbd6f072e0a44341814"
@@ -45,6 +47,11 @@
 	oe_multilib_header curl/curlbuild.h
 }
 
+do_install_append_class-target() {
+	# cleanup buildpaths from curl-config
+	sed -i -e 's,${STAGING_DIR_HOST},,g' ${D}${bindir}/curl-config
+}
+
 PACKAGES =+ "lib${BPN}"
 
 FILES_lib${BPN} = "${libdir}/lib*.so.*"
diff --git a/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/Use-__gnu_inline__-attribute.patch b/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/Use-__gnu_inline__-attribute.patch
new file mode 100644
index 0000000..627d71a
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/Use-__gnu_inline__-attribute.patch
@@ -0,0 +1,36 @@
+From 3cb33502bafd04b8ad4ca3454fab16d5ff313297 Mon Sep 17 00:00:00 2001
+From: Jussi Kukkonen <jussi.kukkonen@intel.com>
+Date: Tue, 22 Sep 2015 13:16:23 +0300
+Subject: [PATCH]  Use __gnu_inline__ attribute
+
+gcc5 uses C11 inline rules. This means the old "extern inline"
+semantics are not available without a special attribute.
+
+See: https://gcc.gnu.org/gcc-5/porting_to.html
+
+Upstream-Status: Inappropriate [Fixed in current versions]
+Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
+---
+ gmp-h.in | 5 ++++-
+ 1 file changed, 4 insertions(+), 1 deletion(-)
+
+diff --git a/gmp-h.in b/gmp-h.in
+index eed6fe4..361dd1d 100644
+--- a/gmp-h.in
++++ b/gmp-h.in
+@@ -419,8 +419,11 @@ typedef __mpq_struct *mpq_ptr;
+ /* gcc has __inline__ in all modes, including strict ansi.  Give a prototype
+    for an inline too, so as to correctly specify "dllimport" on windows, in
+    case the function is called rather than inlined.  */
++
++/* Use __gnu_inline__ attribute: later gcc uses different "extern inline"
++   behaviour */
+ #ifdef __GNUC__
+-#define __GMP_EXTERN_INLINE      extern __inline__
++#define __GMP_EXTERN_INLINE      extern __inline__ __attribute__ ((__gnu_inline__))
+ #define __GMP_INLINE_PROTOTYPES  1
+ #endif
+ 
+-- 
+2.1.4
+
diff --git a/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/disable-stdc.patch b/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/disable-stdc.patch
deleted file mode 100644
index 5decb1c..0000000
--- a/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/disable-stdc.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-This patch was removed in f181c6ce8b3 when gmp 4.2.1 was mistakenly
-dropped.
-
-Upstream is not interested in patches for ancient versions.
-
-Upstream-Status: Inappropriate
-Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com>
-
-# "extern inline" in traditional gcc means that the function should be 
-# inlined wherever it's seen, while in C99, "extern inline" means that i
-# the function should only be inlined where the inline definition is 
-# seen while in other places it's not inlined:
-# http://gcc.gnu.org/ml/gcc/2006-11/msg00006.html
-#
-# gmp checks "--std=gnu99" to use C99 convention however it internally 
-# defines some "extern inline" functions in gmp.h, which is included
-# by mainly .c files and finally lead a flood of redefinition function
-# errors when linking objects together.
-#
-# So disable C99/ANSI detection to stick to tranditional gcc behavior
-#
-# by Kevin Tian <kevin.tian@intel.com>, 2010-08-13
-#
-# (this patch is licensed under GPLv2+)
-
-diff --git a/configure.in b/configure.in
-index 450cc92..aab0b59 100644
---- a/configure.in
-+++ b/configure.in
-@@ -1869,9 +1869,7 @@ AC_SUBST(DEFN_LONG_LONG_LIMB)
- 
- # The C compiler and preprocessor, put into ANSI mode if possible.
- AC_PROG_CC
--AC_PROG_CC_STDC
- AC_PROG_CPP
--GMP_H_ANSI
- 
- 
- # The C compiler on the build system, and associated tests.
diff --git a/yocto-poky/meta/recipes-support/gmp/gmp_4.2.1.bb b/yocto-poky/meta/recipes-support/gmp/gmp_4.2.1.bb
index 928c01a..bfc6a38 100644
--- a/yocto-poky/meta/recipes-support/gmp/gmp_4.2.1.bb
+++ b/yocto-poky/meta/recipes-support/gmp/gmp_4.2.1.bb
@@ -7,7 +7,7 @@
                     file://COPYING.LIB;md5=fbc093901857fcd118f065f900982c24 \
                     file://gmp-h.in;beginline=6;endline=21;md5=e056f74a12c3277d730dbcfb85d2ca34"
 
-SRC_URI += "file://disable-stdc.patch \
+SRC_URI += "file://Use-__gnu_inline__-attribute.patch \
             file://gmp_fix_for_automake-1.12.patch \
             "
 
diff --git a/yocto-poky/meta/recipes-support/icu/icu/fix-install-manx.patch b/yocto-poky/meta/recipes-support/icu/icu/fix-install-manx.patch
new file mode 100644
index 0000000..ec63f50
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/icu/icu/fix-install-manx.patch
@@ -0,0 +1,48 @@
+The generic recursive target calls target-local so also adding it to the
+dependency list results in races due to install-local being executed twice in
+parallel.  For example, install-manx can fail if the two install processes race
+and one process tries to chown a file that the other process has just deleted.
+
+Also install-manx should be a phony target, and for clarity use $^ instead of $?
+in the install command.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+
+diff --git a/Makefile.in b/Makefile.in
+index 9db6c52..3441afa 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -71,7 +71,7 @@ EXTRA_DATA =
+ 
+ ## List of phony targets
+ .PHONY : all all-local all-recursive install install-local install-udata install-udata-files install-udata-dlls		\
+-install-recursive clean clean-local clean-recursive distclean		\
++install-recursive install-manx clean clean-local clean-recursive distclean		\
+ distclean-local distclean-recursive doc dist dist-local dist-recursive	\
+ check check-local check-recursive clean-recursive-with-twist install-icu \
+ doc install-doc tests icu4j-data icu4j-data-install update-windows-makefiles xcheck-local xcheck-recursive xperf xcheck xperf-recursive \
+@@ -82,10 +82,10 @@ check-exhaustive check-exhaustive-local check-exhaustive-recursive releaseDist
+ 
+ ## List of standard targets
+ all: all-local all-recursive
+-install: install-recursive install-local
++install: install-recursive
+ clean: clean-recursive-with-twist clean-local
+-distclean : distclean-recursive distclean-local
+-dist: dist-recursive dist-local
++distclean : distclean-recursive
++dist: dist-recursive
+ check: all check-recursive
+ check-recursive: all
+ xcheck: all xcheck-recursive
+@@ -352,7 +352,7 @@ config.status: $(srcdir)/configure $(srcdir)/common/unicode/uvernum.h
+ 
+ install-manx: $(MANX_FILES)
+ 	$(MKINSTALLDIRS) $(DESTDIR)$(mandir)/man$(SECTION)
+-	$(INSTALL_DATA) $? $(DESTDIR)$(mandir)/man$(SECTION)
++	$(INSTALL_DATA) $^ $(DESTDIR)$(mandir)/man$(SECTION)
+ 
+ config/%.$(SECTION): $(srcdir)/config/%.$(SECTION).in
+ 	cd $(top_builddir) \
diff --git a/yocto-poky/meta/recipes-support/icu/icu_55.1.bb b/yocto-poky/meta/recipes-support/icu/icu_55.1.bb
index f63a9bd..e91b6f3 100644
--- a/yocto-poky/meta/recipes-support/icu/icu_55.1.bb
+++ b/yocto-poky/meta/recipes-support/icu/icu_55.1.bb
@@ -8,9 +8,14 @@
 
 ICU_PV = "${@icu_download_version(d)}"
 
+# http://errors.yoctoproject.org/Errors/Details/20486/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
+
 BASE_SRC_URI = "http://download.icu-project.org/files/icu4c/${PV}/icu4c-${ICU_PV}-src.tgz"
 SRC_URI = "${BASE_SRC_URI} \
            file://icu-pkgdata-large-cmd.patch \
+           file://fix-install-manx.patch \
           "
 
 SRC_URI_append_class-target = "\
diff --git a/yocto-poky/meta/recipes-support/libbsd/files/CVE-2016-2090.patch b/yocto-poky/meta/recipes-support/libbsd/files/CVE-2016-2090.patch
new file mode 100644
index 0000000..2eaae13
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libbsd/files/CVE-2016-2090.patch
@@ -0,0 +1,50 @@
+From c8f0723d2b4520bdd6b9eb7c3e7976de726d7ff7 Mon Sep 17 00:00:00 2001
+From: Hanno Boeck <hanno@hboeck.de>
+Date: Wed, 27 Jan 2016 15:10:11 +0100
+Subject: [PATCH] Fix heap buffer overflow in fgetwln()
+
+In the function fgetwln() there's a 4 byte heap overflow.
+
+There is a while loop that has this check to see whether there's still
+enough space in the buffer:
+
+		if (!fb->len || wused > fb->len) {
+
+If this is true more memory gets allocated. However this test won't be
+true if wused == fb->len, but at that point wused already points out
+of the buffer. Some lines later there's a write to the buffer:
+
+		fb->wbuf[wused++] = wc;
+
+This bug was found with the help of address sanitizer.
+
+Warned-by: ASAN
+Fixes: https://bugs.freedesktop.org/show_bug.cgi?id=93881
+Signed-off-by: Guillem Jover <guillem@hadrons.org>
+
+Upstream-Status: Backport
+http://cgit.freedesktop.org/libbsd/commit/?id=c8f0723d2b4520bdd6b9eb7c3e7976de726d7ff7
+
+CVE: CVE-2016-2090
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ src/fgetwln.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/fgetwln.c b/src/fgetwln.c
+index 9ee0776..aa3f927 100644
+--- a/src/fgetwln.c
++++ b/src/fgetwln.c
+@@ -60,7 +60,7 @@ fgetwln(FILE *stream, size_t *lenp)
+ 	fb->fp = stream;
+ 
+ 	while ((wc = fgetwc(stream)) != WEOF) {
+-		if (!fb->len || wused > fb->len) {
++		if (!fb->len || wused >= fb->len) {
+ 			wchar_t *wp;
+ 
+ 			if (fb->len)
+-- 
+2.3.5
+
diff --git a/yocto-poky/meta/recipes-support/libbsd/libbsd_0.7.0.bb b/yocto-poky/meta/recipes-support/libbsd/libbsd_0.7.0.bb
index 902666d..8d9a708 100644
--- a/yocto-poky/meta/recipes-support/libbsd/libbsd_0.7.0.bb
+++ b/yocto-poky/meta/recipes-support/libbsd/libbsd_0.7.0.bb
@@ -13,7 +13,9 @@
 SECTION = "libs"
 DEPENDS = ""
 
-SRC_URI = "http://libbsd.freedesktop.org/releases/${BPN}-${PV}.tar.xz"
+SRC_URI = "http://libbsd.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
+           file://CVE-2016-2090.patch \
+           "
 
 SRC_URI[md5sum] = "fcceb4e66fd448ca4ed42ba22a8babb0"
 SRC_URI[sha256sum] = "0f3b0e17e5c34c038126e0a04351b11e23c6101a7d0ce3beeab29bb6415c10bb"
diff --git a/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.3.bb b/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.3.bb
index 46d2d62..3675269 100644
--- a/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.3.bb
+++ b/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.3.bb
@@ -5,7 +5,7 @@
 LIC_FILES_CHKSUM = "file://src/fm-extra.h;beginline=8;endline=21;md5=ef1f84da64b3c01cca447212f7ef6007"
 
 SECTION = "x11/libs"
-DEPENDS = "glib-2.0 intltool-native"
+DEPENDS = "glib-2.0 intltool-native gettext-native"
 
 SRC_URI = "${SOURCEFORGE_MIRROR}/pcmanfm/libfm-${PV}.tar.xz \
            file://0001-nls.m4-Take-it-from-gettext-0.15.patch \
diff --git a/yocto-poky/meta/recipes-support/libfm/libfm_1.2.3.bb b/yocto-poky/meta/recipes-support/libfm/libfm_1.2.3.bb
index 629502f..e9ff656 100644
--- a/yocto-poky/meta/recipes-support/libfm/libfm_1.2.3.bb
+++ b/yocto-poky/meta/recipes-support/libfm/libfm_1.2.3.bb
@@ -9,7 +9,7 @@
 
 
 SECTION = "x11/libs"
-DEPENDS = "glib-2.0 pango gtk+ menu-cache intltool-native libexif libfm-extra"
+DEPENDS = "glib-2.0 pango gtk+ menu-cache intltool-native libexif libfm-extra gettext-native"
 
 SRC_URI = "${SOURCEFORGE_MIRROR}/pcmanfm/libfm-${PV}.tar.xz"
 
@@ -18,7 +18,8 @@
 
 PR = "r1"
 
-inherit autotools pkgconfig gtk-doc
+inherit autotools pkgconfig gtk-doc distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 do_configure[dirs] =+ "${S}/m4"
 
diff --git a/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_1.patch b/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_1.patch
new file mode 100644
index 0000000..14c25b9
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_1.patch
@@ -0,0 +1,245 @@
+From 2ef48ba59c32bfa1a9265d5eea8ab225a658903a Mon Sep 17 00:00:00 2001
+From: Werner Koch <wk@gnupg.org>
+Date: Thu, 9 Jan 2014 19:14:09 +0100
+Subject: [PATCH] ecc: Make a macro shorter.
+
+* src/mpi.h (MPI_EC_TWISTEDEDWARDS): Rename to MPI_EC_EDWARDS.  CHnage
+all users.
+* cipher/ecc-curves.c (domain_parms): Add parameters for Curve3617 as
+comment.
+* mpi/ec.c (dup_point_twistededwards): Rename to dup_point_edwards.
+(add_points_twistededwards): Rename to add_points_edwards.
+
+Signed-off-by: Werner Koch <wk@gnupg.org>
+
+Upstream-Status: Backport
+2ef48ba59c32bfa1a9265d5eea8ab225a658903a
+
+CVE: CVE-2015-7511 depend patch
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ cipher/ecc-curves.c | 22 +++++++++++++++++++---
+ cipher/ecc-misc.c   |  4 ++--
+ cipher/ecc.c        |  8 ++++----
+ mpi/ec.c            | 22 +++++++++++-----------
+ src/mpi.h           | 11 ++++++++---
+ 5 files changed, 44 insertions(+), 23 deletions(-)
+
+Index: libgcrypt-1.6.3/cipher/ecc-curves.c
+===================================================================
+--- libgcrypt-1.6.3.orig/cipher/ecc-curves.c
++++ libgcrypt-1.6.3/cipher/ecc-curves.c
+@@ -105,7 +105,7 @@ static const ecc_domain_parms_t domain_p
+     {
+       /* (-x^2 + y^2 = 1 + dx^2y^2) */
+       "Ed25519", 256, 0,
+-      MPI_EC_TWISTEDEDWARDS, ECC_DIALECT_ED25519,
++      MPI_EC_EDWARDS, ECC_DIALECT_ED25519,
+       "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED",
+       "-0x01",
+       "-0x2DFC9311D490018C7338BF8688861767FF8FF5B2BEBE27548A14B235ECA6874A",
+@@ -113,6 +113,22 @@ static const ecc_domain_parms_t domain_p
+       "0x216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A",
+       "0x6666666666666666666666666666666666666666666666666666666666666658"
+     },
++#if 0 /* No real specs yet found.  */
++    {
++      /* x^2 + y^2 = 1 + 3617x^2y^2 mod 2^414 - 17 */
++      "Curve3617",
++      "0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
++      "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF",
++      MPI_EC_EDWARDS, 0,
++      "0x01",
++      "0x0e21",
++      "0x07FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEB3CC92414CF"
++      "706022B36F1C0338AD63CF181B0E71A5E106AF79",
++      "0x1A334905141443300218C0631C326E5FCD46369F44C03EC7F57FF35498A4AB4D"
++      "6D6BA111301A73FAA8537C64C4FD3812F3CBC595",
++      "0x22"
++    },
++#endif /*0*/
+     {
+       "NIST P-192", 192, 1,
+       MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD,
+@@ -404,7 +420,7 @@ _gcry_ecc_fill_in_curve (unsigned int nb
+   switch (domain_parms[idx].model)
+     {
+     case MPI_EC_WEIERSTRASS:
+-    case MPI_EC_TWISTEDEDWARDS:
++    case MPI_EC_EDWARDS:
+       break;
+     case MPI_EC_MONTGOMERY:
+       return GPG_ERR_NOT_SUPPORTED;
+@@ -1039,7 +1055,7 @@ _gcry_ecc_get_mpi (const char *name, mpi
+       if (name[1] != '@')
+         return _gcry_mpi_ec_ec2os (ec->Q, ec);
+ 
+-      if (!strcmp (name+2, "eddsa") && ec->model == MPI_EC_TWISTEDEDWARDS)
++      if (!strcmp (name+2, "eddsa") && ec->model == MPI_EC_EDWARDS)
+         {
+           unsigned char *encpk;
+           unsigned int encpklen;
+Index: libgcrypt-1.6.3/cipher/ecc-misc.c
+===================================================================
+--- libgcrypt-1.6.3.orig/cipher/ecc-misc.c
++++ libgcrypt-1.6.3/cipher/ecc-misc.c
+@@ -79,7 +79,7 @@ _gcry_ecc_model2str (enum gcry_mpi_ec_mo
+     {
+     case MPI_EC_WEIERSTRASS:    str = "Weierstrass"; break;
+     case MPI_EC_MONTGOMERY:     str = "Montgomery";  break;
+-    case MPI_EC_TWISTEDEDWARDS: str = "Twisted Edwards"; break;
++    case MPI_EC_EDWARDS:        str = "Edwards"; break;
+     }
+   return str;
+ }
+@@ -252,7 +252,7 @@ _gcry_ecc_compute_public (mpi_point_t Q,
+ 
+   if (!d || !G || !ec->p || !ec->a)
+     return NULL;
+-  if (ec->model == MPI_EC_TWISTEDEDWARDS && !ec->b)
++  if (ec->model == MPI_EC_EDWARDS && !ec->b)
+     return NULL;
+ 
+   if (ec->dialect == ECC_DIALECT_ED25519
+Index: libgcrypt-1.6.3/cipher/ecc.c
+===================================================================
+--- libgcrypt-1.6.3.orig/cipher/ecc.c
++++ libgcrypt-1.6.3/cipher/ecc.c
+@@ -642,7 +642,7 @@ ecc_check_secret_key (gcry_sexp_t keypar
+   if (!curvename)
+     {
+       sk.E.model = ((flags & PUBKEY_FLAG_EDDSA)
+-               ? MPI_EC_TWISTEDEDWARDS
++               ? MPI_EC_EDWARDS
+                : MPI_EC_WEIERSTRASS);
+       sk.E.dialect = ((flags & PUBKEY_FLAG_EDDSA)
+                       ? ECC_DIALECT_ED25519
+@@ -774,7 +774,7 @@ ecc_sign (gcry_sexp_t *r_sig, gcry_sexp_
+   if (!curvename)
+     {
+       sk.E.model = ((ctx.flags & PUBKEY_FLAG_EDDSA)
+-                    ? MPI_EC_TWISTEDEDWARDS
++                    ? MPI_EC_EDWARDS
+                     : MPI_EC_WEIERSTRASS);
+       sk.E.dialect = ((ctx.flags & PUBKEY_FLAG_EDDSA)
+                       ? ECC_DIALECT_ED25519
+@@ -938,7 +938,7 @@ ecc_verify (gcry_sexp_t s_sig, gcry_sexp
+   if (!curvename)
+     {
+       pk.E.model = ((sigflags & PUBKEY_FLAG_EDDSA)
+-                    ? MPI_EC_TWISTEDEDWARDS
++                    ? MPI_EC_EDWARDS
+                     : MPI_EC_WEIERSTRASS);
+       pk.E.dialect = ((sigflags & PUBKEY_FLAG_EDDSA)
+                       ? ECC_DIALECT_ED25519
+@@ -1528,7 +1528,7 @@ compute_keygrip (gcry_md_hd_t md, gcry_s
+   if (!curvename)
+     {
+       model = ((flags & PUBKEY_FLAG_EDDSA)
+-               ? MPI_EC_TWISTEDEDWARDS
++               ? MPI_EC_EDWARDS
+                : MPI_EC_WEIERSTRASS);
+       dialect = ((flags & PUBKEY_FLAG_EDDSA)
+                  ? ECC_DIALECT_ED25519
+Index: libgcrypt-1.6.3/mpi/ec.c
+===================================================================
+--- libgcrypt-1.6.3.orig/mpi/ec.c
++++ libgcrypt-1.6.3/mpi/ec.c
+@@ -605,7 +605,7 @@ _gcry_mpi_ec_get_affine (gcry_mpi_t x, g
+       }
+       return -1;
+ 
+-    case MPI_EC_TWISTEDEDWARDS:
++    case MPI_EC_EDWARDS:
+       {
+         gcry_mpi_t z;
+ 
+@@ -725,7 +725,7 @@ dup_point_montgomery (mpi_point_t result
+ 
+ /*  RESULT = 2 * POINT  (Twisted Edwards version). */
+ static void
+-dup_point_twistededwards (mpi_point_t result, mpi_point_t point, mpi_ec_t ctx)
++dup_point_edwards (mpi_point_t result, mpi_point_t point, mpi_ec_t ctx)
+ {
+ #define X1 (point->x)
+ #define Y1 (point->y)
+@@ -811,8 +811,8 @@ _gcry_mpi_ec_dup_point (mpi_point_t resu
+     case MPI_EC_MONTGOMERY:
+       dup_point_montgomery (result, point, ctx);
+       break;
+-    case MPI_EC_TWISTEDEDWARDS:
+-      dup_point_twistededwards (result, point, ctx);
++    case MPI_EC_EDWARDS:
++      dup_point_edwards (result, point, ctx);
+       break;
+     }
+ }
+@@ -977,9 +977,9 @@ add_points_montgomery (mpi_point_t resul
+ 
+ /* RESULT = P1 + P2  (Twisted Edwards version).*/
+ static void
+-add_points_twistededwards (mpi_point_t result,
+-                           mpi_point_t p1, mpi_point_t p2,
+-                           mpi_ec_t ctx)
++add_points_edwards (mpi_point_t result,
++                    mpi_point_t p1, mpi_point_t p2,
++                    mpi_ec_t ctx)
+ {
+ #define X1 (p1->x)
+ #define Y1 (p1->y)
+@@ -1087,8 +1087,8 @@ _gcry_mpi_ec_add_points (mpi_point_t res
+     case MPI_EC_MONTGOMERY:
+       add_points_montgomery (result, p1, p2, ctx);
+       break;
+-    case MPI_EC_TWISTEDEDWARDS:
+-      add_points_twistededwards (result, p1, p2, ctx);
++    case MPI_EC_EDWARDS:
++      add_points_edwards (result, p1, p2, ctx);
+       break;
+     }
+ }
+@@ -1106,7 +1106,7 @@ _gcry_mpi_ec_mul_point (mpi_point_t resu
+   unsigned int i, loops;
+   mpi_point_struct p1, p2, p1inv;
+ 
+-  if (ctx->model == MPI_EC_TWISTEDEDWARDS)
++  if (ctx->model == MPI_EC_EDWARDS)
+     {
+       /* Simple left to right binary method.  GECC Algorithm 3.27 */
+       unsigned int nbits;
+@@ -1269,7 +1269,7 @@ _gcry_mpi_ec_curve_point (gcry_mpi_point
+       log_fatal ("%s: %s not yet supported\n",
+                  "_gcry_mpi_ec_curve_point", "Montgomery");
+       break;
+-    case MPI_EC_TWISTEDEDWARDS:
++    case MPI_EC_EDWARDS:
+       {
+         /* a · x^2 + y^2 - 1 - b · x^2 · y^2 == 0 */
+         ec_pow2 (x, x, ctx);
+Index: libgcrypt-1.6.3/src/mpi.h
+===================================================================
+--- libgcrypt-1.6.3.orig/src/mpi.h
++++ libgcrypt-1.6.3/src/mpi.h
+@@ -245,13 +245,18 @@ void _gcry_mpi_snatch_point (gcry_mpi_t
+ /* Models describing an elliptic curve.  */
+ enum gcry_mpi_ec_models
+   {
+-
++    /* The Short Weierstrass equation is
++          y^2 = x^3 + ax + b
++     */
+     MPI_EC_WEIERSTRASS = 0,
++    /* The Montgomery equation is
++          by^2 = x^3 + ax^2 + x
++     */
+     MPI_EC_MONTGOMERY,
+-    MPI_EC_TWISTEDEDWARDS
+-    /* The equation for Twisted Edwards curves is
++    /* The Twisted Edwards equation is
+           ax^2 + y^2 = 1 + bx^2y^2
+        Note that we use 'b' instead of the commonly used 'd'.  */
++    MPI_EC_EDWARDS
+   };
+ 
+ /* Dialects used with elliptic curves.  It is easier to keep the
diff --git a/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_2.patch b/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_2.patch
new file mode 100644
index 0000000..8093a18
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libgcrypt/files/CVE-2015-7511_2.patch
@@ -0,0 +1,55 @@
+From 88e1358962e902ff1cbec8d53ba3eee46407851a Mon Sep 17 00:00:00 2001
+From: NIIBE Yutaka <gniibe@fsij.org>
+Date: Wed, 25 Nov 2015 12:46:19 +0900
+Subject: [PATCH] ecc: Constant-time multiplication for Weierstrass curve.
+
+* mpi/ec.c (_gcry_mpi_ec_mul_point): Use simple left-to-right binary
+method for Weierstrass curve when SCALAR is secure.
+
+Upstream-Status: Backport
+
+http://git.gnupg.org/cgi-bin/gitweb.cgi?p=libgcrypt.git;a=commit;h=88e1358962e902ff1cbec8d53ba3eee46407851a
+
+CVE: CVE-2015-7511 fix
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ mpi/ec.c | 19 +++++++++++++++----
+ 1 file changed, 15 insertions(+), 4 deletions(-)
+
+Index: libgcrypt-1.6.3/mpi/ec.c
+===================================================================
+--- libgcrypt-1.6.3.orig/mpi/ec.c
++++ libgcrypt-1.6.3/mpi/ec.c
+@@ -1106,16 +1106,27 @@ _gcry_mpi_ec_mul_point (mpi_point_t resu
+   unsigned int i, loops;
+   mpi_point_struct p1, p2, p1inv;
+ 
+-  if (ctx->model == MPI_EC_EDWARDS)
++  if (ctx->model == MPI_EC_EDWARDS
++      || (ctx->model == MPI_EC_WEIERSTRASS
++          && mpi_is_secure (scalar)))
+     {
+       /* Simple left to right binary method.  GECC Algorithm 3.27 */
+       unsigned int nbits;
+       int j;
+ 
+       nbits = mpi_get_nbits (scalar);
+-      mpi_set_ui (result->x, 0);
+-      mpi_set_ui (result->y, 1);
+-      mpi_set_ui (result->z, 1);
++      if (ctx->model == MPI_EC_WEIERSTRASS)
++        {
++          mpi_set_ui (result->x, 1);
++          mpi_set_ui (result->y, 1);
++          mpi_set_ui (result->z, 0);
++        }
++      else
++        {
++          mpi_set_ui (result->x, 0);
++          mpi_set_ui (result->y, 1);
++          mpi_set_ui (result->z, 1);
++        }
+ 
+       if (mpi_is_secure (scalar))
+         {
diff --git a/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.6.3.bb b/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.6.3.bb
index cd06ce7..db89faf 100644
--- a/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.6.3.bb
+++ b/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.6.3.bb
@@ -1,4 +1,8 @@
 require libgcrypt.inc
 
+SRC_URI += "\
+        file://CVE-2015-7511_1.patch \
+        file://CVE-2015-7511_2.patch \
+        "
 SRC_URI[md5sum] = "de03b867d02fdf115a1bac8bb8b5c3a3"
 SRC_URI[sha256sum] = "69e94e1a7084d94e1a6ca26d436068cb74862d10a7353cfae579a2d88674ff09"
diff --git a/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error/0001-libgpg-error-Add-nios2-support.patch b/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error/0001-libgpg-error-Add-nios2-support.patch
new file mode 100644
index 0000000..dab1c13
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error/0001-libgpg-error-Add-nios2-support.patch
@@ -0,0 +1,46 @@
+From b2af652f43991e4ce6297917da542a9eb5135939 Mon Sep 17 00:00:00 2001
+From: Marek Vasut <marex@denx.de>
+Date: Thu, 17 Sep 2015 03:28:06 +0200
+Subject: [PATCH] libgpg-error: Add nios2 support
+
+Add configuration for the NIOS2 processor.
+
+Signed-off-by: Marek Vasut <marex@denx.de>
+Upstream-Status: Submitted
+---
+ src/syscfg/lock-obj-pub.nios2-unknown-linux-gnu.h | 23 +++++++++++++++++++++++
+ 1 file changed, 23 insertions(+)
+ create mode 100644 src/syscfg/lock-obj-pub.nios2-unknown-linux-gnu.h
+
+diff --git a/src/syscfg/lock-obj-pub.nios2-unknown-linux-gnu.h b/src/syscfg/lock-obj-pub.nios2-unknown-linux-gnu.h
+new file mode 100644
+index 0000000..3a24571
+--- /dev/null
++++ b/src/syscfg/lock-obj-pub.nios2-unknown-linux-gnu.h
+@@ -0,0 +1,23 @@
++## lock-obj-pub.nios2-unknown-linux-gnu.h
++## File created by gen-posix-lock-obj - DO NOT EDIT
++## To be included by mkheader into gpg-error.h
++
++typedef struct
++{
++  long _vers;
++  union {
++    volatile char _priv[24];
++    long _x_align;
++    long *_xp_align;
++  } u;
++} gpgrt_lock_t;
++
++#define GPGRT_LOCK_INITIALIZER {1,{{0,0,0,0,0,0,0,0, \
++                                    0,0,0,0,0,0,0,0, \
++                                    0,0,0,0,0,0,0,0}}}
++##
++## Local Variables:
++## mode: c
++## buffer-read-only: t
++## End:
++##
+-- 
+2.5.1
+
diff --git a/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.19.bb b/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.19.bb
index 39dbbcf..c69930a 100644
--- a/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.19.bb
+++ b/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.19.bb
@@ -12,7 +12,9 @@
 SECTION = "libs"
 
 SRC_URI = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-${PV}.tar.bz2 \
-           file://pkgconfig.patch"
+           file://pkgconfig.patch \
+	   file://0001-libgpg-error-Add-nios2-support.patch \
+	  "
 SRC_URI[md5sum] = "c04c16245b92829281f43b5bef7d16da"
 SRC_URI[sha256sum] = "53120e1333d5c5d28d87ff2854e9e98719c8e214152f17ad5291704d25c4978b"
 
diff --git a/yocto-poky/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch b/yocto-poky/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
index b5886c5..d28dfd9 100644
--- a/yocto-poky/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
+++ b/yocto-poky/meta/recipes-support/libksba/libksba/ksba-add-pkgconfig-support.patch
@@ -51,7 +51,7 @@
 +Requires:
 +Version: @VERSION@
 +Libs: -L${libdir} -lksba
-+Libs.private: -L{libdir} -lgpg-error
++Libs.private: -L${libdir} -lgpg-error
 +Cflags: -I${includedir}
 +
 Index: libksba-1.3.0/src/ksba.m4
diff --git a/yocto-poky/meta/recipes-support/libpcre/libpcre_8.37.bb b/yocto-poky/meta/recipes-support/libpcre/libpcre_8.38.bb
similarity index 87%
rename from yocto-poky/meta/recipes-support/libpcre/libpcre_8.37.bb
rename to yocto-poky/meta/recipes-support/libpcre/libpcre_8.38.bb
index bcfc9e9..c567607 100644
--- a/yocto-poky/meta/recipes-support/libpcre/libpcre_8.37.bb
+++ b/yocto-poky/meta/recipes-support/libpcre/libpcre_8.38.bb
@@ -14,14 +14,19 @@
            file://Makefile \
 "
 
-SRC_URI[md5sum] = "ed91be292cb01d21bc7e526816c26981"
-SRC_URI[sha256sum] = "51679ea8006ce31379fb0860e46dd86665d864b5020fc9cd19e71260eef4789d"
+SRC_URI[md5sum] = "00aabbfe56d5a48b270f999b508c5ad2"
+SRC_URI[sha256sum] = "b9e02d36e23024d6c02a2e5b25204b3a4fa6ade43e0a5f869f254f49535079df"
 
 S = "${WORKDIR}/pcre-${PV}"
 
 PROVIDES += "pcre"
 DEPENDS += "bzip2 zlib"
 
+PACKAGECONFIG ??= "pcre8"
+
+PACKAGECONFIG[pcre8] = "--enable-pcre8,--disable-pcre8"
+PACKAGECONFIG[pcre16] = "--enable-pcre16,--disable-pcre16"
+PACKAGECONFIG[pcre32] = "--enable-pcre32,--disable-pcre32"
 PACKAGECONFIG[pcretest-readline] = "--enable-pcretest-libreadline,--disable-pcretest-libreadline,readline,"
 
 BINCONFIG = "${bindir}/pcre-config"
diff --git a/yocto-poky/meta/recipes-support/libunwind/libunwind-1.1/Add-AO_REQUIRE_CAS-to-fix-build-on-ARM-v6.patch b/yocto-poky/meta/recipes-support/libunwind/libunwind-1.1/Add-AO_REQUIRE_CAS-to-fix-build-on-ARM-v6.patch
new file mode 100644
index 0000000..d552502
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libunwind/libunwind-1.1/Add-AO_REQUIRE_CAS-to-fix-build-on-ARM-v6.patch
@@ -0,0 +1,61 @@
+From 24484e80b3e329c9edee1995e102f8612eedb79c Mon Sep 17 00:00:00 2001
+From: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
+Date: Tue, 13 May 2014 23:32:27 +0200
+Subject: [PATCH] Add AO_REQUIRE_CAS to fix build on ARM < v6
+
+ARM earlier than ARMv6, such as ARMv4 and ARMv5 do not provide
+optimize atomic operations in libatomic_ops. Since libunwind is using
+such operations, it should define AO_REQUIRE_CAS before including
+<atomic_ops.h> so that libatomic_ops knows it should use emulated
+atomic operations instead (even though they are obviously a lot more
+expensive).
+
+Also, while real atomic operations are all inline functions and
+therefore linking against libatomic_ops was not required, the emulated
+atomic operations actually require linking against libatomic_ops, so
+the commented AC_CHECK_LIB test in acinclude.m4 is uncommented to make
+sure we link against libatomic_ops.
+
+Signed-off-by: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
+
+Upstream-Status: Pending
+Taken from:
+https://raw.githubusercontent.com/rdnetto/teapot-buildroot/master/package/libunwind/libunwind-02-Add-AO_REQUIRE_CAS-to-fix-build-on-ARM-v6.patch
+
+---
+ acinclude.m4          | 8 +-------
+ include/libunwind_i.h | 1 +
+ 2 files changed, 2 insertions(+), 7 deletions(-)
+
+diff --git a/acinclude.m4 b/acinclude.m4
+index 497f7c2..9c15af1 100644
+--- a/acinclude.m4
++++ b/acinclude.m4
+@@ -22,11 +22,5 @@ fi])
+ AC_DEFUN([CHECK_ATOMIC_OPS],
+ [dnl Check whether the system has the atomic_ops package installed.
+   AC_CHECK_HEADERS(atomic_ops.h)
+-#
+-# Don't link against libatomic_ops for now.  We don't want libunwind
+-# to depend on libatomic_ops.so.  Fortunately, none of the platforms
+-# we care about so far need libatomic_ops.a (everything is done via
+-# inline macros).
+-#
+-#  AC_CHECK_LIB(atomic_ops, main)
++  AC_CHECK_LIB(atomic_ops, main)
+ ])
+diff --git a/include/libunwind_i.h b/include/libunwind_i.h
+index 23f615e..deabdfd 100644
+--- a/include/libunwind_i.h
++++ b/include/libunwind_i.h
+@@ -95,6 +95,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */
+ 	(pthread_mutex_unlock != NULL ? pthread_mutex_unlock (l) : 0)
+ 
+ #ifdef HAVE_ATOMIC_OPS_H
++# define AO_REQUIRE_CAS
+ # include <atomic_ops.h>
+ static inline int
+ cmpxchg_ptr (void *addr, void *old, void *new)
+-- 
+1.9.2
+
diff --git a/yocto-poky/meta/recipes-support/libunwind/libunwind_1.1.bb b/yocto-poky/meta/recipes-support/libunwind/libunwind_1.1.bb
index 8282c1b..aa62bcc 100644
--- a/yocto-poky/meta/recipes-support/libunwind/libunwind_1.1.bb
+++ b/yocto-poky/meta/recipes-support/libunwind/libunwind_1.1.bb
@@ -6,7 +6,12 @@
     file://Fix-test-case-link-failure-on-PowerPC-systems-with-Altivec.patch \
     file://Link-libunwind-to-libgcc_s-rather-than-libgcc.patch \
     file://0001-Invalid-dwarf-opcodes-can-cause-references-beyond-th.patch \
+    file://Add-AO_REQUIRE_CAS-to-fix-build-on-ARM-v6.patch \
 "
 
 SRC_URI[md5sum] = "fb4ea2f6fbbe45bf032cd36e586883ce"
 SRC_URI[sha256sum] = "9dfe0fcae2a866de9d3942c66995e4b460230446887dbdab302d41a8aee8d09a"
+
+# http://errors.yoctoproject.org/Errors/Details/20487/
+ARM_INSTRUCTION_SET_armv4 = "arm"
+ARM_INSTRUCTION_SET_armv5 = "arm"
diff --git a/yocto-poky/meta/recipes-support/libxslt/libxslt/CVE-2015-7995.patch b/yocto-poky/meta/recipes-support/libxslt/libxslt/CVE-2015-7995.patch
new file mode 100644
index 0000000..e4d09c2
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/libxslt/libxslt/CVE-2015-7995.patch
@@ -0,0 +1,33 @@
+From 7ca19df892ca22d9314e95d59ce2abdeff46b617 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Thu, 29 Oct 2015 19:33:23 +0800
+Subject: Fix for type confusion in preprocessing attributes
+
+CVE-2015-7995 http://www.openwall.com/lists/oss-security/2015/10/27/10
+We need to check that the parent node is an element before dereferencing
+its namespace
+
+Upstream-Status: Backport
+
+https://git.gnome.org/browse/libxslt/commit/?id=7ca19df892ca22d9314e95d59ce2abdeff46b617
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+---
+ libxslt/preproc.c | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+Index: libxslt-1.1.28/libxslt/preproc.c
+===================================================================
+--- libxslt-1.1.28.orig/libxslt/preproc.c
++++ libxslt-1.1.28/libxslt/preproc.c
+@@ -2245,7 +2245,8 @@ xsltStylePreCompute(xsltStylesheetPtr st
+ 	} else if (IS_XSLT_NAME(inst, "attribute")) {
+ 	    xmlNodePtr parent = inst->parent;
+ 
+-	    if ((parent == NULL) || (parent->ns == NULL) ||
++	    if ((parent == NULL) ||
++	        (parent->type != XML_ELEMENT_NODE) || (parent->ns == NULL) ||
+ 		((parent->ns != inst->ns) &&
+ 		 (!xmlStrEqual(parent->ns->href, inst->ns->href))) ||
+ 		(!xmlStrEqual(parent->name, BAD_CAST "attribute-set"))) {
diff --git a/yocto-poky/meta/recipes-support/libxslt/libxslt_1.1.28.bb b/yocto-poky/meta/recipes-support/libxslt/libxslt_1.1.28.bb
index 166bcd8..87fabec 100644
--- a/yocto-poky/meta/recipes-support/libxslt/libxslt_1.1.28.bb
+++ b/yocto-poky/meta/recipes-support/libxslt/libxslt_1.1.28.bb
@@ -10,7 +10,8 @@
 
 SRC_URI = "ftp://xmlsoft.org/libxslt//libxslt-${PV}.tar.gz \
            file://pkgconfig_fix.patch \
-           file://pkgconfig.patch"
+           file://pkgconfig.patch \
+           file://CVE-2015-7995.patch"
 
 SRC_URI[md5sum] = "9667bf6f9310b957254fdcf6596600b7"
 SRC_URI[sha256sum] = "5fc7151a57b89c03d7b825df5a0fae0a8d5f05674c0e7cf2937ecec4d54a028c"
diff --git a/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8803_8805.patch b/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8803_8805.patch
new file mode 100644
index 0000000..b4ff228
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8803_8805.patch
@@ -0,0 +1,71 @@
+Upstream-Status: Backport
+https://git.lysator.liu.se/nettle/nettle/commit/c71d2c9d20eeebb985e3872e4550137209e3ce4d
+
+CVE: CVE-2015-8803
+CVE: CVE-2015-8805
+
+Same fix for both.
+
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+Index: nettle-3.1.1/ecc-256.c
+===================================================================
+--- nettle-3.1.1.orig/ecc-256.c
++++ nettle-3.1.1/ecc-256.c
+@@ -113,8 +113,19 @@ ecc_256_modp (const struct ecc_modulo *p
+ 
+       assert (q2 < 2);
+ 
+-      /* We multiply by two low limbs of p, 2^96 - 1, so we could use
+-	 shifts rather than mul. */
++      /*
++	 n-1 n-2 n-3 n-4
++        +---+---+---+---+
++        | u1| u0| u low |
++        +---+---+---+---+
++          - | q1(2^96-1)|
++            +-------+---+
++            |q2(2^.)|
++            +-------+
++
++	 We multiply by two low limbs of p, 2^96 - 1, so we could use
++	 shifts rather than mul.
++      */
+       t = mpn_submul_1 (rp + n - 4, p->m, 2, q1);
+       t += cnd_sub_n (q2, rp + n - 3, p->m, 1);
+       t += (-q2) & 0xffffffff;
+@@ -124,7 +135,10 @@ ecc_256_modp (const struct ecc_modulo *p
+       u0 -= t;
+       t = (u1 < cy);
+       u1 -= cy;
+-      u1 += cnd_add_n (t, rp + n - 4, p->m, 3);
++
++      cy = cnd_add_n (t, rp + n - 4, p->m, 2);
++      u0 += cy;
++      u1 += (u0 < cy);
+       u1 -= (-t) & 0xffffffff;
+     }
+   rp[2] = u0;
+@@ -211,7 +225,7 @@ ecc_256_modq (const struct ecc_modulo *q
+ 
+       /* Conditional add of p */
+       u1 += t;
+-      u2 += (t<<32) + (u0 < t);
++      u2 += (t<<32) + (u1 < t);
+ 
+       t = cnd_add_n (t, rp + n - 4, q->m, 2);
+       u1 += t;
+Index: nettle-3.1.1/ChangeLog
+===================================================================
+--- nettle-3.1.1.orig/ChangeLog
++++ nettle-3.1.1/ChangeLog
+@@ -1,3 +1,9 @@
++2015-12-10  Niels Möller  <nisse@lysator.liu.se>
++
++   * ecc-256.c (ecc_256_modp): Fixed carry propagation bug. Problem
++   reported by Hanno Böck.
++   (ecc_256_modq): Fixed another carry propagation bug.
++
+ 2015-04-24  Niels Möller  <nisse@lysator.liu.se>
+ 
+ 	* Released nettle-3.1.1.
diff --git a/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8804.patch b/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8804.patch
new file mode 100644
index 0000000..1d34db7
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/nettle/nettle-3.1.1/CVE-2015-8804.patch
@@ -0,0 +1,281 @@
+Upstream-Status: Backport
+ https://git.lysator.liu.se/nettle/nettle/commit/fa269b6ad06dd13c901dbd84a12e52b918a09cd7
+
+CVE: CVE-2015-8804
+Signed-off-by: Armin Kuster <akuster@mvista.com>
+
+Index: nettle-3.1.1/ChangeLog
+===================================================================
+--- nettle-3.1.1.orig/ChangeLog
++++ nettle-3.1.1/ChangeLog
+@@ -1,3 +1,11 @@
++2015-12-15  Niels Möller  <nisse@lysator.liu.se>
++
++	* x86_64/ecc-384-modp.asm: Fixed carry propagation bug. Problem
++	reported by Hanno Böck. Simplified the folding to always use
++	non-negative carry, the old code attempted to add in a carry which
++	could be either positive or negative, but didn't get that case
++	right.
++
+ 2015-12-10  Niels Möller  <nisse@lysator.liu.se>
+ 
+    * ecc-256.c (ecc_256_modp): Fixed carry propagation bug. Problem
+Index: nettle-3.1.1/x86_64/ecc-384-modp.asm
+===================================================================
+--- nettle-3.1.1.orig/x86_64/ecc-384-modp.asm
++++ nettle-3.1.1/x86_64/ecc-384-modp.asm
+@@ -1,7 +1,7 @@
+ C x86_64/ecc-384-modp.asm
+ 
+ ifelse(<
+-   Copyright (C) 2013 Niels Möller
++   Copyright (C) 2013, 2015 Niels Möller
+ 
+    This file is part of GNU Nettle.
+ 
+@@ -33,7 +33,7 @@ ifelse(<
+ 	.file "ecc-384-modp.asm"
+ 
+ define(<RP>, <%rsi>)
+-define(<D4>, <%rax>)
++define(<D5>, <%rax>)
+ define(<T0>, <%rbx>)
+ define(<T1>, <%rcx>)
+ define(<T2>, <%rdx>)
+@@ -48,8 +48,8 @@ define(<H4>, <%r13>)
+ define(<H5>, <%r14>)
+ define(<C2>, <%r15>)
+ define(<C0>, H5)	C Overlap
+-define(<D0>, RP)	C Overlap
+-define(<TMP>, H4)	C Overlap
++define(<TMP>, RP)	C Overlap
++
+ 
+ PROLOGUE(nettle_ecc_384_modp)
+ 	W64_ENTRY(2, 0)
+@@ -61,34 +61,38 @@ PROLOGUE(nettle_ecc_384_modp)
+ 	push	%r14
+ 	push	%r15
+ 
+-	C First get top 2 limbs, which need folding twice
++	C First get top 2 limbs, which need folding twice.
++	C B^10 = B^6 + B^4 + 2^32 (B-1)B^4.
++	C We handle the terms as follow:
+ 	C
+-	C   H5 H4
+-	C     -H5
+-	C  ------
+-	C   H0 D4
++	C B^6: Folded immediatly.
+ 	C
+-	C Then shift right, (H1,H0,D4)  <--  (H0,D4) << 32
+-	C and add
++	C B^4: Delayed, added in in the next folding.
+ 	C
+-	C     H5 H4
+-	C     H1 H0
+-	C ----------
+-	C  C2 H1 H0
+-
+-	mov	80(RP), D4
+-	mov	88(RP), H0
+-	mov	D4, H4
+-	mov	H0, H5
+-	sub	H0, D4
+-	sbb	$0, H0
+-
+-	mov	D4, T2
+-	mov	H0, H1
+-	shl	$32, H0
+-	shr	$32, T2
++	C 2^32(B-1) B^4: Low half limb delayed until the next
++	C folding. Top 1.5 limbs subtracted and shifter now, resulting
++	C in 2.5 limbs. The low limb saved in D5, high 1.5 limbs added
++	C in.
++
++	mov	80(RP), H4
++	mov	88(RP), H5
++	C Shift right 32 bits, into H1, H0
++	mov	H4, H0
++	mov	H5, H1
++	mov	H5, D5
+ 	shr	$32, H1
+-	or	T2, H0
++	shl	$32, D5
++	shr	$32, H0
++	or	D5, H0
++
++	C	H1 H0
++	C       -  H1 H0
++	C       --------
++	C       H1 H0 D5
++	mov	H0, D5
++	neg	D5
++	sbb	H1, H0
++	sbb	$0, H1
+ 
+ 	xor	C2, C2
+ 	add	H4, H0
+@@ -127,118 +131,95 @@ PROLOGUE(nettle_ecc_384_modp)
+ 	adc	H3, T5
+ 	adc	$0, C0
+ 
+-	C   H3 H2 H1 H0  0
+-	C - H4 H3 H2 H1 H0
+-	C  ---------------
+-	C   H3 H2 H1 H0 D0
+-
+-	mov	XREG(D4), XREG(D4)
+-	mov	H0, D0
+-	neg	D0
+-	sbb	H1, H0
+-	sbb	H2, H1
+-	sbb	H3, H2
+-	sbb	H4, H3
+-	sbb	$0, D4
+-
+-	C Shift right. High bits are sign, to be added to C0.
+-	mov	D4, TMP
+-	sar	$32, TMP
+-	shl	$32, D4
+-	add	TMP, C0
+-
++	C Shift left, including low half of H4
+ 	mov	H3, TMP
++	shl	$32, H4
+ 	shr	$32, TMP
+-	shl	$32, H3
+-	or	TMP, D4
++	or	TMP, H4
+ 
+ 	mov	H2, TMP
++	shl	$32, H3
+ 	shr	$32, TMP
+-	shl	$32, H2
+ 	or	TMP, H3
+ 
+ 	mov	H1, TMP
++	shl	$32, H2
+ 	shr	$32, TMP
+-	shl	$32, H1
+ 	or	TMP, H2
+ 
+ 	mov	H0, TMP
++	shl	$32, H1
+ 	shr	$32, TMP
+-	shl	$32, H0
+ 	or	TMP, H1
+ 
+-	mov	D0, TMP
+-	shr	$32, TMP
+-	shl	$32, D0
+-	or	TMP, H0
++	shl	$32, H0
++
++	C   H4 H3 H2 H1 H0  0
++	C  -   H4 H3 H2 H1 H0
++	C  ---------------
++	C   H4 H3 H2 H1 H0 TMP
+ 
+-	add	D0, T0
++	mov	H0, TMP
++	neg	TMP
++	sbb	H1, H0
++	sbb	H2, H1
++	sbb	H3, H2
++	sbb	H4, H3
++	sbb	$0, H4
++
++	add	TMP, T0
+ 	adc	H0, T1
+ 	adc	H1, T2
+ 	adc	H2, T3
+ 	adc	H3, T4
+-	adc	D4, T5
++	adc	H4, T5
+ 	adc	$0, C0
+ 
+ 	C Remains to add in C2 and C0
+-	C                         C0  C0<<32  (-2^32+1)C0
+-	C    C2  C2<<32  (-2^32+1)C2
+-	C where C2 is always positive, while C0 may be -1.
++	C Set H1, H0 = (2^96 - 2^32 + 1) C0
+ 	mov	C0, H0
+ 	mov	C0, H1
+-	mov	C0, H2
+-	sar	$63, C0		C Get sign
+ 	shl	$32, H1
+-	sub	H1, H0		C Gives borrow iff C0 > 0
++	sub	H1, H0
+ 	sbb	$0, H1
+-	add	C0, H2
+ 
++	C Set H3, H2 = (2^96 - 2^32 + 1) C2
++	mov	C2, H2
++	mov	C2, H3
++	shl	$32, H3
++	sub	H3, H2
++	sbb	$0, H3
++	add	C0, H2		C No carry. Could use lea trick
++
++	xor	C0, C0
+ 	add	H0, T0
+ 	adc	H1, T1
+-	adc	$0, H2
+-	adc	$0, C0
+-
+-	C Set (H1 H0)  <-- C2 << 96 - C2 << 32 + 1
+-	mov	C2, H0
+-	mov	C2, H1
+-	shl	$32, H1
+-	sub	H1, H0
+-	sbb	$0, H1
+-
+-	add	H2, H0
+-	adc	C0, H1
+-	adc	C2, C0
+-	mov	C0, H2
+-	sar	$63, C0
+-	add	H0, T2
+-	adc	H1, T3
+-	adc	H2, T4
+-	adc	C0, T5
+-	sbb	C0, C0
++	adc	H2, T2
++	adc	H3, T3
++	adc	C2, T4
++	adc	D5, T5		C Value delayed from initial folding
++	adc	$0, C0		C Use sbb and switch sign?
+ 
+ 	C Final unlikely carry
+ 	mov	C0, H0
+ 	mov	C0, H1
+-	mov	C0, H2
+-	sar	$63, C0
+ 	shl	$32, H1
+ 	sub	H1, H0
+ 	sbb	$0, H1
+-	add	C0, H2
+ 
+ 	pop	RP
+ 
+-	sub	H0, T0
++	add	H0, T0
+ 	mov	T0, (RP)
+-	sbb	H1, T1
++	adc	H1, T1
+ 	mov	T1, 8(RP)
+-	sbb	H2, T2
++	adc	C0, T2
+ 	mov	T2, 16(RP)
+-	sbb	C0, T3
++	adc	$0, T3
+ 	mov	T3, 24(RP)
+-	sbb	C0, T4
++	adc	$0, T4
+ 	mov	T4, 32(RP)
+-	sbb	C0, T5
++	adc	$0, T5
+ 	mov	T5, 40(RP)
+ 
+ 	pop	%r15
diff --git a/yocto-poky/meta/recipes-support/nettle/nettle_3.1.1.bb b/yocto-poky/meta/recipes-support/nettle/nettle_3.1.1.bb
index 7d7134f..4a40e9a 100644
--- a/yocto-poky/meta/recipes-support/nettle/nettle_3.1.1.bb
+++ b/yocto-poky/meta/recipes-support/nettle/nettle_3.1.1.bb
@@ -7,5 +7,10 @@
                     file://serpent-decrypt.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e \
                     file://serpent-set-key.c;beginline=14;endline=36;md5=ca0d220bc413e1842ecc507690ce416e"
 
+SRC_URI += "\
+            file://CVE-2015-8803_8805.patch \
+            file://CVE-2015-8804.patch \
+            "
+
 SRC_URI[md5sum] = "b40fa88dc32f37a182b6b42092ebb144"
 SRC_URI[sha256sum] = "5fd4d25d64d8ddcb85d0d897572af73b05b4d163c6cc49438a5bfbb8ff293d4c"
diff --git a/yocto-poky/meta/recipes-support/p11-kit/p11-kit_0.22.1.bb b/yocto-poky/meta/recipes-support/p11-kit/p11-kit_0.22.1.bb
index 7ad9626..ee77951 100644
--- a/yocto-poky/meta/recipes-support/p11-kit/p11-kit_0.22.1.bb
+++ b/yocto-poky/meta/recipes-support/p11-kit/p11-kit_0.22.1.bb
@@ -10,6 +10,8 @@
 SRC_URI[md5sum] = "4e9bea1106628ffb820bdad24a819fac"
 SRC_URI[sha256sum] = "ef3a339fcf6aa0e32c8c23f79ba7191e57312be2bda8b24e6d121c2670539a5c"
 
+EXTRA_OECONF = "--without-trust-paths"
+
 FILES_${PN}-dev += " \
     ${libdir}/p11-kit-proxy.so \
     ${libdir}/pkcs11/p11-kit-trust.so \
diff --git a/yocto-poky/meta/recipes-support/pinentry/pinentry_0.9.2.bb b/yocto-poky/meta/recipes-support/pinentry/pinentry_0.9.2.bb
index c836ca4..d6b7130 100644
--- a/yocto-poky/meta/recipes-support/pinentry/pinentry_0.9.2.bb
+++ b/yocto-poky/meta/recipes-support/pinentry/pinentry_0.9.2.bb
@@ -10,6 +10,8 @@
 
 inherit autotools
 
+DEPENDS = "gettext-native"
+
 SRC_URI = "ftp://ftp.gnupg.org/gcrypt/${BPN}/${BPN}-${PV}.tar.bz2"
 
 SRC_URI[md5sum] = "f51d454f921111b5156a2291cbf70278"
diff --git a/yocto-poky/meta/recipes-support/user-creation/files/system-xuser.conf b/yocto-poky/meta/recipes-support/user-creation/files/system-xuser.conf
new file mode 100644
index 0000000..d42e3d1
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/user-creation/files/system-xuser.conf
@@ -0,0 +1,11 @@
+<!DOCTYPE busconfig PUBLIC "-//freedesktop//DTD D-BUS Bus Configuration 1.0//EN"
+ "http://www.freedesktop.org/standards/dbus/1.0/busconfig.dtd">
+<busconfig>
+    <policy user="xuser">
+        <allow send_destination="net.connman"/>
+        <allow send_destination="net.connman.vpn"/>
+        <allow send_destination="org.ofono"/>
+        <allow send_destination="org.bluez"/>
+    </policy>
+</busconfig>
+
diff --git a/yocto-poky/meta/recipes-support/user-creation/xuser-account_0.1.bb b/yocto-poky/meta/recipes-support/user-creation/xuser-account_0.1.bb
index 77ba97d..13ba677 100644
--- a/yocto-poky/meta/recipes-support/user-creation/xuser-account_0.1.bb
+++ b/yocto-poky/meta/recipes-support/user-creation/xuser-account_0.1.bb
@@ -2,7 +2,7 @@
 LICENSE = "MIT"
 LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
 
-SRC_URI = ""
+SRC_URI = "file://system-xuser.conf"
 
 inherit allarch useradd
 
@@ -15,9 +15,11 @@
 }
 
 do_install() {
-    :
+    install -D -m 0644 ${WORKDIR}/system-xuser.conf ${D}${sysconfdir}/dbus-1/system.d/system-xuser.conf
 }
 
+FILES_${PN} = "${sysconfdir}/dbus-1/system.d/system-xuser.conf"
+
 USERADD_PACKAGES = "${PN}"
 GROUPADD_PARAM_${PN} = "--system shutdown"
 USERADD_PARAM_${PN} = "--create-home \
diff --git a/yocto-poky/meta/recipes-support/vte/vte-0.28.2/cve-2012-2738.patch b/yocto-poky/meta/recipes-support/vte/vte-0.28.2/cve-2012-2738.patch
new file mode 100644
index 0000000..2407771
--- /dev/null
+++ b/yocto-poky/meta/recipes-support/vte/vte-0.28.2/cve-2012-2738.patch
@@ -0,0 +1,135 @@
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+From e524b0b3bd8fad844ffa73927c199545b892cdbd Mon Sep 17 00:00:00 2001
+From: Christian Persch <chpe@gnome.org>
+Date: Sat, 19 May 2012 19:36:09 +0200
+Subject: [PATCH 1/2] emulation: Limit integer arguments to 65535
+
+To guard against malicious sequences containing excessively big numbers,
+limit all parsed numbers to 16 bit range. Doing this here in the parsing
+routine is a catch-all guard; this doesn't preclude enforcing
+more stringent limits in the handlers themselves.
+
+https://bugzilla.gnome.org/show_bug.cgi?id=676090
+---
+ src/table.c  | 2 +-
+ src/vteseq.c | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/table.c b/src/table.c
+index 140e8c8..85cf631 100644
+--- a/src/table.c
++++ b/src/table.c
+@@ -550,7 +550,7 @@ _vte_table_extract_numbers(GValueArray **array,
+ 		if (G_UNLIKELY (*array == NULL)) {
+ 			*array = g_value_array_new(1);
+ 		}
+-		g_value_set_long(&value, total);
++		g_value_set_long(&value, CLAMP (total, 0, G_MAXUSHORT));
+ 		g_value_array_append(*array, &value);
+ 	} while (i++ < arginfo->length);
+ 	g_value_unset(&value);
+diff --git a/src/vteseq.c b/src/vteseq.c
+index 7ef4c8c..10991db 100644
+--- a/src/vteseq.c
++++ b/src/vteseq.c
+@@ -557,7 +557,7 @@ vte_sequence_handler_multiple(VteTerminal *terminal,
+                               GValueArray *params,
+                               VteTerminalSequenceHandler handler)
+ {
+-        vte_sequence_handler_multiple_limited(terminal, params, handler, G_MAXLONG);
++        vte_sequence_handler_multiple_limited(terminal, params, handler, G_MAXUSHORT);
+ }
+ 
+ static void
+-- 
+2.4.9 (Apple Git-60)
+
+
+From cf1ad453a8def873c49cf6d88162593402f32bb2 Mon Sep 17 00:00:00 2001
+From: Christian Persch <chpe@gnome.org>
+Date: Sat, 19 May 2012 20:04:12 +0200
+Subject: [PATCH 2/2] emulation: Limit repetitions
+
+Don't allow malicious sequences to cause excessive repetitions.
+
+https://bugzilla.gnome.org/show_bug.cgi?id=676090
+---
+ src/vteseq.c | 25 ++++++++++++++++++-------
+ 1 file changed, 18 insertions(+), 7 deletions(-)
+
+diff --git a/src/vteseq.c b/src/vteseq.c
+index 10991db..209522f 100644
+--- a/src/vteseq.c
++++ b/src/vteseq.c
+@@ -1392,7 +1392,7 @@ vte_sequence_handler_dc (VteTerminal *terminal, GValueArray *params)
+ static void
+ vte_sequence_handler_DC (VteTerminal *terminal, GValueArray *params)
+ {
+-	vte_sequence_handler_multiple(terminal, params, vte_sequence_handler_dc);
++	vte_sequence_handler_multiple_r(terminal, params, vte_sequence_handler_dc);
+ }
+ 
+ /* Delete a line at the current cursor position. */
+@@ -1785,7 +1785,7 @@ vte_sequence_handler_reverse_index (VteTerminal *terminal, GValueArray *params)
+ static void
+ vte_sequence_handler_RI (VteTerminal *terminal, GValueArray *params)
+ {
+-	vte_sequence_handler_multiple(terminal, params, vte_sequence_handler_nd);
++	vte_sequence_handler_multiple_r(terminal, params, vte_sequence_handler_nd);
+ }
+ 
+ /* Save cursor (position). */
+@@ -2777,8 +2777,7 @@ vte_sequence_handler_insert_lines (VteTerminal *terminal, GValueArray *params)
+ {
+ 	GValue *value;
+ 	VteScreen *screen;
+-	long param, end, row;
+-	int i;
++	long param, end, row, i, limit;
+ 	screen = terminal->pvt->screen;
+ 	/* The default is one. */
+ 	param = 1;
+@@ -2796,7 +2795,13 @@ vte_sequence_handler_insert_lines (VteTerminal *terminal, GValueArray *params)
+ 	} else {
+ 		end = screen->insert_delta + terminal->row_count - 1;
+ 	}
+-	/* Insert the new lines at the cursor. */
++
++	/* Only allow to insert as many lines as there are between this row
++         * and the end of the scrolling region. See bug #676090.
++         */
++        limit = end - row + 1;
++        param = MIN (param, limit);
++
+ 	for (i = 0; i < param; i++) {
+ 		/* Clear a line off the end of the region and add one to the
+ 		 * top of the region. */
+@@ -2817,8 +2822,7 @@ vte_sequence_handler_delete_lines (VteTerminal *terminal, GValueArray *params)
+ {
+ 	GValue *value;
+ 	VteScreen *screen;
+-	long param, end, row;
+-	int i;
++	long param, end, row, i, limit;
+ 
+ 	screen = terminal->pvt->screen;
+ 	/* The default is one. */
+@@ -2837,6 +2841,13 @@ vte_sequence_handler_delete_lines (VteTerminal *terminal, GValueArray *params)
+ 	} else {
+ 		end = screen->insert_delta + terminal->row_count - 1;
+ 	}
++
++        /* Only allow to delete as many lines as there are between this row
++         * and the end of the scrolling region. See bug #676090.
++         */
++        limit = end - row + 1;
++        param = MIN (param, limit);
++
+ 	/* Clear them from below the current cursor. */
+ 	for (i = 0; i < param; i++) {
+ 		/* Insert a line at the end of the region and remove one from
+-- 
+2.4.9 (Apple Git-60)
+
diff --git a/yocto-poky/meta/recipes-support/vte/vte.inc b/yocto-poky/meta/recipes-support/vte/vte.inc
index 874062a..07b9e10 100644
--- a/yocto-poky/meta/recipes-support/vte/vte.inc
+++ b/yocto-poky/meta/recipes-support/vte/vte.inc
@@ -4,7 +4,8 @@
 DEPENDS = " glib-2.0 gtk+ intltool-native ncurses gobject-introspection-stub"
 RDEPENDS_libvte = "vte-termcap"
 
-inherit gnome gtk-doc
+inherit gnome gtk-doc distro_features_check
+ANY_OF_DISTRO_FEATURES = "${GTK2DISTROFEATURES}"
 
 EXTRA_OECONF = "--disable-python --disable-introspection"
 
diff --git a/yocto-poky/meta/recipes-support/vte/vte_0.28.2.bb b/yocto-poky/meta/recipes-support/vte/vte_0.28.2.bb
index b1025cb..8b4e7f7 100644
--- a/yocto-poky/meta/recipes-support/vte/vte_0.28.2.bb
+++ b/yocto-poky/meta/recipes-support/vte/vte_0.28.2.bb
@@ -4,7 +4,8 @@
 
 PR = "r6"
 
-SRC_URI += "file://obsolete_automake_macros.patch"
+SRC_URI += "file://obsolete_automake_macros.patch \
+            file://cve-2012-2738.patch"
 
 CFLAGS += "-D_GNU_SOURCE"
 
diff --git a/yocto-poky/meta/site/nios2-linux b/yocto-poky/meta/site/nios2-linux
new file mode 100644
index 0000000..9e53e5d
--- /dev/null
+++ b/yocto-poky/meta/site/nios2-linux
@@ -0,0 +1,395 @@
+ac_cv_func_lstat_dereferences_slashed_symlink=${ac_cv_func_lstat_dereferences_slashed_symlink=yes}
+ac_cv_func_lstat_empty_string_bug=${ac_cv_func_lstat_empty_string_bug=no}
+ac_cv_func_stat_empty_string_bug=${ac_cv_func_stat_empty_string_bug=no}
+ac_cv_func_stat_ignores_trailing_slash=${ac_cv_func_stat_ignores_trailing_slash=no}
+ac_cv_header_netinet_sctp_h=${ac_cv_header_netinet_sctp_h=no}
+ac_cv_header_netinet_sctp_uio_h=${ac_cv_header_netinet_sctp_uio_h=no}
+ac_cv_sctp=${ac_cv_sctp=no}
+
+# apache
+ac_cv_func_pthread_key_delete=${ac_cv_func_pthread_key_delete=yes}
+apr_cv_process_shared_works=${apr_cv_process_shared_works=no}
+ac_cv_sizeof_ssize_t=${ac_cv_sizeof_ssize_t=4}
+apr_cv_tcp_nodelay_with_cork=${apr_cv_tcp_nodelay_with_cork=yes}
+
+# bash
+ac_cv_c_long_double=${ac_cv_c_long_double=yes}
+bash_cv_func_sigsetjmp=${bash_cv_func_sigsetjmp=missing}
+
+# coreutils
+utils_cv_sys_open_max=${utils_cv_sys_open_max=1019}
+
+# cvs
+cvs_cv_func_printf_ptr=${cvs_cv_func_printf_ptr=yes}
+
+# db (sleepycat)
+db_cv_fcntl_f_setfd=${db_cv_fcntl_f_setfd=yes}
+db_cv_sprintf_count=${db_cv_sprintf_count=yes}
+db_cv_path_ar=${db_cv_path_ar=/usr/bin/ar}
+db_cv_path_chmod=${db_cv_path_chmod=/bin/chmod}
+db_cv_path_cp=${db_cv_path_cp=/bin/cp}
+db_cv_path_ln=${db_cv_path_ln=/bin/ln}
+db_cv_path_mkdir=${db_cv_path_mkdir=/bin/mkdir}
+db_cv_path_ranlib=${db_cv_path_ranlib=/usr/bin/ranlib}
+db_cv_path_rm=${db_cv_path_rm=/bin/rm}
+db_cv_path_sh=${db_cv_path_sh=/bin/sh}
+db_cv_path_strip=${db_cv_path_strip=/usr/bin/strip}
+db_cv_align_t=${db_cv_align_t='unsigned long long'}
+db_cv_alignp_t=${db_cv_alignp_t='unsigned long'}
+db_cv_mutex=${db_cv_mutex=ARM/gcc-assembly}
+db_cv_posixmutexes=${db_cv_posixmutexes=no}
+db_cv_uimutexes=${db_cv_uimutexes=no}
+
+# D-BUS
+ac_cv_func_posix_getpwnam_r=${ac_cv_func_posix_getpwnam_r=yes}
+
+# edb
+db_cv_spinlocks=${db_cv_spinlocks=no}
+
+# ettercap
+ettercap_cv_type_socklen_t=${ettercap_cv_type_socklen_t=yes}
+
+# fget
+compat_cv_func_snprintf_works=${compat_cv_func_snprintf_works=yes}
+compat_cv_func_basename_works=${compat_cv_func_basename_works=no}
+compat_cv_func_dirname_works=${compat_cv_func_dirname_works=no}
+
+# fnmatch
+ac_cv_func_fnmatch_works=${ac_cv_func_fnmatch_works=yes}
+
+# gettext
+am_cv_func_working_getline=${am_cv_func_working_getline=yes}
+
+# gio
+ac_cv_func_getgrgid_r=${ac_cv_func_getgrgid_r=yes}
+
+# glib
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+glib_cv_stack_grows=${glib_cv_stack_grows=no}
+glib_cv_uscore=${glib_cv_uscore=no}
+glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
+glib_cv_has__inline=${glib_cv_has__inline=yes}
+glib_cv_has__inline__=${glib_cv_has__inline__=yes}
+glib_cv_hasinline=${glib_cv_hasinline=yes}
+glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+glib_cv_uscore=${glib_cv_uscore=no}
+glib_cv_va_copy=${glib_cv_va_copy=yes}
+glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
+glib_cv___va_copy=${glib_cv___va_copy=yes}
+glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
+ac_cv_func_getpwuid_r=${ac_cv_func_getpwuid_r=yes}
+glib_cv_sys_pthread_mutex_trylock_posix=${glib_cv_sys_pthread_mutex_trylock_posix=yes}
+glib_cv_sys_pthread_getspecific_posix=${glib_cv_sys_pthread_getspecific_posix=yes}
+glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_posix=yes}
+
+# glib-2.0
+glib_cv_long_long_format=${glib_cv_long_long_format=ll}
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
+glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
+glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
+
+#gstreamer
+as_cv_unaligned_access=${as_cv_unaligned_access=no}
+
+# httppc
+ac_cv_strerror_r_SUSv3=${ac_cv_strerror_r_SUSv3=no}
+
+# ipsec-tools
+ac_cv_va_copy=${ac_cv_va_copy=yes}
+ac_cv___va_copy=${ac_cv___va_copy=yes}
+ac_cv_va_val_copy=${ac_cv_va_val_copy=yes}
+racoon_cv_bug_getaddrinfo=${racoon_cv_bug_getaddrinfo=no}
+
+# jikes
+ac_cv_sizeof_wchar_t=4
+
+# lftp
+ac_cv_need_trio=${ac_cv_need_trio=no}
+lftp_cv_va_copy=${lftp_cv_va_copy=yes}
+lftp_cv_va_val_copy=${lftp_cv_va_val_copy=yes}
+lftp_cv___va_copy=${lftp_cv___va_copy=yes}
+
+# libesmtp
+acx_working_snprintf=${acx_working_snprintf=yes}
+
+# libidl
+libIDL_cv_long_long_format=${libIDL_cv_long_long_format=ll}
+
+# libnet 
+ac_libnet_have_packet_socket=${ac_libnet_have_packet_socket=yes}
+
+# libpcap
+ac_cv_linux_vers=${ac_cv_linux_vers=2}
+
+# libxfce4util
+with_broken_putenv=${with_broken_putenv=no}
+
+# links
+ac_cv_lib_png_png_create_info_struct=${ac_cv_lib_png_png_create_info_struct=yes}
+
+# mono
+cv_mono_sizeof_sunpath=108
+mono_cv_sizeof_sunpath=108
+
+# mysql
+mysql_cv_func_atomic_sub=${mysql_cv_func_atomic_sub=no}
+mysql_cv_func_atomic_add=${mysql_cv_func_atomic_add=no}
+
+# nano
+ac_cv_regexec_segfault_emptystr=${ac_cv_regexec_segfault_emptystr=no}
+nano_cv_func_regexec_segv_emptystr=${nano_cv_func_regexec_segv_emptystr=no}
+
+# ORBit2
+ac_cv_alignof_CORBA_boolean=1
+ac_cv_alignof_CORBA_char=1
+ac_cv_alignof_CORBA_double=8
+ac_cv_alignof_CORBA_float=4
+ac_cv_alignof_CORBA_long=4
+ac_cv_alignof_CORBA_long_double=8
+ac_cv_alignof_CORBA_long_long=8
+ac_cv_alignof_CORBA_octet=1
+ac_cv_alignof_CORBA_pointer=4
+ac_cv_alignof_CORBA_short=2
+ac_cv_alignof_CORBA_struct=1
+ac_cv_alignof_CORBA_wchar=2
+ac_cv_func_getaddrinfo=${ac_cv_func_getaddrinfo=yes}
+
+# php
+ac_cv_pread=${ac_cv_pread=no}
+ac_cv_pwrite=${ac_cv_pwrite=no}
+php_cv_lib_cookie_io_functions_use_off64_t=${php_cv_lib_cookie_io_functions_use_off64_t=yes}
+cv_php_mbstring_stdarg=${cv_php_mbstring_stdarg=yes}
+
+# rsync
+rsync_cv_HAVE_BROKEN_LARGEFILE=${rsync_cv_HAVE_BROKEN_LARGEFILE=no}
+rsync_cv_HAVE_SOCKETPAIR=${rsync_cv_HAVE_SOCKETPAIR=yes}
+rsync_cv_HAVE_LONGLONG=${rsync_cv_HAVE_LONGLONG=yes}
+rsync_cv_HAVE_OFF64_T=${rsync_cv_HAVE_OFF64_T=no}
+rsync_cv_HAVE_SHORT_INO_T=${rsync_cv_HAVE_SHORT_INO_T=no}
+rsync_cv_HAVE_UNSIGNED_CHAR=${rsync_cv_HAVE_UNSIGNED_CHAR=no}
+rsync_cv_HAVE_BROKEN_READDIR=${rsync_cv_HAVE_BROKEN_READDIR=no}
+rsync_cv_HAVE_GETTIMEOFDAY_TZ=${rsync_cv_HAVE_GETTIMEOFDAY_TZ=yes}
+rsync_cv_HAVE_C99_VSNPRINTF=${rsync_cv_HAVE_C99_VSNPRINTF=yes}
+rsync_cv_HAVE_SECURE_MKSTEMP=${rsync_cv_HAVE_SECURE_MKSTEMP=yes}
+rsync_cv_REPLACE_INET_NTOA=${rsync_cv_REPLACE_INET_NTOA=no}
+rsync_cv_REPLACE_INET_ATON=${rsync_cv_REPLACE_INET_ATON=no}
+
+# screen
+screen_cv_sys_bcopy_overlap=${screen_cv_sys_bcopy_overlap=no}
+screen_cv_sys_memcpy_overlap=${screen_cv_sys_memcpy_overlap=no}
+screen_cv_sys_memmove_overlap=${screen_cv_sys_memmove_overlap=no}
+screen_cv_sys_fifo_broken_impl=${screen_cv_sys_fifo_broken_impl=yes}
+screen_cv_sys_fifo_usable=${screen_cv_sys_fifo_usable=yes}
+screen_cv_sys_select_broken_retval=${screen_cv_sys_select_broken_retval=no}
+screen_cv_sys_sockets_nofs=${screen_cv_sys_sockets_nofs=no}
+screen_cv_sys_sockets_usable=${screen_cv_sys_sockets_usable=yes}
+screen_cv_sys_terminfo_used=${screen_cv_sys_terminfo_used=yes}
+
+# slrn
+slrn_cv___va_copy=${slrn_cv___va_copy=yes}
+slrn_cv_va_copy=${slrn_cv_va_copy=yes}
+slrn_cv_va_val_copy=${slrn_cv_va_val_copy=yes}
+ac_cv_func_realloc_works=${ac_cv_func_realloc_works=yes}
+ac_cv_func_realloc_0_nonnull=${ac_cv_func_realloc_0_nonnull=yes}
+ac_cv_func_malloc_works=${ac_cv_func_malloc_works=yes}
+ac_cv_func_malloc_0_nonnull=${ac_cv_func_malloc_0_nonnull=yes}
+
+# socat
+ac_cv_ispeed_offset=${ac_cv_ispeed_offset=13}
+sc_cv_termios_ispeed=${sc_cv_termios_ispeed=yes}
+
+# ssh
+ac_cv_have_space_d_name_in_struct_dirent=${ac_cv_dirent_have_space_d_name=yes}
+ac_cv_have_broken_snprintf=${ac_cv_have_broken_snprintf=no}
+ac_cv_have_accrights_in_msghdr=${ac_cv_have_accrights_in_msghdr=no}
+ac_cv_have_control_in_msghdr=${ac_cv_have_control_in_msghdr=yes}
+ac_cv_have_openpty_ctty_bug=${ac_cv_have_openpty_ctty_bug=yes}
+
+# startup-notification
+lf_cv_sane_realloc=yes
+
+# sudo
+sudo_cv_uid_t_len=${sudo_cv_uid_t_len=10}
+
+# xffm
+jm_cv_func_working_readdir=yes
+
+# dpkg
+dpkg_cv_va_copy=${ac_cv_va_copy=yes}
+dpkg_cv___va_copy=${ac_cv___va_copy=yes}
+
+# eds-dbus
+ac_cv_libiconv_utf8=${ac_cv_libiconv_utf8=yes}
+ac_cv_func_getpgrp_void=yes
+ac_cv_func_setpgrp_void=yes
+ac_cv_func_setgrent_void=yes
+ac_cv_func_malloc_0_nonnull=yes
+ac_cv_func_malloc_works=yes
+ac_cv_func_posix_getpwuid_r=${ac_cv_func_posix_getpwuid_r=yes}
+ac_cv_func_posix_getgrgid_r=${ac_cv_func_posix_getgrgid_r=yes}
+ac_cv_func_setvbuf_reversed=no
+ac_cv_sizeof___int64=${ac_cv_sizeof___int64=0}
+ac_cv_sizeof_char=${ac_cv_sizeof_char=1}
+ac_cv_sizeof_wchar_t=${ac_cv_sizeof_wchar_t=1}
+ac_cv_sizeof_unsigned_char=${ac_cv_sizeof_unsigned_char=1}
+ac_cv_sizeof_bool=${ac_cv_sizeof_bool=1}
+ac_cv_sizeof_char_p=${ac_cv_sizeof_int_p=4}
+ac_cv_sizeof_int=${ac_cv_sizeof_int=4}
+ac_cv_sizeof_int_p=${ac_cv_sizeof_int_p=4}
+ac_cv_sizeof_long=${ac_cv_sizeof_long=4}
+ac_cv_sizeof_long_int=${ac_cv_sizeof_long_int=4}
+ac_cv_sizeof_long_long=${ac_cv_sizeof_long_long=8}
+ac_cv_sizeof_off_t=${ac_cv_sizeof_off_t=4}
+ac_cv_sizeof_short=${ac_cv_sizeof_short=2}
+ac_cv_sizeof_short_int=${ac_cv_sizeof_short_int=2}
+ac_cv_sizeof_size_t=${ac_cv_sizeof_size_t=4}
+ac_cv_sizeof_void_p=${ac_cv_sizeof_void_p=4}
+ac_cv_sizeof_long_p=${ac_cv_sizeof_long_p=4}
+ac_cv_sizeof_float=${ac_cv_sizeof_float=4}
+ac_cv_sizeof_double=${ac_cv_sizeof_double=8}
+ac_cv_sizeof_long_double=${ac_cv_sizeof_long_double=8}
+ac_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+ac_cv_sizeof_unsigned_short=${ac_cv_sizeof_unsigned_short=2}
+ac_cv_sizeof_unsigned=${ac_cv_sizeof_unsigned=4}
+ac_cv_sizeof_unsigned_int=${ac_cv_sizeof_unsigned_int=4}
+ac_cv_sizeof_unsigned_long=${ac_cv_sizeof_unsigned_long=4}
+ac_cv_sizeof_unsigned_long_long=${ac_cv_sizeof_unsigned_long_long=8}
+ac_cv_sizeof_signed_char=${ac_cv_sizeof_signed_char=1}
+
+ac_cv_uchar=${ac_cv_uchar=no}
+ac_cv_uint=${ac_cv_uint=yes}
+ac_cv_ulong=${ac_cv_ulong=yes}
+ac_cv_ushort=${ac_cv_ushort=yes}
+ac_cv_time_r_type=${ac_cv_time_r_type=POSIX}
+
+# samba
+samba_cv_BROKEN_NISPLUS_INCLUDE_FILES=${samba_cv_BROKEN_NISPLUS_INCLUDE_FILES=yes}
+samba_cv_BROKEN_REDHAT_7_SYSTEM_HEADERS=${samba_cv_BROKEN_REDHAT_7_SYSTEM_HEADERS=no}
+samba_cv_HAVE_BROKEN_FCNTL64_LOCKS=${samba_cv_HAVE_BROKEN_FCNTL64_LOCKS=no}
+samba_cv_HAVE_BROKEN_GETGROUPS=${samba_cv_HAVE_BROKEN_GETGROUPS=no}
+samba_cv_HAVE_BROKEN_LINUX_SENDFILE=${samba_cv_HAVE_BROKEN_LINUX_SENDFILE=yes}
+samba_cv_HAVE_BROKEN_READDIR=${samba_cv_HAVE_BROKEN_READDIR=yes}
+samba_cv_HAVE_BROKEN_READDIR_NAME=${samba_cv_HAVE_BROKEN_READDIR_NAME=no}
+samba_cv_HAVE_C99_VSNPRINTF=${samba_cv_HAVE_C99_VSNPRINTF=yes}
+samba_cv_HAVE_DEV64_T=${samba_cv_HAVE_DEV64_T=no}
+samba_cv_HAVE_DEVICE_MAJOR_FN=${samba_cv_HAVE_DEVICE_MAJOR_FN=yes}
+samba_cv_HAVE_DEVICE_MINOR_FN=${samba_cv_HAVE_DEVICE_MINOR_FN=yes}
+samba_cv_HAVE_DQB_FSOFTLIMIT=${samba_cv_HAVE_DQB_FSOFTLIMIT=no}
+samba_cv_HAVE_EXPLICIT_LARGEFILE_SUPPORT=${samba_cv_HAVE_EXPLICIT_LARGEFILE_SUPPORT=yes}
+samba_cv_HAVE_FAM_H=${samba_cv_HAVE_FAM_H=no}
+samba_cv_HAVE_FCNTL_LOCK=${samba_cv_HAVE_FCNTL_LOCK=yes}
+samba_cv_HAVE_FTRUNCATE_EXTEND=${samba_cv_HAVE_FTRUNCATE_EXTEND=yes}
+samba_cv_HAVE_FUNCTION_MACRO=${samba_cv_HAVE_FUNCTION_MACRO=yes}
+samba_cv_HAVE_GETTIMEOFDAY_TZ=${samba_cv_HAVE_GETTIMEOFDAY_TZ=yes}
+samba_cv_HAVE_INO64_T=${samba_cv_HAVE_INO64_T=no}
+samba_cv_HAVE_INT16_FROM_RPC_RPC_H=${samba_cv_HAVE_INT16_FROM_RPC_RPC_H=no}
+samba_cv_HAVE_INT32_FROM_RPC_RPC_H=${samba_cv_HAVE_INT32_FROM_RPC_RPC_H=no}
+samba_cv_HAVE_KERNEL_CHANGE_NOTIFY=${samba_cv_HAVE_KERNEL_CHANGE_NOTIFY=yes}
+samba_cv_HAVE_KERNEL_OPLOCKS_IRIX=${samba_cv_HAVE_KERNEL_OPLOCKS_IRIX=no}
+samba_cv_HAVE_KERNEL_OPLOCKS_LINUX=${samba_cv_HAVE_KERNEL_OPLOCKS_LINUX=yes}
+samba_cv_HAVE_KERNEL_SHARE_MODES=${samba_cv_HAVE_KERNEL_SHARE_MODES=yes}
+samba_cv_HAVE_MAKEDEV=${samba_cv_HAVE_MAKEDEV=yes}
+samba_cv_HAVE_MMAP=${samba_cv_HAVE_MMAP=yes}
+samba_cv_HAVE_NATIVE_ICONV=${samba_cv_HAVE_NATIVE_ICONV=yes}
+samba_cv_HAVE_OFF64_T=${samba_cv_HAVE_OFF64_T=no}
+samba_cv_HAVE_ROOT=${samba_cv_HAVE_ROOT=yes}
+samba_cv_HAVE_RPC_AUTH_ERROR_CONFLICT=${samba_cv_HAVE_RPC_AUTH_ERROR_CONFLICT=no}
+samba_cv_HAVE_SECURE_MKSTEMP=${samba_cv_HAVE_SECURE_MKSTEMP=yes}
+samba_cv_HAVE_SENDFILE=${samba_cv_HAVE_SENDFILE=yes}
+samba_cv_HAVE_SENDFILE64=${samba_cv_HAVE_SENDFILE64=yes}
+samba_cv_HAVE_SOCK_SIN_LEN=${samba_cv_HAVE_SOCK_SIN_LEN=no}
+samba_cv_HAVE_STAT_ST_BLKSIZE=${samba_cv_HAVE_STAT_ST_BLKSIZE=yes}
+samba_cv_HAVE_STAT_ST_BLOCKS=${samba_cv_HAVE_STAT_ST_BLOCKS=yes}
+samba_cv_HAVE_STRUCT_DIR64=${samba_cv_HAVE_STRUCT_DIR64=no}
+samba_cv_HAVE_STRUCT_DIRENT64=${samba_cv_HAVE_STRUCT_DIRENT64=yes}
+samba_cv_HAVE_STRUCT_FLOCK64=${samba_cv_HAVE_STRUCT_FLOCK64=yes}
+samba_cv_HAVE_TRUNCATED_SALT=${samba_cv_HAVE_TRUNCATED_SALT=no}
+samba_cv_HAVE_UINT16_FROM_RPC_RPC_H=${samba_cv_HAVE_UINT16_FROM_RPC_RPC_H=no}
+samba_cv_HAVE_UINT32_FROM_RPC_RPC_H=${samba_cv_HAVE_UINT32_FROM_RPC_RPC_H=no}
+samba_cv_HAVE_UNSIGNED_CHAR=${samba_cv_HAVE_UNSIGNED_CHAR=yes}
+samba_cv_HAVE_UTIMBUF=${samba_cv_HAVE_UTIMBUF=yes}
+samba_cv_HAVE_UT_UT_ADDR=${samba_cv_HAVE_UT_UT_ADDR=yes}
+samba_cv_HAVE_UT_UT_EXIT=${samba_cv_HAVE_UT_UT_EXIT=yes}
+samba_cv_HAVE_UT_UT_HOST=${samba_cv_HAVE_UT_UT_HOST=yes}
+samba_cv_HAVE_UT_UT_ID=${samba_cv_HAVE_UT_UT_ID=yes}
+samba_cv_HAVE_UT_UT_NAME=${samba_cv_HAVE_UT_UT_NAME=yes}
+samba_cv_HAVE_UT_UT_PID=${samba_cv_HAVE_UT_UT_PID=yes}
+samba_cv_HAVE_UT_UT_TIME=${samba_cv_HAVE_UT_UT_TIME=yes}
+samba_cv_HAVE_UT_UT_TV=${samba_cv_HAVE_UT_UT_TV=yes}
+samba_cv_HAVE_UT_UT_TYPE=${samba_cv_HAVE_UT_UT_TYPE=yes}
+samba_cv_HAVE_UT_UT_USER=${samba_cv_HAVE_UT_UT_USER=yes}
+samba_cv_HAVE_UX_UT_SYSLEN=${samba_cv_HAVE_UX_UT_SYSLEN=no}
+samba_cv_HAVE_VA_COPY=${samba_cv_HAVE_VA_COPY=yes}
+samba_cv_HAVE_WORKING_AF_LOCAL=${samba_cv_HAVE_WORKING_AF_LOCAL=yes}
+samba_cv_HAVE_Werror=${samba_cv_HAVE_Werror=yes}
+samba_cv_PUTUTLINE_RETURNS_UTMP=${samba_cv_PUTUTLINE_RETURNS_UTMP=yes}
+samba_cv_QUOTA_WORKS=${samba_cv_QUOTA_WORKS=yes}
+samba_cv_REALPATH_TAKES_NULL=${samba_cv_REALPATH_TAKES_NULL=yes}
+samba_cv_REPLACE_GETPASS=${samba_cv_REPLACE_GETPASS=yes}
+samba_cv_REPLACE_INET_NTOA=${samba_cv_REPLACE_INET_NTOA=no}
+samba_cv_REPLACE_READDIR=${samba_cv_REPLACE_READDIR=no}
+samba_cv_RUN_QUOTA_TESTS=${samba_cv_RUN_QUOTA_TESTS=yes}
+samba_cv_SEEKDIR_RETURNS_VOID=${samba_cv_SEEKDIR_RETURNS_VOID=yes}
+samba_cv_SIZEOF_DEV_T=${samba_cv_SIZEOF_DEV_T=yes}
+samba_cv_SIZEOF_INO_T=${samba_cv_SIZEOF_INO_T=yes}
+samba_cv_SIZEOF_OFF_T=${samba_cv_SIZEOF_OFF_T=yes}
+samba_cv_SYSCONF_SC_NGROUPS_MAX=${samba_cv_SYSCONF_SC_NGROUPS_MAX=yes}
+samba_cv_SYSCONF_SC_NPROCESSORS_ONLN=${samba_cv_SYSCONF_SC_NPROCESSORS_ONLN=yes}
+samba_cv_SYSCONF_SC_NPROC_ONLN=${samba_cv_SYSCONF_SC_NPROC_ONLN=no}
+samba_cv_SYSCONF_SC_PAGESIZE=${samba_cv_SYSCONF_SC_PAGESIZE=yes}
+samba_cv_SYSQUOTA_FOUND=${samba_cv_SYSQUOTA_FOUND=yes}
+samba_cv_SYSQUOTA_WORKS=${samba_cv_SYSQUOTA_WORKS=yes}
+samba_cv_SYSQUOTA_WORKS_XFS=${samba_cv_SYSQUOTA_WORKS_XFS=yes}
+samba_cv_TRY_QUOTAS=${samba_cv_TRY_QUOTAS=no}
+samba_cv_TRY_SYS_QUOTAS=${samba_cv_TRY_SYS_QUOTAS=yes}
+samba_cv_USE_SETRESUID=${samba_cv_USE_SETRESUID=yes}
+samba_cv_WE_USE_SYS_QUOTAS=${samba_cv_WE_USE_SYS_QUOTAS=yes}
+samba_cv_WITH_AFS=${samba_cv_WITH_AFS=no}
+samba_cv_WITH_FAKE_KASERVER=${samba_cv_WITH_FAKE_KASERVER=no}
+samba_cv_WITH_QUOTAS=${samba_cv_WITH_QUOTAS=auto}
+samba_cv_WITH_SYS_QUOTAS=${samba_cv_WITH_SYS_QUOTAS=auto}
+samba_cv_WITH_VFS_AFSACL=${samba_cv_WITH_VFS_AFSACL=no}
+samba_cv_compiler_supports_ll=${samba_cv_compiler_supports_ll=yes}
+samba_cv_found_xfs_header=${samba_cv_found_xfs_header=yes}
+samba_cv_have_longlong=${samba_cv_have_longlong=yes}
+samba_cv_have_setresgid=${samba_cv_have_setresgid=yes}
+samba_cv_have_setresuid=${samba_cv_have_setresuid=yes}
+samba_cv_immediate_structures=${samba_cv_immediate_structures=yes}
+samba_cv_optimize_out_funcation_calls=${samba_cv_optimize_out_funcation_calls=yes}
+samba_cv_sig_atomic_t=${samba_cv_sig_atomic_t=yes}
+samba_cv_socklen_t=${samba_cv_socklen_t=yes}
+samba_cv_struct_timespec=${samba_cv_struct_timespec=yes}
+samba_cv_sysquotas_file=${samba_cv_sysquotas_file=lib/sysquotas_linux.c}
+samba_cv_unixsocket=${samba_cv_unixsocket=yes}
+samba_cv_volatile=${samba_cv_volatile=yes}
+
+# older samba defines
+samba_cv_USE_SETEUID=${samba_cv_USE_SETEUID=yes}
+samba_cv_USE_SETREUID=${samba_cv_USE_SETREUID=yes}
+samba_cv_USE_SETUIDX=${samba_cv_USE_SETUIDX=yes}
+samba_cv_LINUX_LFS_SUPPORT=${samba_cv_LINUX_LFS_SUPPORT=yes}
+
+# clamav
+clamav_av_func_working_snprintf_long=${clamav_av_func_working_snprintf_long=yes}
+clamav_av_have_in_port_t=${clamav_av_have_in_port_t=yes}
+clamav_av_have_in_addr_t=${clamav_av_have_in_addr_t=yes}
+ac_cv_func_mmap_fixed_mapped=${ac_cv_func_mmap_fixed_mapped=yes}
+
+#dbus
+ac_cv_have_abstract_sockets=${ac_cv_have_abstract_sockets=yes}
+
+# lftp
+ac_cv_file___dev_ptc_=yes
+
+# guile
+ac_cv_func_pthread_attr_getstack=${ac_cv_func_pthread_attr_getstack=yes}
+
+# gnet
+ac_cv_member_struct_sockaddr_sa_len=${ac_cv_member_struct_sockaddr_sa_len=no}
+ac_cv_gnet_have_abstract_sockets=${ac_cv_gnet_have_abstract_sockets=no}
+gnet_sockaddr_family_field_name=${gnet_sockaddr_family_field_name=ss_family}
diff --git a/yocto-poky/scripts/contrib/devtool-stress.py b/yocto-poky/scripts/contrib/devtool-stress.py
new file mode 100755
index 0000000..4b35fc9
--- /dev/null
+++ b/yocto-poky/scripts/contrib/devtool-stress.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+
+# devtool stress tester
+#
+# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
+#
+# Copyright 2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import sys
+import os
+import os.path
+import subprocess
+import re
+import argparse
+import logging
+import tempfile
+import shutil
+import signal
+import fnmatch
+
+scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
+sys.path.insert(0, scripts_lib_path)
+import scriptutils
+logger = scriptutils.logger_create('devtool-stress')
+
+def select_recipes(args):
+    import bb.tinfoil
+    tinfoil = bb.tinfoil.Tinfoil()
+    tinfoil.prepare(False)
+
+    pkg_pn = tinfoil.cooker.recipecache.pkg_pn
+    (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecache, pkg_pn)
+
+    skip_classes = args.skip_classes.split(',')
+
+    recipelist = []
+    for pn in sorted(pkg_pn):
+        pref = preferred_versions[pn]
+        inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecache.inherits[pref[1]]]
+        for cls in skip_classes:
+            if cls in inherits:
+                break
+        else:
+            recipelist.append(pn)
+
+    tinfoil.shutdown()
+
+    resume_from = args.resume_from
+    if resume_from:
+        if not resume_from in recipelist:
+            print('%s is not a testable recipe' % resume_from)
+            return 1
+    if args.only:
+        only = args.only.split(',')
+        for onlyitem in only:
+            for pn in recipelist:
+                if fnmatch.fnmatch(pn, onlyitem):
+                    break
+            else:
+                print('%s does not match any testable recipe' % onlyitem)
+                return 1
+    else:
+        only = None
+    if args.skip:
+        skip = args.skip.split(',')
+    else:
+        skip = []
+
+    recipes = []
+    for pn in recipelist:
+        if resume_from:
+            if pn == resume_from:
+                resume_from = None
+            else:
+                continue
+
+        if args.only:
+            for item in only:
+                if fnmatch.fnmatch(pn, item):
+                    break
+            else:
+                continue
+
+        skipit = False
+        for item in skip:
+            if fnmatch.fnmatch(pn, item):
+                skipit = True
+        if skipit:
+            continue
+
+        recipes.append(pn)
+
+    return recipes
+
+
+def stress_extract(args):
+    import bb.process
+
+    recipes = select_recipes(args)
+
+    failures = 0
+    tmpdir = tempfile.mkdtemp()
+    os.setpgrp()
+    try:
+        for pn in recipes:
+            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
+            sys.stdout.flush()
+            failed = False
+
+            srctree = os.path.join(tmpdir, pn)
+            try:
+                bb.process.run('devtool extract %s %s' % (pn, srctree))
+            except bb.process.CmdError as exc:
+                failed = True
+                with open('stress_%s_extract.log' % pn, 'w') as f:
+                    f.write(str(exc))
+
+            if os.path.exists(srctree):
+                shutil.rmtree(srctree)
+
+            if failed:
+                print('failed')
+                failures += 1
+            else:
+                print('ok')
+    except KeyboardInterrupt:
+        # We want any child processes killed. This is crude, but effective.
+        os.killpg(0, signal.SIGTERM)
+
+    if failures:
+        return 1
+    else:
+        return 0
+
+
+def stress_modify(args):
+    import bb.process
+
+    recipes = select_recipes(args)
+
+    failures = 0
+    tmpdir = tempfile.mkdtemp()
+    os.setpgrp()
+    try:
+        for pn in recipes:
+            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
+            sys.stdout.flush()
+            failed = False
+            reset = True
+
+            srctree = os.path.join(tmpdir, pn)
+            try:
+                bb.process.run('devtool modify -x %s %s' % (pn, srctree))
+            except bb.process.CmdError as exc:
+                with open('stress_%s_modify.log' % pn, 'w') as f:
+                    f.write(str(exc))
+                failed = 'modify'
+                reset = False
+
+            if not failed:
+                try:
+                    bb.process.run('bitbake -c install %s' % pn)
+                except bb.process.CmdError as exc:
+                    with open('stress_%s_install.log' % pn, 'w') as f:
+                        f.write(str(exc))
+                    failed = 'build'
+            if reset:
+                try:
+                    bb.process.run('devtool reset %s' % pn)
+                except bb.process.CmdError as exc:
+                    print('devtool reset failed: %s' % str(exc))
+                    break
+
+            if os.path.exists(srctree):
+                shutil.rmtree(srctree)
+
+            if failed:
+                print('failed (%s)' % failed)
+                failures += 1
+            else:
+                print('ok')
+    except KeyboardInterrupt:
+        # We want any child processes killed. This is crude, but effective.
+        os.killpg(0, signal.SIGTERM)
+
+    if failures:
+        return 1
+    else:
+        return 0
+
+
+def main():
+    parser = argparse.ArgumentParser(description="devtool stress tester",
+                                     epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
+    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+    parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
+    parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
+    parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
+    parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
+    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
+
+    parser_modify = subparsers.add_parser('modify',
+                                          help='Run "devtool modify" followed by a build with bitbake on matching recipes',
+                                          description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
+    parser_modify.set_defaults(func=stress_modify)
+
+    parser_extract = subparsers.add_parser('extract',
+                                           help='Run "devtool extract" on matching recipes',
+                                           description='Runs "devtool extract" on matching recipes')
+    parser_extract.set_defaults(func=stress_extract)
+
+    args = parser.parse_args()
+
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+
+    import scriptpath
+    bitbakepath = scriptpath.add_bitbake_lib_path()
+    if not bitbakepath:
+        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+        return 1
+    logger.debug('Found bitbake path: %s' % bitbakepath)
+
+    ret = args.func(args)
+
+if __name__ == "__main__":
+    main()
diff --git a/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py b/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py
index 06eecdc..ca2fa61 100755
--- a/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py
+++ b/yocto-poky/scripts/contrib/python/generate-manifest-3.4.py
@@ -238,7 +238,7 @@
     m.addPackage( "${PN}-db", "Python file-based database support", "${PN}-core",
     "anydbm.* dumbdbm.* whichdb.* dbm lib-dynload/_dbm.*.so" )
 
-    m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint",
+    m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint ${PN}-importlib ${PN}-pkgutil",
     "bdb.* pdb.*" )
 
     m.addPackage( "${PN}-difflib", "Python helpers for computing deltas between objects", "${PN}-lang ${PN}-re",
diff --git a/yocto-poky/scripts/devtool b/yocto-poky/scripts/devtool
index 87df951..e4d9db3 100755
--- a/yocto-poky/scripts/devtool
+++ b/yocto-poky/scripts/devtool
@@ -221,9 +221,6 @@
     if not config.read():
         return -1
 
-    # We need to be in this directory or we won't be able to initialise tinfoil
-    os.chdir(basepath)
-
     bitbake_subdir = config.get('General', 'bitbake_subdir', '')
     if bitbake_subdir:
         # Normally set for use within the SDK
@@ -244,7 +241,7 @@
     scriptutils.logger_setup_color(logger, global_args.color)
 
     if global_args.bbpath is None:
-        tinfoil = setup_tinfoil(config_only=True)
+        tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
         global_args.bbpath = tinfoil.config_data.getVar('BBPATH', True)
     else:
         tinfoil = None
diff --git a/yocto-poky/scripts/gen-lockedsig-cache b/yocto-poky/scripts/gen-lockedsig-cache
index c93b2c0..6aa3614 100755
--- a/yocto-poky/scripts/gen-lockedsig-cache
+++ b/yocto-poky/scripts/gen-lockedsig-cache
@@ -1,7 +1,4 @@
 #!/usr/bin/env python
-#
-# gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir>
-#
 
 import os
 import sys
@@ -18,14 +15,17 @@
 
 if len(sys.argv) < 3:
     print("Incorrect number of arguments specified")
+    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir>")
     sys.exit(1)
 
+print('Reading %s' % sys.argv[1])
 sigs = []
 with open(sys.argv[1]) as f:
     for l in f.readlines():
         if ":" in l:
             sigs.append(l.split(":")[2].split()[0])
 
+print('Gathering file list')
 files = set()
 for s in sigs:
     p = sys.argv[2] + "/" + s[:2] + "/*" + s + "*"
@@ -33,14 +33,25 @@
     p = sys.argv[2] + "/*/" + s[:2] + "/*" + s + "*"
     files |= set(glob.glob(p))
 
+print('Processing files')
 for f in files:
-    dst = f.replace(sys.argv[2], sys.argv[3])
+    sys.stdout.write('Processing %s... ' % f)
+    _, ext = os.path.splitext(f)
+    if not ext in ['.tgz', '.siginfo', '.sig']:
+        # Most likely a temp file, skip it
+        print('skipping')
+        continue
+    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
     destdir = os.path.dirname(dst)
     mkdir(destdir)
 
     if os.path.exists(dst):
         os.remove(dst)
     if (os.stat(f).st_dev == os.stat(destdir).st_dev):
+        print('linking')
         os.link(f, dst)
     else:
+        print('copying')
         shutil.copyfile(f, dst)
+
+print('Done!')
diff --git a/yocto-poky/scripts/lib/bsp/engine.py b/yocto-poky/scripts/lib/bsp/engine.py
index 7d6be23..d0d5d72 100644
--- a/yocto-poky/scripts/lib/bsp/engine.py
+++ b/yocto-poky/scripts/lib/bsp/engine.py
@@ -1684,9 +1684,9 @@
             print "Couldn't open properties file %s for writing, exiting" % properties_file
             sys.exit(1)
 
-        json.dump(properties, of)
-
-    print_dict(properties)
+        json.dump(properties, of, indent=1)
+    else:
+        print_dict(properties)
 
 
 def split_nested_property(property):
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc
index ca5f3b5..ea6966c 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH arm
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc
index 9014c2c..405972d 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH arm
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc
index 3f1c252..921b7e7 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH arm
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
index 811d695..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 5f8db03..c56e9e5 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index 471ccbc..15b9b1b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index 4de82fa..caefcfc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 1e1cc51..5af490d 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend
index 97e1bb8..c327d16 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..2d3d073
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
index 7189341..1739ab3 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
@@ -33,7 +33,7 @@
 
 {{ input type:"boolean" name:"xserver" prio:"50" msg:"Do you need support for X? (y/n)" default:"y" }}
 
-{{ if xserver == "y" and (kernel_choice == "linux-yocto_3.19" or kernel_choice == "linux-yocto_3.14"): }}
+{{ if xserver == "y" and (kernel_choice == "linux-yocto_4.1" or kernel_choice == "linux-yocto_3.19" or kernel_choice == "linux-yocto_3.14"): }}
 {{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_vesa" }}
 {{ input type:"choice" val:"xserver_vesa" msg:"VESA xserver support" }}
 {{ input type:"choice" val:"xserver_i915" msg:"i915 xserver support" }}
@@ -49,7 +49,7 @@
 {{ input type:"choice" val:"xserver_fbdev" msg:"fbdev xserver support" }}
 {{ input type:"choice" val:"xserver_modesetting" msg:"modesetting xserver support" }}
 
-{{ if xserver == "y" and kernel_choice != "linux-yocto_3.19" and kernel_choice != "linux-yocto_3.14" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
+{{ if xserver == "y" and kernel_choice != "linux-yocto_4.1" and kernel_choice != "linux-yocto_3.19" and kernel_choice != "linux-yocto_3.14" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
 
 {{ if xserver == "y": }}
 XSERVER ?= "${XSERVER_X86_BASE} \
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc
index 619ee3f..7146e23 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH i386
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc
index 682012f..67a54be 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH i386
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc
index cc75196..91373b3 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH i386
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
index 811d695..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 5f8db03..c56e9e5 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index 471ccbc..15b9b1b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index 4de82fa..caefcfc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
index fbb49ed..41325ca 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend
index 0c2cb5a..b471742 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..761b9c6
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc
index 176190c..a128255 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH mips
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc
index f05dd85..7c9dc52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH mips
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc
index f71c775..64f395b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH mips
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
index 811d695..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 5f8db03..c56e9e5 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index c7e7989..7c6bc7f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index a9ba0ae..7f20379 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 1e1cc51..5af490d 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend
index 97e1bb8..c327d16 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..1e99a04
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc
index 176190c..a128255 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH mips
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc
index f05dd85..7c9dc52 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH mips
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc
index f71c775..64f395b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH mips
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 5f8db03..c56e9e5 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index c7e7989..7c6bc7f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index a9ba0ae..7f20379 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
index fb6cdef..858d5fc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
 SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "dbe5b52e93ff114b2c0f5da6f6af91f52c18f2b8"
 SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "6eddbf47875ef48ddc5864957a7b63363100782b"
-#LINUX_VERSION = "3.14"
\ No newline at end of file
+#LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend
index 134aeec..f084531 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..01a046c
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/edgerouter" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
index c94f7f9..018146f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
@@ -74,7 +74,7 @@
            xf86-input-evdev \
            xf86-video-fbdev"
 
-PREFERRED_VERSION_u-boot ?= "v2015.01%"
+PREFERRED_VERSION_u-boot ?= "v2015.07%"
 {{ input type:"edit" name:"uboot_entrypoint" prio:"40" msg:"Please specify a value for UBOOT_ENTRYPOINT:" default:"0x00000000" }}
 UBOOT_ENTRYPOINT = "{{=uboot_entrypoint}}"
 
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc
index 40c9267..91ccfb8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH powerpc
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc
index 7a1d35b..89b344f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH powerpc
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc
index 1bf94b2..2701fd8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH powerpc
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
index 811d695..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 7a25446..4700d98 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index 471ccbc..15b9b1b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index 4de82fa..caefcfc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
index e688384..6f1e7b0 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend
index ded9e85..44086af 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..1e99a04
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc
index 6aaffb8..a81b858 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH {{=qemuarch}}
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc
index d2a03ec..14554da 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc
@@ -1,6 +1,6 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH {{=qemuarch}}
 
 {{ if qemuarch == "i386" or qemuarch == "x86_64": }}
@@ -14,6 +14,7 @@
 {{ if qemuarch == "mips64": }}
 include bsp/mti-malta64/mti-malta64-be-standard
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc
index 6c098fe..41d4c6f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH {{=qemuarch}}
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
index 811d695..7676ca8 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_4.1"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
index be479be..3555285 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -51,6 +51,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index ce5e1a0..9e5aa97 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -55,8 +55,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index 7879ce2..1faac44 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -55,8 +55,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index f7ef4bb..51fc7d0 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -55,8 +55,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..14ee16f
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,62 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/common-pc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 626019c..fbaed33 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -51,12 +51,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend
index b4798b7..7b590ad 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -51,12 +51,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..82bb970
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,61 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base your new BSP branch on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose an existing machine branch to use for this BSP:" default:"standard/arm-versatile-926ejs" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64"  prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64"  prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc
index fd5320b..bbeeecd 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-preempt-rt.scc
 define KMACHINE {{=machine}}
-define KTYPE preempt-rt
+
 define KARCH x86_64
 
 include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc
index 569f967..9c9cc90 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-standard.scc
 define KMACHINE {{=machine}}
-define KTYPE standard
+
 define KARCH x86_64
 
 include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc
index fb21432..b53706f 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,10 +1,11 @@
 # yocto-bsp-filename {{=machine}}-tiny.scc
 define KMACHINE {{=machine}}
-define KTYPE tiny
+
 define KARCH x86_64
 
 include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
 {{ if need_new_kbranch == "y": }}
+define KTYPE {{=new_kbranch}}
 branch {{=machine}}
 
 include {{=machine}}.scc
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
index 811d695..dbbe934 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
 {{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.19) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (4.1) kernel? (y/n)" default:"y"}}
 
 {{ if kernel_choice != "custom" and use_default_kernel == "n": }}
 {{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.19"}}
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
index 2fa6231..c336007 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -21,6 +21,5 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 7a25446..4700d98 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index 471ccbc..15b9b1b 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
index 4de82fa..caefcfc 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.19.bbappend
@@ -26,8 +26,8 @@
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
new file mode 100644
index 0000000..0a47a4e
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.1.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-patches.scc \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
index ca0b497..62a99d5 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.14"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend
index dba63c3..a5dd37d 100644
--- a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.19.bbappend
@@ -22,12 +22,11 @@
 
 SRC_URI += "file://{{=machine}}-standard.scc \
             file://{{=machine}}-user-config.cfg \
-            file://{{=machine}}-user-patches.scc \
             file://{{=machine}}-user-features.scc \
            "
 
-# uncomment and replace these SRCREVs with the real commit ids once you've had
+# replace these SRCREVs with the real commit ids once you've had
 # the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
 #LINUX_VERSION = "3.19"
diff --git a/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
new file mode 100644
index 0000000..8d0bc97
--- /dev/null
+++ b/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.1.bbappend
@@ -0,0 +1,32 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_4.1": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}}  = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+            file://{{=machine}}-user-config.cfg \
+            file://{{=machine}}-user-features.scc \
+           "
+
+# replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "${AUTOREV}"
+#LINUX_VERSION = "4.1"
diff --git a/yocto-poky/scripts/lib/devtool/__init__.py b/yocto-poky/scripts/lib/devtool/__init__.py
index 404d3e6..50604e6 100644
--- a/yocto-poky/scripts/lib/devtool/__init__.py
+++ b/yocto-poky/scripts/lib/devtool/__init__.py
@@ -96,18 +96,22 @@
             newenv[splitval[0]] = splitval[1]
     return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
 
-def setup_tinfoil(config_only=False):
+def setup_tinfoil(config_only=False, basepath=None, tracking=False):
     """Initialize tinfoil api from bitbake"""
     import scriptpath
+    orig_cwd = os.path.abspath(os.curdir)
+    if basepath:
+        os.chdir(basepath)
     bitbakepath = scriptpath.add_bitbake_lib_path()
     if not bitbakepath:
         logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
         sys.exit(1)
 
     import bb.tinfoil
-    tinfoil = bb.tinfoil.Tinfoil()
+    tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
     tinfoil.prepare(config_only)
     tinfoil.logger.setLevel(logger.getEffectiveLevel())
+    os.chdir(orig_cwd)
     return tinfoil
 
 def get_recipe_file(cooker, pn):
@@ -134,5 +138,62 @@
         # Filter out appends from the workspace
         append_files = [path for path in append_files if
                         not path.startswith(config.workspace_path)]
+    else:
+        append_files = None
     return oe.recipeutils.parse_recipe(recipefile, append_files,
                                        tinfoil.config_data)
+
+def check_workspace_recipe(workspace, pn, checksrc=True):
+    """
+    Check that a recipe is in the workspace and (optionally) that source
+    is present.
+    """
+    if not pn in workspace:
+        raise DevtoolError("No recipe named '%s' in your workspace" % pn)
+    if checksrc:
+        srctree = workspace[pn]['srctree']
+        if not os.path.exists(srctree):
+            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, pn))
+        if not os.listdir(srctree):
+            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, pn))
+
+def use_external_build(same_dir, no_same_dir, d):
+    """
+    Determine if we should use B!=S (separate build and source directories) or not
+    """
+    b_is_s = True
+    if no_same_dir:
+        logger.info('Using separate build directory since --no-same-dir specified')
+        b_is_s = False
+    elif same_dir:
+        logger.info('Using source tree as build directory since --same-dir specified')
+    elif bb.data.inherits_class('autotools-brokensep', d):
+        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
+    elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
+        logger.info('Using source tree as build directory since that would be the default for this recipe')
+    else:
+        b_is_s = False
+    return b_is_s
+
+def setup_git_repo(repodir, version, devbranch, basetag='devtool-base'):
+    """
+    Set up the git repository for the source tree
+    """
+    import bb.process
+    if not os.path.exists(os.path.join(repodir, '.git')):
+        bb.process.run('git init', cwd=repodir)
+        bb.process.run('git add .', cwd=repodir)
+        commit_cmd = ['git', 'commit', '-q']
+        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
+        if not stdout:
+            commit_cmd.append('--allow-empty')
+            commitmsg = "Initial empty commit with no upstream sources"
+        elif version:
+            commitmsg = "Initial commit from upstream at version %s" % version
+        else:
+            commitmsg = "Initial commit from upstream"
+        commit_cmd += ['-m', commitmsg]
+        bb.process.run(commit_cmd, cwd=repodir)
+
+    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
+    bb.process.run('git tag -f %s' % basetag, cwd=repodir)
diff --git a/yocto-poky/scripts/lib/devtool/build-image.py b/yocto-poky/scripts/lib/devtool/build-image.py
index 2c01428..e53239d 100644
--- a/yocto-poky/scripts/lib/devtool/build-image.py
+++ b/yocto-poky/scripts/lib/devtool/build-image.py
@@ -21,65 +21,85 @@
 import logging
 
 from bb.process import ExecutionError
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe
+from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
-def _get_recipes(workspace, config):
-    """Get list of target recipes from the workspace."""
+def _get_packages(tinfoil, workspace, config):
+    """Get list of packages from recipes in the workspace."""
     result = []
-    tinfoil = setup_tinfoil()
     for recipe in workspace:
         data = parse_recipe(config, tinfoil, recipe, True)
         if 'class-target' in data.getVar('OVERRIDES', True).split(':'):
             if recipe in data.getVar('PACKAGES', True):
                 result.append(recipe)
             else:
-                logger.warning("Skipping recipe %s as it doesn't produce "
+                logger.warning("Skipping recipe %s as it doesn't produce a "
                                "package with the same name", recipe)
-    tinfoil.shutdown()
     return result
 
 def build_image(args, config, basepath, workspace):
     """Entry point for the devtool 'build-image' subcommand."""
-    image = args.recipe
+
+    image = args.imagename
+    auto_image = False
+    if not image:
+        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
+        if sdk_targets:
+            image = sdk_targets[0]
+            auto_image = True
+    if not image:
+        raise DevtoolError('Unable to determine image to build, please specify one')
+
     appendfile = os.path.join(config.workspace_path, 'appends',
                               '%s.bbappend' % image)
 
-    # remove <image>.bbapend to make sure setup_tinfoil doesn't
-    # breake because of it
+    # remove <image>.bbappend to make sure setup_tinfoil doesn't
+    # break because of it
     if os.path.isfile(appendfile):
         os.unlink(appendfile)
 
-    recipes = _get_recipes(workspace, config)
-    if recipes:
-        with open(appendfile, 'w') as afile:
-            # include selected recipes into the image
-            afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(recipes))
+    tinfoil = setup_tinfoil(basepath=basepath)
+    rd = parse_recipe(config, tinfoil, image, True)
+    if not rd:
+        # Error already shown
+        return 1
+    if not bb.data.inherits_class('image', rd):
+        if auto_image:
+            raise DevtoolError('Unable to determine image to build, please specify one')
+        else:
+            raise DevtoolError('Specified recipe %s is not an image recipe' % image)
 
-            # Generate notification callback devtool_warn_image_extended
-            afile.write('do_rootfs[prefuncs] += "devtool_warn_image_extended"\n\n')
-            afile.write("python devtool_warn_image_extended() {\n")
-            afile.write("    bb.plain('NOTE: %%s: building with additional '\n"
-                        "             'packages due to \"devtool build-image\"'"
-                        "              %% d.getVar('PN', True))\n"
-                        "    bb.plain('NOTE: delete %%s to clear this' %% \\\n"
-                        "             '%s')\n" % os.path.relpath(appendfile, basepath))
-            afile.write("}\n")
-
-            logger.info('Building image %s with the following '
-                        'additional packages: %s', image, ' '.join(recipes))
-    else:
-        logger.warning('No recipes in workspace, building image %s unmodified', image)
-
-    # run bitbake to build image
     try:
-        exec_build_env_command(config.init_path, basepath,
-                               'bitbake %s' % image, watch=True)
-    except ExecutionError as err:
-        return err.exitcode
+        if workspace:
+            packages = _get_packages(tinfoil, workspace, config)
+            if packages:
+                with open(appendfile, 'w') as afile:
+                    # include packages from workspace recipes into the image
+                    afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
+                    logger.info('Building image %s with the following '
+                                'additional packages: %s', image, ' '.join(packages))
+            else:
+                logger.warning('No packages to add, building image %s unmodified', image)
+        else:
+            logger.warning('No recipes in workspace, building image %s unmodified', image)
 
-    logger.info('Successfully built %s', image)
+        deploy_dir_image = tinfoil.config_data.getVar('DEPLOY_DIR_IMAGE', True)
+
+        tinfoil.shutdown()
+
+        # run bitbake to build image
+        try:
+            exec_build_env_command(config.init_path, basepath,
+                                'bitbake %s' % image, watch=True)
+        except ExecutionError as err:
+            return err.exitcode
+    finally:
+        if os.path.isfile(appendfile):
+            os.unlink(appendfile)
+
+    logger.info('Successfully built %s. You can find output files in %s'
+                % (image, deploy_dir_image))
 
 def register_commands(subparsers, context):
     """Register devtool subcommands from the build-image plugin"""
@@ -87,5 +107,5 @@
                                    help='Build image including workspace recipe packages',
                                    description='Builds an image, extending it to include '
                                    'packages from recipes in the workspace')
-    parser.add_argument('recipe', help='Image recipe to build')
+    parser.add_argument('imagename', help='Image recipe to build', nargs='?')
     parser.set_defaults(func=build_image)
diff --git a/yocto-poky/scripts/lib/devtool/build.py b/yocto-poky/scripts/lib/devtool/build.py
index 335aff5..9b58858 100644
--- a/yocto-poky/scripts/lib/devtool/build.py
+++ b/yocto-poky/scripts/lib/devtool/build.py
@@ -21,7 +21,7 @@
 import logging
 import argparse
 import tempfile
-from devtool import exec_build_env_command, DevtoolError
+from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
@@ -42,9 +42,7 @@
 
 def build(args, config, basepath, workspace):
     """Entry point for the devtool 'build' subcommand"""
-    if not args.recipename in workspace:
-        raise DevtoolError("no recipe named %s in your workspace" %
-                           args.recipename)
+    check_workspace_recipe(workspace, args.recipename)
 
     build_task = config.get('Build', 'build_task', 'populate_sysroot')
 
diff --git a/yocto-poky/scripts/lib/devtool/deploy.py b/yocto-poky/scripts/lib/devtool/deploy.py
index fa93adf..c90c6b1 100644
--- a/yocto-poky/scripts/lib/devtool/deploy.py
+++ b/yocto-poky/scripts/lib/devtool/deploy.py
@@ -19,7 +19,7 @@
 import os
 import subprocess
 import logging
-from devtool import exec_fakeroot, setup_tinfoil, DevtoolError
+from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
@@ -28,9 +28,8 @@
     import re
     import oe.recipeutils
 
-    if not args.recipename in workspace:
-        raise DevtoolError("no recipe named %s in your workspace" %
-                           args.recipename)
+    check_workspace_recipe(workspace, args.recipename, checksrc=False)
+
     try:
         host, destdir = args.target.split(':')
     except ValueError:
@@ -41,7 +40,7 @@
     deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
     deploy_file = os.path.join(deploy_dir, args.recipename + '.list')
 
-    tinfoil = setup_tinfoil()
+    tinfoil = setup_tinfoil(basepath=basepath)
     try:
         rd = oe.recipeutils.parse_recipe_simple(tinfoil.cooker, args.recipename, tinfoil.config_data)
     except Exception as e:
diff --git a/yocto-poky/scripts/lib/devtool/package.py b/yocto-poky/scripts/lib/devtool/package.py
index 3a7a36b..b8d8423 100644
--- a/yocto-poky/scripts/lib/devtool/package.py
+++ b/yocto-poky/scripts/lib/devtool/package.py
@@ -20,7 +20,7 @@
 import subprocess
 import logging
 from bb.process import ExecutionError
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
 
 logger = logging.getLogger('devtool')
 
@@ -30,13 +30,11 @@
 
 def package(args, config, basepath, workspace):
     """Entry point for the devtool 'package' subcommand"""
-    if not args.recipename in workspace:
-        raise DevtoolError("no recipe named %s in your workspace" %
-                           args.recipename)
+    check_workspace_recipe(workspace, args.recipename)
 
     image_pkgtype = config.get('Package', 'image_pkgtype', '')
     if not image_pkgtype:
-        tinfoil = setup_tinfoil()
+        tinfoil = setup_tinfoil(basepath=basepath)
         try:
             tinfoil.prepare(config_only=True)
             image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
diff --git a/yocto-poky/scripts/lib/devtool/runqemu.py b/yocto-poky/scripts/lib/devtool/runqemu.py
new file mode 100644
index 0000000..5282afb
--- /dev/null
+++ b/yocto-poky/scripts/lib/devtool/runqemu.py
@@ -0,0 +1,64 @@
+# Development tool - runqemu command plugin
+#
+# Copyright (C) 2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool runqemu plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import glob
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def runqemu(args, config, basepath, workspace):
+    """Entry point for the devtool 'runqemu' subcommand"""
+
+    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
+    machine = tinfoil.config_data.getVar('MACHINE', True)
+    bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
+    tinfoil.shutdown()
+
+    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
+        raise DevtoolError('QEMU is not available within this SDK')
+
+    imagename = args.imagename
+    if not imagename:
+        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
+        if sdk_targets:
+            imagename = sdk_targets[0]
+    if not imagename:
+        raise DevtoolError('Unable to determine image name to run, please specify one')
+
+    try:
+        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True)
+    except bb.process.ExecutionError as e:
+        # We've already seen the output since watch=True, so just ensure we return something to the user
+        return e.exitcode
+
+    return 0
+
+def register_commands(subparsers, context):
+    """Register devtool subcommands from this plugin"""
+    if context.fixed_setup:
+        parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
+                                               description='Runs QEMU to boot the specified image')
+        parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
+        parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
+                                    nargs=argparse.REMAINDER)
+        parser_runqemu.set_defaults(func=runqemu)
diff --git a/yocto-poky/scripts/lib/devtool/sdk.py b/yocto-poky/scripts/lib/devtool/sdk.py
index 2f416b3..f15a6a9 100644
--- a/yocto-poky/scripts/lib/devtool/sdk.py
+++ b/yocto-poky/scripts/lib/devtool/sdk.py
@@ -40,14 +40,9 @@
             continue
     return update_dict
 
-def get_sstate_objects(update_dict, newsdk_path):
+def get_sstate_objects(update_dict, sstate_dir):
     """Return a list containing sstate objects which are to be installed"""
     sstate_objects = []
-    # Ensure newsdk_path points to an extensible SDK
-    sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
-    if not os.path.exists(sstate_dir):
-        logger.error("sstate-cache directory not found under %s" % newsdk_path)
-        raise
     for k in update_dict:
         files = set()
         hashval = update_dict[k]
@@ -85,16 +80,12 @@
 
 def sdk_update(args, config, basepath, workspace):
     # Fetch locked-sigs.inc file from remote/local destination
-    from ConfigParser import NoSectionError
     updateserver = args.updateserver
     if not updateserver:
-        try:
-            updateserver = config.get('SDK', 'updateserver', None)
-        except NoSectionError:
-            pass
+        updateserver = config.get('SDK', 'updateserver', '')
     if not updateserver:
         raise DevtoolError("Update server not specified in config file, you must specify it on the command line")
-    logger.debug("updateserver: %s" % args.updateserver)
+    logger.debug("updateserver: %s" % updateserver)
 
     # Make sure we are using sdk-update from within SDK
     logger.debug("basepath = %s" % basepath)
@@ -105,38 +96,45 @@
     else:
         logger.debug("Found conf/locked-sigs.inc in %s" % basepath)
 
-    if ':' in args.updateserver:
+    if ':' in updateserver:
         is_remote = True
     else:
         is_remote = False
 
     if not is_remote:
         # devtool sdk-update /local/path/to/latest/sdk
-        new_locked_sig_file_path = os.path.join(args.updateserver, 'conf/locked-sigs.inc')
+        new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
         if not os.path.exists(new_locked_sig_file_path):
-            logger.error("%s doesn't exist or is not an extensible SDK" % args.updateserver)
+            logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
             return -1
         else:
-            logger.debug("Found conf/locked-sigs.inc in %s" % args.updateserver)
+            logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
         update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
         logger.debug("update_dict = %s" % update_dict)
-        sstate_objects = get_sstate_objects(update_dict, args.updateserver)
+        sstate_dir = os.path.join(updateserver, 'sstate-cache')
+        if not os.path.exists(sstate_dir):
+            logger.error("sstate-cache directory not found under %s" % updateserver)
+            return 1
+        sstate_objects = get_sstate_objects(update_dict, sstate_dir)
         logger.debug("sstate_objects = %s" % sstate_objects)
         if len(sstate_objects) == 0:
             logger.info("No need to update.")
             return 0
         logger.info("Installing sstate objects into %s", basepath)
-        install_sstate_objects(sstate_objects, args.updateserver.rstrip('/'), basepath)
+        install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)
         logger.info("Updating configuration files")
-        new_conf_dir = os.path.join(args.updateserver, 'conf')
+        new_conf_dir = os.path.join(updateserver, 'conf')
         old_conf_dir = os.path.join(basepath, 'conf')
         shutil.rmtree(old_conf_dir)
         shutil.copytree(new_conf_dir, old_conf_dir)
         logger.info("Updating layers")
-        new_layers_dir = os.path.join(args.updateserver, 'layers')
+        new_layers_dir = os.path.join(updateserver, 'layers')
         old_layers_dir = os.path.join(basepath, 'layers')
         shutil.rmtree(old_layers_dir)
-        shutil.copytree(new_layers_dir, old_layers_dir)
+        ret = subprocess.call("cp -a %s %s" % (new_layers_dir, old_layers_dir), shell=True)
+        if ret != 0:
+            logger.error("Copying %s to %s failed" % (new_layers_dir, old_layers_dir))
+            return ret
     else:
         # devtool sdk-update http://myhost/sdk
         tmpsdk_dir = '/tmp/sdk-ext'
@@ -145,12 +143,12 @@
         os.makedirs(tmpsdk_dir)
         os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
         # Fetch locked-sigs.inc from update server
-        ret = subprocess.call("wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
+        ret = subprocess.call("wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc" % (updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
         if ret != 0:
-            logger.error("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')))
+            logger.error("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
             return ret
         else:
-            logger.info("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded" % (args.updateserver, os.path.join(tmpsdk_dir, 'conf')))
+            logger.info("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
         new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf/locked-sigs.inc')
         update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
         logger.debug("update_dict = %s" % update_dict)
@@ -161,23 +159,23 @@
         logger.debug("Updating meta data via git ...")
         # Try using 'git pull', if failed, use 'git clone'
         if os.path.exists(os.path.join(basepath, 'layers/.git')):
-            ret = subprocess.call("cd layers && git pull", shell=True)
+            ret = subprocess.call("cd layers && git pull %s/layers/.git" % updateserver, shell=True)
         else:
             ret = -1
         if ret != 0:
-            ret = subprocess.call("rm -rf layers && git clone %s/layers" % args.updateserver, shell=True)
+            ret = subprocess.call("rm -rf layers && git clone %s/layers/.git" % updateserver, shell=True)
         if ret != 0:
             logger.error("Updating meta data via git failed")
             return ret
         logger.debug("Updating conf files ...")
-        conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'work-config.inc', 'locked-sigs.inc']
+        conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'locked-sigs.inc']
         for conf in conf_files:
-            ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (args.updateserver, conf, conf), shell=True)
+            ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (updateserver, conf, conf), shell=True)
             if ret != 0:
                 logger.error("Update %s failed" % conf)
                 return ret
         with open(os.path.join(basepath, 'conf/local.conf'), 'a') as f:
-            f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % args.updateserver)
+            f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
 
     # Run bitbake command for the whole SDK
     sdk_targets = config.get('SDK', 'sdk_targets')
diff --git a/yocto-poky/scripts/lib/devtool/search.py b/yocto-poky/scripts/lib/devtool/search.py
new file mode 100644
index 0000000..c2f420c
--- /dev/null
+++ b/yocto-poky/scripts/lib/devtool/search.py
@@ -0,0 +1,80 @@
+# Development tool - search command plugin
+#
+# Copyright (C) 2015 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool search plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import re
+from devtool import setup_tinfoil, DevtoolError
+
+logger = logging.getLogger('devtool')
+
+def search(args, config, basepath, workspace):
+    """Entry point for the devtool 'search' subcommand"""
+
+    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
+    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+    tinfoil.shutdown()
+
+    keyword_rc = re.compile(args.keyword)
+
+    for fn in os.listdir(pkgdata_dir):
+        pfn = os.path.join(pkgdata_dir, fn)
+        if not os.path.isfile(pfn):
+            continue
+
+        packages = []
+        match = False
+        if keyword_rc.search(fn):
+            match = True
+
+        if not match:
+            with open(pfn, 'r') as f:
+                for line in f:
+                    if line.startswith('PACKAGES:'):
+                        packages = line.split(':', 1)[1].strip().split()
+
+            for pkg in packages:
+                if keyword_rc.search(pkg):
+                    match = True
+                    break
+                if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
+                    with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
+                        for line in f:
+                            if ': ' in line:
+                                splitline = line.split(':', 1)
+                                key = splitline[0]
+                                value = splitline[1].strip()
+                            if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
+                                if keyword_rc.search(value):
+                                    match = True
+                                    break
+
+        if match:
+            print(fn)
+
+    return 0
+
+def register_commands(subparsers, context):
+    """Register devtool subcommands from this plugin"""
+    parser_search = subparsers.add_parser('search', help='Search available recipes',
+                                            description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.')
+    parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed)')
+    parser_search.set_defaults(func=search)
diff --git a/yocto-poky/scripts/lib/devtool/standard.py b/yocto-poky/scripts/lib/devtool/standard.py
index d5900b4..5464d7b 100644
--- a/yocto-poky/scripts/lib/devtool/standard.py
+++ b/yocto-poky/scripts/lib/devtool/standard.py
@@ -25,7 +25,8 @@
 import argparse
 import scriptutils
 import errno
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
+from collections import OrderedDict
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, DevtoolError
 from devtool import parse_recipe
 
 logger = logging.getLogger('devtool')
@@ -44,6 +45,10 @@
     if reason:
         raise DevtoolError(reason)
 
+    # FIXME this ought to be in validate_pn but we're using that in other contexts
+    if '/' in args.recipename:
+        raise DevtoolError('"/" is not a valid character in recipe names')
+
     srctree = os.path.abspath(args.srctree)
     if os.path.exists(srctree):
         if args.fetch:
@@ -82,7 +87,7 @@
     else:
         bp = args.recipename
     recipefile = os.path.join(recipedir, "%s.bb" % bp)
-    if sys.stdout.isatty():
+    if args.color == 'auto' and sys.stdout.isatty():
         color = 'always'
     else:
         color = args.color
@@ -94,30 +99,51 @@
         source = srctree
     if args.version:
         extracmdopts += ' -V %s' % args.version
+    if args.binary:
+        extracmdopts += ' -b'
     try:
         stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, recipefile, source, extracmdopts))
-        logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
     except bb.process.ExecutionError as e:
         raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
 
     _add_md5(config, args.recipename, recipefile)
 
+    if args.fetch and not args.no_git:
+        setup_git_repo(srctree, args.version, 'devtool')
+
     initial_rev = None
     if os.path.exists(os.path.join(srctree, '.git')):
         (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
         initial_rev = stdout.rstrip()
 
+    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
+    rd = oe.recipeutils.parse_recipe(recipefile, None, tinfoil.config_data)
+    if not rd:
+        return 1
+
     appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
     with open(appendfile, 'w') as f:
         f.write('inherit externalsrc\n')
         f.write('EXTERNALSRC = "%s"\n' % srctree)
-        if args.same_dir:
+
+        b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
+        if b_is_s:
             f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
         if initial_rev:
             f.write('\n# initial_rev: %s\n' % initial_rev)
 
+        if args.binary:
+            f.write('do_install_append() {\n')
+            f.write('    rm -rf ${D}/.git\n')
+            f.write('    rm -f ${D}/singletask.lock\n')
+            f.write('}\n')
+
     _add_md5(config, args.recipename, appendfile)
 
+    logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
+
+    tinfoil.shutdown()
+
     return 0
 
 
@@ -152,6 +178,43 @@
                            "from working. You will need to disable this "
                            "first." % pn)
 
+def _move_file(src, dst):
+    """Move a file. Creates all the directory components of destination path."""
+    dst_d = os.path.dirname(dst)
+    if dst_d:
+        bb.utils.mkdirhier(dst_d)
+    shutil.move(src, dst)
+
+def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
+    """List contents of a git treeish"""
+    import bb
+    cmd = ['git', 'ls-tree', '-z', treeish]
+    if recursive:
+        cmd.append('-r')
+    out, _ = bb.process.run(cmd, cwd=repodir)
+    ret = {}
+    for line in out.split('\0'):
+        if line:
+            split = line.split(None, 4)
+            ret[split[3]] = split[0:3]
+    return ret
+
+def _git_exclude_path(srctree, path):
+    """Return pathspec (list of paths) that excludes certain path"""
+    # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
+    # we don't catch files that are deleted, for example. A more reliable way
+    # to implement this would be to use "negative pathspecs" which were
+    # introduced in Git v1.9.0. Revisit this when/if the required Git version
+    # becomes greater than that.
+    path = os.path.normpath(path)
+    recurse = True if len(path.split(os.path.sep)) > 1 else False
+    git_files = _git_ls_tree(srctree, 'HEAD', recurse).keys()
+    if path in git_files:
+        git_files.remove(path)
+        return git_files
+    else:
+        return ['.']
+
 def _ls_tree(directory):
     """Recursive listing of files in a directory"""
     ret = []
@@ -166,6 +229,9 @@
     import bb
 
     tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+    if not tinfoil:
+        # Error already shown
+        return 1
 
     rd = parse_recipe(config, tinfoil, args.recipename, True)
     if not rd:
@@ -210,15 +276,17 @@
 def _prep_extract_operation(config, basepath, recipename):
     """HACK: Ugly workaround for making sure that requirements are met when
        trying to extract a package. Returns the tinfoil instance to be used."""
-    tinfoil = setup_tinfoil()
+    tinfoil = setup_tinfoil(basepath=basepath)
     rd = parse_recipe(config, tinfoil, recipename, True)
+    if not rd:
+        return None
 
     if bb.data.inherits_class('kernel-yocto', rd):
         tinfoil.shutdown()
         try:
             stdout, _ = exec_build_env_command(config.init_path, basepath,
                                                'bitbake kern-tools-native')
-            tinfoil = setup_tinfoil()
+            tinfoil = setup_tinfoil(basepath=basepath)
         except bb.process.ExecutionError as err:
             raise DevtoolError("Failed to build kern-tools-native:\n%s" %
                                err.stdout)
@@ -252,6 +320,10 @@
             raise DevtoolError("output path %s already exists and is "
                                "non-empty" % srctree)
 
+    if 'noexec' in (d.getVarFlags('do_unpack', False) or []):
+        raise DevtoolError("The %s recipe has do_unpack disabled, unable to "
+                           "extract source" % pn)
+
     # Prepare for shutil.move later on
     bb.utils.mkdirhier(srctree)
     os.rmdir(srctree)
@@ -275,7 +347,7 @@
             if bb.data.inherits_class('kernel', d):
                 crd.setVar('S', '${WORKDIR}/source')
             else:
-                crd.setVar('S', '${WORKDIR}/${BP}')
+                crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S', True)))
         if bb.data.inherits_class('kernel', d):
             # We don't want to move the source to STAGING_KERNEL_DIR here
             crd.setVar('STAGING_KERNEL_DIR', '${S}')
@@ -293,10 +365,25 @@
             logger.info('Doing kernel checkout...')
             task_executor.exec_func('do_kernel_checkout', False)
         srcsubdir = crd.getVar('S', True)
+
+        # Move local source files into separate subdir
+        recipe_patches = [os.path.basename(patch) for patch in
+                          oe.recipeutils.get_recipe_patches(crd)]
+        local_files = oe.recipeutils.get_recipe_local_files(crd)
+        local_files = [fname for fname in local_files if
+                       os.path.exists(os.path.join(workdir, fname))]
+        if local_files:
+            for fname in local_files:
+                _move_file(os.path.join(workdir, fname),
+                           os.path.join(tempdir, 'oe-local-files', fname))
+            with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
+                      'w') as f:
+                f.write('# Ignore local files, by default. Remove this file '
+                        'if you want to commit the directory to Git\n*\n')
+
         if srcsubdir == workdir:
-            # Find non-patch sources that were "unpacked" to srctree directory
-            recipe_patches = [os.path.basename(patch) for patch in
-                              oe.recipeutils.get_recipe_patches(crd)]
+            # Find non-patch non-local sources that were "unpacked" to srctree
+            # directory
             src_files = [fname for fname in _ls_tree(workdir) if
                          os.path.basename(fname) not in recipe_patches]
             # Force separate S so that patch files can be left out from srctree
@@ -304,9 +391,8 @@
             crd.setVar('S', srcsubdir)
             # Move source files to S
             for path in src_files:
-                tgt_dir = os.path.join(srcsubdir, os.path.dirname(path))
-                bb.utils.mkdirhier(tgt_dir)
-                shutil.move(os.path.join(workdir, path), tgt_dir)
+                _move_file(os.path.join(workdir, path),
+                           os.path.join(srcsubdir, path))
         elif os.path.dirname(srcsubdir) != workdir:
             # Handle if S is set to a subdirectory of the source
             srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
@@ -320,21 +406,18 @@
                 haspatches = True
             else:
                 os.rmdir(patchdir)
+        # Make sure that srcsubdir exists
+        bb.utils.mkdirhier(srcsubdir)
+        if not os.path.exists(srcsubdir) or not os.listdir(srcsubdir):
+            logger.warning("no source unpacked to S, either the %s recipe "
+                           "doesn't use any source or the correct source "
+                           "directory could not be determined" % pn)
 
-        if not os.listdir(srcsubdir):
-            raise DevtoolError("no source unpacked to S, perhaps the %s "
-                               "recipe doesn't use any source?" % pn)
-
-        if not os.path.exists(os.path.join(srcsubdir, '.git')):
-            bb.process.run('git init', cwd=srcsubdir)
-            bb.process.run('git add .', cwd=srcsubdir)
-            bb.process.run('git commit -q -m "Initial commit from upstream at version %s"' % crd.getVar('PV', True), cwd=srcsubdir)
+        setup_git_repo(srcsubdir, crd.getVar('PV', True), devbranch)
 
         (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
         initial_rev = stdout.rstrip()
 
-        bb.process.run('git checkout -b %s' % devbranch, cwd=srcsubdir)
-        bb.process.run('git tag -f devtool-base', cwd=srcsubdir)
         crd.setVar('PATCHTOOL', 'git')
 
         logger.info('Patching...')
@@ -347,6 +430,12 @@
             if haspatches:
                 bb.process.run('git checkout patches', cwd=srcsubdir)
 
+        # Move oe-local-files directory to srctree
+        if os.path.exists(os.path.join(tempdir, 'oe-local-files')):
+            logger.info('Adding local source files to srctree...')
+            shutil.move(os.path.join(tempdir, 'oe-local-files'), srcsubdir)
+
+
         shutil.move(srcsubdir, srctree)
     finally:
         bb.logger.setLevel(origlevel)
@@ -367,7 +456,7 @@
             f.write('%s|%s|%s\n' % (recipename, os.path.relpath(fn, config.workspace_path), md5))
 
     if os.path.isdir(filename):
-        for root, _, files in os.walk(os.path.dirname(filename)):
+        for root, _, files in os.walk(filename):
             for f in files:
                 addfile(os.path.join(root, f))
     else:
@@ -420,12 +509,23 @@
                            args.srctree)
     if args.extract:
         tinfoil = _prep_extract_operation(config, basepath, args.recipename)
+        if not tinfoil:
+            # Error already shown
+            return 1
     else:
-        tinfoil = setup_tinfoil()
+        tinfoil = setup_tinfoil(basepath=basepath)
 
     rd = parse_recipe(config, tinfoil, args.recipename, True)
     if not rd:
         return 1
+
+    pn = rd.getVar('PN', True)
+    if pn != args.recipename:
+        logger.info('Mapping %s to %s' % (args.recipename, pn))
+    if pn in workspace:
+        raise DevtoolError("recipe %s is already in your workspace" %
+                           pn)
+
     recipefile = rd.getVar('FILE', True)
     appendname = os.path.splitext(os.path.basename(recipefile))[0]
     if args.wildcard:
@@ -436,9 +536,9 @@
         raise DevtoolError("Another variant of recipe %s is already in your "
                            "workspace (only one variant of a recipe can "
                            "currently be worked on at once)"
-                           % args.recipename)
+                           % pn)
 
-    _check_compatible_recipe(args.recipename, rd)
+    _check_compatible_recipe(pn, rd)
 
     initial_rev = None
     commits = []
@@ -478,36 +578,30 @@
     if not os.path.exists(appendpath):
         os.makedirs(appendpath)
     with open(appendfile, 'w') as f:
-        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
-        f.write('inherit externalsrc\n')
-        f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
-        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (args.recipename, srctree))
+        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n')
+        # Local files can be modified/tracked in separate subdir under srctree
+        # Mostly useful for packages with S != WORKDIR
+        f.write('FILESPATH_prepend := "%s:"\n' %
+                os.path.join(srctree, 'oe-local-files'))
 
-        b_is_s = True
-        if args.no_same_dir:
-            logger.info('using separate build directory since --no-same-dir specified')
-            b_is_s = False
-        elif args.same_dir:
-            logger.info('using source tree as build directory since --same-dir specified')
-        elif bb.data.inherits_class('autotools-brokensep', rd):
-            logger.info('using source tree as build directory since original recipe inherits autotools-brokensep')
-        elif rd.getVar('B', True) == s:
-            logger.info('using source tree as build directory since that is the default for this recipe')
-        else:
-            b_is_s = False
+        f.write('\ninherit externalsrc\n')
+        f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
+        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
+
+        b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
         if b_is_s:
-            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (args.recipename, srctree))
+            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
 
         if bb.data.inherits_class('kernel', rd):
-            f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout do_fetch do_unpack"\n')
+            f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout do_fetch do_unpack do_patch"\n')
         if initial_rev:
             f.write('\n# initial_rev: %s\n' % initial_rev)
             for commit in commits:
                 f.write('# commit: %s\n' % commit)
 
-    _add_md5(config, args.recipename, appendfile)
+    _add_md5(config, pn, appendfile)
 
-    logger.info('Recipe %s now set up to build from %s' % (args.recipename, srctree))
+    logger.info('Recipe %s now set up to build from %s' % (pn, srctree))
 
     return 0
 
@@ -531,6 +625,7 @@
                 commits.append(line.split(':')[-1].strip())
 
     update_rev = initial_rev
+    changed_revs = None
     if initial_rev:
         # Find first actually changed revision
         stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
@@ -540,50 +635,156 @@
             if newcommits[i] == commits[i]:
                 update_rev = commits[i]
 
-    return initial_rev, update_rev
+        try:
+            stdout, _ = bb.process.run('git cherry devtool-patched',
+                                        cwd=srctree)
+        except bb.process.ExecutionError as err:
+            stdout = None
 
-def _remove_patch_entries(srcuri, patchlist):
-    """Remove patch entries from SRC_URI"""
-    remaining = patchlist[:]
+        if stdout is not None:
+            changed_revs = []
+            for line in stdout.splitlines():
+                if line.startswith('+ '):
+                    rev = line.split()[1]
+                    if rev in newcommits:
+                        changed_revs.append(rev)
+
+    return initial_rev, update_rev, changed_revs
+
+def _remove_file_entries(srcuri, filelist):
+    """Remove file:// entries from SRC_URI"""
+    remaining = filelist[:]
     entries = []
-    for patch in patchlist:
-        patchfile = os.path.basename(patch)
+    for fname in filelist:
+        basename = os.path.basename(fname)
         for i in xrange(len(srcuri)):
-            if srcuri[i].startswith('file://') and os.path.basename(srcuri[i].split(';')[0]) == patchfile:
+            if (srcuri[i].startswith('file://') and
+                    os.path.basename(srcuri[i].split(';')[0]) == basename):
                 entries.append(srcuri[i])
-                remaining.remove(patch)
+                remaining.remove(fname)
                 srcuri.pop(i)
                 break
     return entries, remaining
 
-def _remove_patch_files(args, patches, destpath):
+def _remove_source_files(args, files, destpath):
     """Unlink existing patch files"""
-    for patchfile in patches:
+    for path in files:
         if args.append:
             if not destpath:
                 raise Exception('destpath should be set here')
-            patchfile = os.path.join(destpath, os.path.basename(patchfile))
+            path = os.path.join(destpath, os.path.basename(path))
 
-        if os.path.exists(patchfile):
-            logger.info('Removing patch %s' % patchfile)
+        if os.path.exists(path):
+            logger.info('Removing file %s' % path)
             # FIXME "git rm" here would be nice if the file in question is
             #       tracked
             # FIXME there's a chance that this file is referred to by
             #       another recipe, in which case deleting wouldn't be the
             #       right thing to do
-            os.remove(patchfile)
+            os.remove(path)
             # Remove directory if empty
             try:
-                os.rmdir(os.path.dirname(patchfile))
+                os.rmdir(os.path.dirname(path))
             except OSError as ose:
                 if ose.errno != errno.ENOTEMPTY:
                     raise
 
+
+def _export_patches(srctree, rd, start_rev, destdir):
+    """Export patches from srctree to given location.
+       Returns three-tuple of dicts:
+         1. updated - patches that already exist in SRCURI
+         2. added - new patches that don't exist in SRCURI
+         3  removed - patches that exist in SRCURI but not in exported patches
+      In each dict the key is the 'basepath' of the URI and value is the
+      absolute path to the existing file in recipe space (if any).
+    """
+    import oe.recipeutils
+    from oe.patch import GitApplyTree
+    updated = OrderedDict()
+    added = OrderedDict()
+    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
+
+    existing_patches = dict((os.path.basename(path), path) for path in
+                            oe.recipeutils.get_recipe_patches(rd))
+
+    # Generate patches from Git, exclude local files directory
+    patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
+    GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)
+
+    new_patches = sorted(os.listdir(destdir))
+    for new_patch in new_patches:
+        # Strip numbering from patch names. If it's a git sequence named patch,
+        # the numbers might not match up since we are starting from a different
+        # revision This does assume that people are using unique shortlog
+        # values, but they ought to be anyway...
+        new_basename = seqpatch_re.match(new_patch).group(2)
+        found = False
+        for old_patch in existing_patches:
+            old_basename = seqpatch_re.match(old_patch).group(2)
+            if new_basename == old_basename:
+                updated[new_patch] = existing_patches.pop(old_patch)
+                found = True
+                # Rename patch files
+                if new_patch != old_patch:
+                    os.rename(os.path.join(destdir, new_patch),
+                              os.path.join(destdir, old_patch))
+                break
+        if not found:
+            added[new_patch] = None
+    return (updated, added, existing_patches)
+
+
+def _export_local_files(srctree, rd, destdir):
+    """Copy local files from srctree to given location.
+       Returns three-tuple of dicts:
+         1. updated - files that already exist in SRCURI
+         2. added - new files files that don't exist in SRCURI
+         3  removed - files that exist in SRCURI but not in exported files
+      In each dict the key is the 'basepath' of the URI and value is the
+      absolute path to the existing file in recipe space (if any).
+    """
+    import oe.recipeutils
+
+    # Find out local files (SRC_URI files that exist in the "recipe space").
+    # Local files that reside in srctree are not included in patch generation.
+    # Instead they are directly copied over the original source files (in
+    # recipe space).
+    existing_files = oe.recipeutils.get_recipe_local_files(rd)
+    new_set = None
+    updated = OrderedDict()
+    added = OrderedDict()
+    removed = OrderedDict()
+    git_files = _git_ls_tree(srctree)
+    if 'oe-local-files' in git_files:
+        # If tracked by Git, take the files from srctree HEAD. First get
+        # the tree object of the directory
+        tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool')
+        tree = git_files['oe-local-files'][2]
+        bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree,
+                        env=dict(os.environ, GIT_WORK_TREE=destdir,
+                                 GIT_INDEX_FILE=tmp_index))
+        new_set = _git_ls_tree(srctree, tree, True).keys()
+    elif os.path.isdir(os.path.join(srctree, 'oe-local-files')):
+        # If not tracked by Git, just copy from working copy
+        new_set = _ls_tree(os.path.join(srctree, 'oe-local-files'))
+        bb.process.run(['cp', '-ax',
+                        os.path.join(srctree, 'oe-local-files', '.'), destdir])
+    if new_set is not None:
+        for fname in new_set:
+            if fname in existing_files:
+                updated[fname] = existing_files.pop(fname)
+            elif fname != '.gitignore':
+                added[fname] = None
+
+        removed = existing_files
+    return (updated, added, removed)
+
+
 def _update_recipe_srcrev(args, srctree, rd, config_data):
     """Implement the 'srcrev' mode of update-recipe"""
     import bb
     import oe.recipeutils
-    from oe.patch import GitApplyTree
 
     recipefile = rd.getVar('FILE', True)
     logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile))
@@ -599,51 +800,68 @@
         raise DevtoolError('Invalid hash returned by git: %s' % stdout)
 
     destpath = None
-    removepatches = []
+    remove_files = []
     patchfields = {}
     patchfields['SRCREV'] = srcrev
     orig_src_uri = rd.getVar('SRC_URI', False) or ''
-    if not args.no_remove:
-        # Find list of existing patches in recipe file
-        existing_patches = oe.recipeutils.get_recipe_patches(rd)
+    srcuri = orig_src_uri.split()
+    tempdir = tempfile.mkdtemp(prefix='devtool')
+    update_srcuri = False
+    try:
+        local_files_dir = tempfile.mkdtemp(dir=tempdir)
+        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir)
+        if not args.no_remove:
+            # Find list of existing patches in recipe file
+            patches_dir = tempfile.mkdtemp(dir=tempdir)
+            old_srcrev = (rd.getVar('SRCREV', False) or '')
+            upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
+                                                  patches_dir)
 
-        old_srcrev = (rd.getVar('SRCREV', False) or '')
-        tempdir = tempfile.mkdtemp(prefix='devtool')
-        try:
-            GitApplyTree.extractPatches(srctree, old_srcrev, tempdir)
-            newpatches = os.listdir(tempdir)
-            for patch in existing_patches:
-                patchfile = os.path.basename(patch)
-                if patchfile in newpatches:
-                    removepatches.append(patch)
-        finally:
-            shutil.rmtree(tempdir)
+            # Remove deleted local files and "overlapping" patches
+            remove_files = del_f.values() + upd_p.values()
+            if remove_files:
+                removedentries = _remove_file_entries(srcuri, remove_files)[0]
+                update_srcuri = True
 
-        if removepatches:
-            srcuri = orig_src_uri.split()
-            removedentries, _ = _remove_patch_entries(srcuri, removepatches)
-            if removedentries:
+        if args.append:
+            files = dict((os.path.join(local_files_dir, key), val) for
+                          key, val in upd_f.items() + new_f.items())
+            removevalues = {}
+            if update_srcuri:
+                removevalues  = {'SRC_URI': removedentries}
+                patchfields['SRC_URI'] = '\\\n    '.join(srcuri)
+            _, destpath = oe.recipeutils.bbappend_recipe(
+                    rd, args.append, files, wildcardver=args.wildcard_version,
+                    extralines=patchfields, removevalues=removevalues)
+        else:
+            files_dir = os.path.join(os.path.dirname(recipefile),
+                                     rd.getVar('BPN', True))
+            for basepath, path in upd_f.iteritems():
+                logger.info('Updating file %s' % basepath)
+                _move_file(os.path.join(local_files_dir, basepath), path)
+                update_srcuri= True
+            for basepath, path in new_f.iteritems():
+                logger.info('Adding new file %s' % basepath)
+                _move_file(os.path.join(local_files_dir, basepath),
+                           os.path.join(files_dir, basepath))
+                srcuri.append('file://%s' % basepath)
+                update_srcuri = True
+            if update_srcuri:
                 patchfields['SRC_URI'] = ' '.join(srcuri)
-
-    if args.append:
-        _, destpath = oe.recipeutils.bbappend_recipe(
-                rd, args.append, None, wildcardver=args.wildcard_version,
-                extralines=patchfields)
-    else:
-        oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
-
+            oe.recipeutils.patch_recipe(rd, recipefile, patchfields)
+    finally:
+        shutil.rmtree(tempdir)
     if not 'git://' in orig_src_uri:
         logger.info('You will need to update SRC_URI within the recipe to '
                     'point to a git repository where you have pushed your '
                     'changes')
 
-    _remove_patch_files(args, removepatches, destpath)
+    _remove_source_files(args, remove_files, destpath)
 
 def _update_recipe_patch(args, config, srctree, rd, config_data):
     """Implement the 'patch' mode of update-recipe"""
     import bb
     import oe.recipeutils
-    from oe.patch import GitApplyTree
 
     recipefile = rd.getVar('FILE', True)
     append = os.path.join(config.workspace_path, 'appends', '%s.bbappend' %
@@ -652,105 +870,99 @@
         raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                            args.recipename)
 
-    initial_rev, update_rev = _get_patchset_revs(args, srctree, append)
+    initial_rev, update_rev, changed_revs = _get_patchset_revs(args, srctree, append)
     if not initial_rev:
         raise DevtoolError('Unable to find initial revision - please specify '
                            'it with --initial-rev')
 
-    # Find list of existing patches in recipe file
-    existing_patches = oe.recipeutils.get_recipe_patches(rd)
-
-    removepatches = []
-    seqpatch_re = re.compile('^([0-9]{4}-)?(.+)')
-    if not args.no_remove:
-        # Get all patches from source tree and check if any should be removed
-        tempdir = tempfile.mkdtemp(prefix='devtool')
-        try:
-            GitApplyTree.extractPatches(srctree, initial_rev, tempdir)
-            # Strip numbering from patch names. If it's a git sequence named
-            # patch, the numbers might not match up since we are starting from
-            # a different revision This does assume that people are using
-            # unique shortlog values, but they ought to be anyway...
-            newpatches = [seqpatch_re.match(fname).group(2) for fname in
-                          os.listdir(tempdir)]
-            for patch in existing_patches:
-                basename = seqpatch_re.match(
-                                os.path.basename(patch)).group(2)
-                if basename not in newpatches:
-                    removepatches.append(patch)
-        finally:
-            shutil.rmtree(tempdir)
-
-    # Get updated patches from source tree
     tempdir = tempfile.mkdtemp(prefix='devtool')
     try:
-        GitApplyTree.extractPatches(srctree, update_rev, tempdir)
+        local_files_dir = tempfile.mkdtemp(dir=tempdir)
+        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir)
 
-        # Match up and replace existing patches with corresponding new patches
-        updatepatches = False
+        remove_files = []
+        if not args.no_remove:
+            # Get all patches from source tree and check if any should be removed
+            all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+            upd_p, new_p, del_p = _export_patches(srctree, rd, initial_rev,
+                                                  all_patches_dir)
+            # Remove deleted local files and patches
+            remove_files = del_f.values() + del_p.values()
+
+        # Get updated patches from source tree
+        patches_dir = tempfile.mkdtemp(dir=tempdir)
+        upd_p, new_p, del_p = _export_patches(srctree, rd, update_rev,
+                                              patches_dir)
+        updatefiles = False
         updaterecipe = False
         destpath = None
-        newpatches = os.listdir(tempdir)
+        srcuri = (rd.getVar('SRC_URI', False) or '').split()
         if args.append:
-            patchfiles = {}
-            for patch in existing_patches:
-                patchfile = os.path.basename(patch)
-                if patchfile in newpatches:
-                    patchfiles[os.path.join(tempdir, patchfile)] = patchfile
-                    newpatches.remove(patchfile)
-            for patchfile in newpatches:
-                patchfiles[os.path.join(tempdir, patchfile)] = None
-
-            if patchfiles or removepatches:
+            files = dict((os.path.join(local_files_dir, key), val) for
+                         key, val in upd_f.items() + new_f.items())
+            files.update(dict((os.path.join(patches_dir, key), val) for
+                              key, val in upd_p.items() + new_p.items()))
+            if files or remove_files:
                 removevalues = None
-                if removepatches:
-                    srcuri = (rd.getVar('SRC_URI', False) or '').split()
-                    removedentries, remaining = _remove_patch_entries(
-                                                    srcuri, removepatches)
+                if remove_files:
+                    removedentries, remaining = _remove_file_entries(
+                                                    srcuri, remove_files)
                     if removedentries or remaining:
                         remaining = ['file://' + os.path.basename(item) for
                                      item in remaining]
                         removevalues = {'SRC_URI': removedentries + remaining}
                 _, destpath = oe.recipeutils.bbappend_recipe(
-                                rd, args.append, patchfiles,
+                                rd, args.append, files,
                                 removevalues=removevalues)
             else:
-                logger.info('No patches needed updating')
+                logger.info('No patches or local source files needed updating')
         else:
-            for patch in existing_patches:
-                patchfile = os.path.basename(patch)
-                if patchfile in newpatches:
-                    logger.info('Updating patch %s' % patchfile)
-                    shutil.move(os.path.join(tempdir, patchfile), patch)
-                    newpatches.remove(patchfile)
-                    updatepatches = True
-            srcuri = (rd.getVar('SRC_URI', False) or '').split()
-            if newpatches:
-                # Add any patches left over
-                patchdir = os.path.join(os.path.dirname(recipefile),
-                                        rd.getVar('BPN', True))
-                bb.utils.mkdirhier(patchdir)
-                for patchfile in newpatches:
-                    logger.info('Adding new patch %s' % patchfile)
-                    shutil.move(os.path.join(tempdir, patchfile),
-                                os.path.join(patchdir, patchfile))
-                    srcuri.append('file://%s' % patchfile)
-                    updaterecipe = True
-            if removepatches:
-                removedentries, _ = _remove_patch_entries(srcuri, removepatches)
-                if removedentries:
-                    updaterecipe = True
+            # Update existing files
+            for basepath, path in upd_f.iteritems():
+                logger.info('Updating file %s' % basepath)
+                _move_file(os.path.join(local_files_dir, basepath), path)
+                updatefiles = True
+            for basepath, path in upd_p.iteritems():
+                patchfn = os.path.join(patches_dir, basepath)
+                if changed_revs is not None:
+                    # Avoid updating patches that have not actually changed
+                    with open(patchfn, 'r') as f:
+                        firstlineitems = f.readline().split()
+                        if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+                            if not firstlineitems[1] in changed_revs:
+                                continue
+                logger.info('Updating patch %s' % basepath)
+                _move_file(patchfn, path)
+                updatefiles = True
+            # Add any new files
+            files_dir = os.path.join(os.path.dirname(recipefile),
+                                     rd.getVar('BPN', True))
+            for basepath, path in new_f.iteritems():
+                logger.info('Adding new file %s' % basepath)
+                _move_file(os.path.join(local_files_dir, basepath),
+                           os.path.join(files_dir, basepath))
+                srcuri.append('file://%s' % basepath)
+                updaterecipe = True
+            for basepath, path in new_p.iteritems():
+                logger.info('Adding new patch %s' % basepath)
+                _move_file(os.path.join(patches_dir, basepath),
+                           os.path.join(files_dir, basepath))
+                srcuri.append('file://%s' % basepath)
+                updaterecipe = True
+            # Update recipe, if needed
+            if _remove_file_entries(srcuri, remove_files)[0]:
+                updaterecipe = True
             if updaterecipe:
                 logger.info('Updating recipe %s' % os.path.basename(recipefile))
                 oe.recipeutils.patch_recipe(rd, recipefile,
                                             {'SRC_URI': ' '.join(srcuri)})
-            elif not updatepatches:
+            elif not updatefiles:
                 # Neither patches nor recipe were updated
-                logger.info('No patches need updating')
+                logger.info('No patches or files need updating')
     finally:
         shutil.rmtree(tempdir)
 
-    _remove_patch_files(args, removepatches, destpath)
+    _remove_source_files(args, remove_files, destpath)
 
 def _guess_recipe_update_mode(srctree, rdata):
     """Guess the recipe update mode to use"""
@@ -776,9 +988,7 @@
 
 def update_recipe(args, config, basepath, workspace):
     """Entry point for the devtool 'update-recipe' subcommand"""
-    if not args.recipename in workspace:
-        raise DevtoolError("no recipe named %s in your workspace" %
-                           args.recipename)
+    check_workspace_recipe(workspace, args.recipename)
 
     if args.append:
         if not os.path.exists(args.append):
@@ -788,7 +998,7 @@
             raise DevtoolError('conf/layer.conf not found in bbappend '
                                'destination layer "%s"' % args.append)
 
-    tinfoil = setup_tinfoil()
+    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
 
     rd = parse_recipe(config, tinfoil, args.recipename, True)
     if not rd:
@@ -830,28 +1040,30 @@
     if args.recipename:
         if args.all:
             raise DevtoolError("Recipe cannot be specified if -a/--all is used")
-        elif not args.recipename in workspace:
-            raise DevtoolError("no recipe named %s in your workspace" %
-                               args.recipename)
+        else:
+            check_workspace_recipe(workspace, args.recipename, checksrc=False)
     elif not args.all:
         raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                            "reset all recipes")
     if args.all:
-        recipes = workspace
+        recipes = workspace.keys()
     else:
         recipes = [args.recipename]
 
-    for pn in recipes:
-        if not args.no_clean:
-            logger.info('Cleaning sysroot for recipe %s...' % pn)
-            try:
-                exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % pn)
-            except bb.process.ExecutionError as e:
-                raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
-                                   'wish, you may specify -n/--no-clean to '
-                                   'skip running this command when resetting' %
-                                   (e.command, e.stdout))
+    if recipes and not args.no_clean:
+        if len(recipes) == 1:
+            logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
+        else:
+            logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
+        try:
+            exec_build_env_command(config.init_path, basepath, 'bitbake -c clean %s' % ' '.join(recipes))
+        except bb.process.ExecutionError as e:
+            raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
+                                'wish, you may specify -n/--no-clean to '
+                                'skip running this command when resetting' %
+                                (e.command, e.stdout))
 
+    for pn in recipes:
         _check_preserve(config, pn)
 
         preservepath = os.path.join(config.workspace_path, 'attic', pn)
@@ -860,8 +1072,8 @@
                 for root, dirs, files in os.walk(origdir):
                     for fn in files:
                         logger.warn('Preserving %s in %s' % (fn, preservepath))
-                        bb.utils.mkdirhier(preservepath)
-                        shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
+                        _move_file(os.path.join(origdir, fn),
+                                   os.path.join(preservepath, fn))
                     for dn in dirs:
                         os.rmdir(os.path.join(root, dn))
                 os.rmdir(origdir)
@@ -879,9 +1091,13 @@
                                        description='Adds a new recipe')
     parser_add.add_argument('recipename', help='Name for new recipe to add')
     parser_add.add_argument('srctree', help='Path to external source tree')
-    parser_add.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
+    group = parser_add.add_mutually_exclusive_group()
+    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
+    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree', metavar='URI')
     parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
+    parser_add.add_argument('--no-git', '-g', help='If -f/--fetch is specified, do not set up source tree as a git repository', action="store_true")
+    parser_add.add_argument('--binary', '-b', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
     parser_add.set_defaults(func=add)
 
     parser_modify = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
diff --git a/yocto-poky/scripts/lib/devtool/upgrade.py b/yocto-poky/scripts/lib/devtool/upgrade.py
index 86443b0..6bac44b 100644
--- a/yocto-poky/scripts/lib/devtool/upgrade.py
+++ b/yocto-poky/scripts/lib/devtool/upgrade.py
@@ -29,7 +29,7 @@
 import bb
 import oe.recipeutils
 from devtool import standard
-from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe
+from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build
 
 logger = logging.getLogger('devtool')
 
@@ -53,7 +53,7 @@
         dest_dir = os.path.join(dest, os.path.dirname(path))
         bb.utils.mkdirhier(dest_dir)
         dest_path = os.path.join(dest, path)
-        os.rename(os.path.join(orig, path), dest_path)
+        shutil.move(os.path.join(orig, path), dest_path)
 
 def _get_checksums(rf):
     import re
@@ -91,15 +91,13 @@
         for d in dirs:
             shutil.rmtree(os.path.join(root,d))
 
-def _recipe_contains(rf, var):
-    import re
-    found = False
-    with open(rf) as f:
-        for line in f:
-            if re.match("^%s.*=.*" % var, line):
-                found = True
-                break
-    return found
+def _recipe_contains(rd, var):
+    rf = rd.getVar('FILE', True)
+    varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
+    for var, fn in varfiles.iteritems():
+        if fn and fn.startswith(os.path.dirname(rf) + os.sep):
+            return True
+    return False
 
 def _rename_recipe_dirs(oldpv, newpv, path):
     for root, dirs, files in os.walk(path):
@@ -119,28 +117,12 @@
         recipe = "%s_git.bb" % bpn
         if os.path.isfile(os.path.join(path, recipe)):
             newrecipe = recipe
-            raise DevtoolError("Original recipe not found on workspace")
     return os.path.join(path, newrecipe)
 
 def _rename_recipe_files(bpn, oldpv, newpv, path):
     _rename_recipe_dirs(oldpv, newpv, path)
     return _rename_recipe_file(bpn, oldpv, newpv, path)
 
-def _use_external_build(same_dir, no_same_dir, d):
-    b_is_s = True
-    if no_same_dir:
-        logger.info('using separate build directory since --no-same-dir specified')
-        b_is_s = False
-    elif same_dir:
-        logger.info('using source tree as build directory since --same-dir specified')
-    elif bb.data.inherits_class('autotools-brokensep', d):
-        logger.info('using source tree as build directory since original recipe inherits autotools-brokensep')
-    elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
-        logger.info('using source tree as build directory since that is the default for this recipe')
-    else:
-        b_is_s = False
-    return b_is_s
-
 def _write_append(rc, srctree, same_dir, no_same_dir, rev, workspace, d):
     """Writes an append file"""
     if not os.path.exists(rc):
@@ -161,7 +143,8 @@
         f.write(('# NOTE: We use pn- overrides here to avoid affecting'
                  'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
         f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
-        if _use_external_build(same_dir, no_same_dir, d):
+        b_is_s = use_external_build(same_dir, no_same_dir, d)
+        if b_is_s:
             f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
         if rev:
             f.write('\n# initial_rev: %s\n' % rev)
@@ -216,6 +199,7 @@
     if srcrev:
         rev = srcrev
     if uri.startswith('git://'):
+        __run('git fetch')
         __run('git checkout %s' % rev)
         __run('git tag -f devtool-base-new')
         md5 = None
@@ -271,7 +255,7 @@
 
     return (rev, md5, sha256)
 
-def _create_new_recipe(newpv, md5, sha256, workspace, rd):
+def _create_new_recipe(newpv, md5, sha256, srcrev, workspace, tinfoil, rd):
     """Creates the new recipe under workspace"""
     crd = rd.createCopy()
 
@@ -285,8 +269,16 @@
         newpv = oldpv
     fullpath = _rename_recipe_files(bpn, oldpv, newpv, path)
 
-    if _recipe_contains(fullpath, 'PV') and newpv != oldpv:
-        oe.recipeutils.patch_recipe(d, fullpath, {'PV':newpv})
+    newvalues = {}
+    if _recipe_contains(rd, 'PV') and newpv != oldpv:
+        newvalues['PV'] = newpv
+
+    if srcrev:
+        newvalues['SRCREV'] = srcrev
+
+    if newvalues:
+        rd = oe.recipeutils.parse_recipe(fullpath, None, tinfoil.config_data)
+        oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
 
     if md5 and sha256:
         # Unfortunately, oe.recipeutils.patch_recipe cannot update flags.
@@ -308,13 +300,19 @@
     if reason:
         raise DevtoolError(reason)
 
-    tinfoil = setup_tinfoil()
+    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
 
     rd = parse_recipe(config, tinfoil, args.recipename, True)
     if not rd:
         return 1
 
-    standard._check_compatible_recipe(args.recipename, rd)
+    pn = rd.getVar('PN', True)
+    if pn != args.recipename:
+        logger.info('Mapping %s to %s' % (args.recipename, pn))
+    if pn in workspace:
+        raise DevtoolError("recipe %s is already in your workspace" % pn)
+
+    standard._check_compatible_recipe(pn, rd)
     if rd.getVar('PV', True) == args.version and rd.getVar('SRCREV', True) == args.srcrev:
         raise DevtoolError("Current and upgrade versions are the same version" % version)
 
@@ -324,16 +322,16 @@
         rev2, md5, sha256 = _extract_new_source(args.version, args.srctree, args.no_patch,
                                                 args.srcrev, args.branch, args.keep_temp,
                                                 tinfoil, rd)
-        rf = _create_new_recipe(args.version, md5, sha256, config.workspace_path, rd)
+        rf = _create_new_recipe(args.version, md5, sha256, args.srcrev, config.workspace_path, tinfoil, rd)
     except bb.process.CmdError as e:
         _upgrade_error(e, rf, args.srctree)
     except DevtoolError as e:
         _upgrade_error(e, rf, args.srctree)
-    standard._add_md5(config, args.recipename, os.path.dirname(rf))
+    standard._add_md5(config, pn, os.path.dirname(rf))
 
     af = _write_append(rf, args.srctree, args.same_dir, args.no_same_dir, rev2,
                        config.workspace_path, rd)
-    standard._add_md5(config, args.recipename, af)
+    standard._add_md5(config, pn, af)
     logger.info('Upgraded source extracted to %s' % args.srctree)
     return 0
 
diff --git a/yocto-poky/scripts/lib/recipetool/create.py b/yocto-poky/scripts/lib/recipetool/create.py
index c4754db..8305e43 100644
--- a/yocto-poky/scripts/lib/recipetool/create.py
+++ b/yocto-poky/scripts/lib/recipetool/create.py
@@ -23,6 +23,7 @@
 import re
 import logging
 import scriptutils
+import urlparse
 
 logger = logging.getLogger('recipetool')
 
@@ -46,10 +47,26 @@
             results.extend(glob.glob(os.path.join(path, spec)))
         return results
 
-    def genfunction(self, outlines, funcname, content):
-        outlines.append('%s () {' % funcname)
+    def genfunction(self, outlines, funcname, content, python=False, forcespace=False):
+        if python:
+            prefix = 'python '
+        else:
+            prefix = ''
+        outlines.append('%s%s () {' % (prefix, funcname))
+        if python or forcespace:
+            indent = '    '
+        else:
+            indent = '\t'
+        addnoop = not python
         for line in content:
-            outlines.append('\t%s' % line)
+            outlines.append('%s%s' % (indent, line))
+            if addnoop:
+                strippedline = line.lstrip()
+                if strippedline and not strippedline.startswith('#'):
+                    addnoop = False
+        if addnoop:
+            # Without this there'll be a syntax error
+            outlines.append('%s:' % indent)
         outlines.append('}')
         outlines.append('')
 
@@ -86,7 +103,17 @@
     srcrev = '${AUTOREV}'
     if '://' in args.source:
         # Fetch a URL
-        srcuri = args.source
+        fetchuri = urlparse.urldefrag(args.source)[0]
+        if args.binary:
+            # Assume the archive contains the directory structure verbatim
+            # so we need to extract to a subdirectory
+            fetchuri += ';subdir=%s' % os.path.splitext(os.path.basename(urlparse.urlsplit(fetchuri).path))[0]
+        git_re = re.compile('(https?)://([^;]+\.git)(;.*)?')
+        res = git_re.match(fetchuri)
+        if res:
+            # Need to switch the URI around so that the git fetcher is used
+            fetchuri = 'git://%s;protocol=%s%s' % (res.group(2), res.group(1), res.group(3) or '')
+        srcuri = fetchuri
         rev_re = re.compile(';rev=([^;]+)')
         res = rev_re.search(srcuri)
         if res:
@@ -95,14 +122,25 @@
         tempsrc = tempfile.mkdtemp(prefix='recipetool-')
         srctree = tempsrc
         logger.info('Fetching %s...' % srcuri)
-        checksums = scriptutils.fetch_uri(tinfoil.config_data, args.source, srctree, srcrev)
+        try:
+            checksums = scriptutils.fetch_uri(tinfoil.config_data, fetchuri, srctree, srcrev)
+        except bb.fetch2.FetchError:
+            # Error already printed
+            sys.exit(1)
         dirlist = os.listdir(srctree)
         if 'git.indirectionsymlink' in dirlist:
             dirlist.remove('git.indirectionsymlink')
-        if len(dirlist) == 1 and os.path.isdir(os.path.join(srctree, dirlist[0])):
-            # We unpacked a single directory, so we should use that
-            srcsubdir = dirlist[0]
-            srctree = os.path.join(srctree, srcsubdir)
+        if len(dirlist) == 1:
+            singleitem = os.path.join(srctree, dirlist[0])
+            if os.path.isdir(singleitem):
+                # We unpacked a single directory, so we should use that
+                srcsubdir = dirlist[0]
+                srctree = os.path.join(srctree, srcsubdir)
+            else:
+                with open(singleitem, 'r') as f:
+                    if '<html' in f.read(100).lower():
+                        logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
+                        sys.exit(1)
     else:
         # Assume we're pointing to an existing source tree
         if args.extract_to:
@@ -208,6 +246,10 @@
         lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
         lines_after.append('')
 
+    if args.binary:
+        lines_after.append('INSANE_SKIP_${PN} += "already-stripped"')
+        lines_after.append('')
+
     # Find all plugins that want to register handlers
     handlers = []
     for plugin in plugins:
@@ -217,6 +259,11 @@
     # Apply the handlers
     classes = []
     handled = []
+
+    if args.binary:
+        classes.append('bin_package')
+        handled.append('buildsystem')
+
     for handler in handlers:
         handler.process(srctree, classes, lines_before, lines_after, handled)
 
@@ -229,7 +276,15 @@
 
     if args.extract_to:
         scriptutils.git_convert_standalone_clone(srctree)
+        if os.path.isdir(args.extract_to):
+            # If the directory exists we'll move the temp dir into it instead of
+            # its contents - of course, we could try to always move its contents
+            # but that is a pain if there are symlinks; the simplest solution is
+            # to just remove it first
+            os.rmdir(args.extract_to)
         shutil.move(srctree, args.extract_to)
+        if tempsrc == srctree:
+            tempsrc = None
         logger.info('Source extracted to %s' % args.extract_to)
 
     if outfile == '-':
@@ -408,5 +463,6 @@
     parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
     parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
     parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)')
+    parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
     parser_create.set_defaults(func=create_recipe)
 
diff --git a/yocto-poky/scripts/lib/recipetool/create_buildsys.py b/yocto-poky/scripts/lib/recipetool/create_buildsys.py
index ed14a53..931ef3b 100644
--- a/yocto-poky/scripts/lib/recipetool/create_buildsys.py
+++ b/yocto-poky/scripts/lib/recipetool/create_buildsys.py
@@ -279,7 +279,7 @@
 
             installtarget = True
             try:
-                stdout, stderr = bb.process.run('make -qn install', cwd=srctree, shell=True)
+                stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True)
             except bb.process.ExecutionError as e:
                 if e.exitcode != 1:
                     installtarget = False
diff --git a/yocto-poky/scripts/lib/scriptutils.py b/yocto-poky/scripts/lib/scriptutils.py
index 3366882..e79a195 100644
--- a/yocto-poky/scripts/lib/scriptutils.py
+++ b/yocto-poky/scripts/lib/scriptutils.py
@@ -85,6 +85,10 @@
             ud = fetcher.ud[u]
             ud.ignore_checksums = True
         fetcher.download()
+        for u in fetcher.ud:
+            ud = fetcher.ud[u]
+            if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR', True).rstrip(os.sep):
+                raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
         fetcher.unpack(destdir)
         for u in fetcher.ud:
             ud = fetcher.ud[u]
diff --git a/yocto-poky/scripts/lib/wic/imager/direct.py b/yocto-poky/scripts/lib/wic/imager/direct.py
index 146a0d1..d5603fa 100644
--- a/yocto-poky/scripts/lib/wic/imager/direct.py
+++ b/yocto-poky/scripts/lib/wic/imager/direct.py
@@ -241,8 +241,7 @@
                     # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
                     rsize_bb = get_bitbake_var('ROOTFS_SIZE', image_name)
                     if rsize_bb:
-                        # convert from Kb to Mb
-                        part.size = int(round(float(rsize_bb) / 1024.))
+                        part.size = int(round(float(rsize_bb)))
             # need to create the filesystems in order to get their
             # sizes before we can add them and do the layout.
             # Image.create() actually calls __format_disks() to create
diff --git a/yocto-poky/scripts/lib/wic/utils/oe/misc.py b/yocto-poky/scripts/lib/wic/utils/oe/misc.py
index 7370d93..c6d2e5f 100644
--- a/yocto-poky/scripts/lib/wic/utils/oe/misc.py
+++ b/yocto-poky/scripts/lib/wic/utils/oe/misc.py
@@ -82,6 +82,12 @@
 
     return out
 
+def cmd_in_path(cmd, path):
+    import scriptpath
+
+    scriptpath.add_bitbake_lib_path()
+
+    return bb.utils.which(path, cmd) != ""
 
 def exec_native_cmd(cmd_and_args, native_sysroot, catch=3):
     """
@@ -92,15 +98,21 @@
     Always need to execute native commands as_shell
     """
     native_paths = \
-        "export PATH=%s/sbin:%s/usr/sbin:%s/usr/bin" % \
+        "%s/sbin:%s/usr/sbin:%s/usr/bin" % \
         (native_sysroot, native_sysroot, native_sysroot)
-    native_cmd_and_args = "%s;%s" % (native_paths, cmd_and_args)
+    native_cmd_and_args = "export PATH=%s:$PATH;%s" % \
+                           (native_paths, cmd_and_args)
     msger.debug("exec_native_cmd: %s" % cmd_and_args)
 
-    args = cmd_and_args.split()
+    # The reason -1 is used is because there may be "export" commands.
+    args = cmd_and_args.split(';')[-1].split()
     msger.debug(args)
 
-    ret, out = _exec_cmd(native_cmd_and_args, True, catch)
+    # If the command isn't in the native sysroot say we failed.
+    if cmd_in_path(args[0], native_paths):
+        ret, out = _exec_cmd(native_cmd_and_args, True, catch)
+    else:
+        ret = 127
 
     if ret == 127: # shell command-not-found
         prog = args[0]
diff --git a/yocto-poky/scripts/oe-git-proxy b/yocto-poky/scripts/oe-git-proxy
index 4873455..d2e9f92 100755
--- a/yocto-poky/scripts/oe-git-proxy
+++ b/yocto-poky/scripts/oe-git-proxy
@@ -53,6 +53,7 @@
 
 	# Determine the mask bitlength
 	BITS=${CIDR##*/}
+	[ "$BITS" != "$CIDR" ] || BITS=32
 	if [ -z "$BITS" ]; then
 		return 1
 	fi
@@ -112,16 +113,23 @@
 # Proxy is necessary, determine protocol, server, and port
 PROTO=$(echo $ALL_PROXY | sed -e 's/\([^:]*\):\/\/.*/\1/')
 PROXY=$(echo $ALL_PROXY | sed -e 's/.*:\/\/\([^:]*\).*/\1/')
-PORT=$(echo $ALL_PROXY | sed -e 's/.*:\([0-9]*\)\/?$/\1/')
+# For backwards compatibility, this allows the port number to be followed by /?
+# in addition to the customary optional /
+PORT=$(echo $ALL_PROXY | sed -e 's/.*:\([0-9]*\)\(\/?\?\)\?$/\1/')
 if [ "$PORT" = "$ALL_PROXY" ]; then
 	PORT=""
 fi
 
-if [ "$PROTO" = "socks" ]; then
+if [ "$PROTO" = "socks" ] || [ "$PROTO" = "socks4a" ]; then
 	if [ -z "$PORT" ]; then
 		PORT="1080"
 	fi
 	METHOD="SOCKS4A:$PROXY:$1:$2,socksport=$PORT"
+elif [ "$PROTO" = "socks4" ]; then
+	if [ -z "$PORT" ]; then
+		PORT="1080"
+	fi
+	METHOD="SOCKS4:$PROXY:$1:$2,socksport=$PORT"
 else
 	# Assume PROXY (http, https, etc)
 	if [ -z "$PORT" ]; then
diff --git a/yocto-poky/scripts/oe-pkgdata-util b/yocto-poky/scripts/oe-pkgdata-util
index b075775..cb19cc4 100755
--- a/yocto-poky/scripts/oe-pkgdata-util
+++ b/yocto-poky/scripts/oe-pkgdata-util
@@ -60,6 +60,7 @@
         skipval += "|" + args.exclude
     skipregex = re.compile(skipval)
 
+    skippedpkgs = set()
     mappedpkgs = set()
     with open(args.pkglistfile, 'r') as f:
         for line in f:
@@ -73,6 +74,7 @@
             # Skip packages for which there is no point applying globs
             if skipregex.search(pkg):
                 logger.debug("%s -> !!" % pkg)
+                skippedpkgs.add(pkg)
                 continue
 
             # Skip packages that already match the globs, so if e.g. a dev package
@@ -84,6 +86,7 @@
                     already = True
                     break
             if already:
+                skippedpkgs.add(pkg)
                 logger.debug("%s -> !" % pkg)
                 continue
 
@@ -152,7 +155,7 @@
 
     logger.debug("------")
 
-    print("\n".join(mappedpkgs))
+    print("\n".join(mappedpkgs - skippedpkgs))
 
 def read_value(args):
     # Handle both multiple arguments and multiple values within an arg (old syntax)
@@ -493,7 +496,7 @@
             sys.exit(1)
 
     if not os.path.exists(args.pkgdata_dir):
-        logger.error('Unable to find pkgdata directory %s' % pkgdata_dir)
+        logger.error('Unable to find pkgdata directory %s' % args.pkgdata_dir)
         sys.exit(1)
 
     ret = args.func(args)
diff --git a/yocto-poky/scripts/oe-publish-sdk b/yocto-poky/scripts/oe-publish-sdk
index 1737c9f..ee4169a0 100755
--- a/yocto-poky/scripts/oe-publish-sdk
+++ b/yocto-poky/scripts/oe-publish-sdk
@@ -101,9 +101,9 @@
 
     # Setting up the git repo
     if not is_remote:
-        cmd = 'set -e; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m "init repo" || true;' % destination
+        cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m "init repo" || true; git update-server-info' % (destination, destination)
     else:
-        cmd = "ssh %s 'set -e; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m \"init repo\" || true;'" % (host, destdir)
+        cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; fi; git add -A .; git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir)
     ret = subprocess.call(cmd, shell=True)
     if ret == 0:
         logger.info('SDK published successfully')
diff --git a/yocto-poky/scripts/postinst-intercepts/update_font_cache b/yocto-poky/scripts/postinst-intercepts/update_font_cache
index c8c6018..0deab3c 100644
--- a/yocto-poky/scripts/postinst-intercepts/update_font_cache
+++ b/yocto-poky/scripts/postinst-intercepts/update_font_cache
@@ -1,5 +1,5 @@
 #!/bin/sh
 
-PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D/${libdir}:$D/${base_libdir}\
-					$D${bindir}/fc-cache --sysroot=$D
+PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D/${libdir}:$D/${base_libdir} \
+					-E ${fontconfigcacheenv} $D${bindir}/fc-cache --sysroot=$D ${fontconfigcacheparams}
 chown -R root:root $D${fontconfigcachedir}
diff --git a/yocto-poky/scripts/postinst-intercepts/update_icon_cache b/yocto-poky/scripts/postinst-intercepts/update_icon_cache
index 8e17a6a..9cf2a72 100644
--- a/yocto-poky/scripts/postinst-intercepts/update_icon_cache
+++ b/yocto-poky/scripts/postinst-intercepts/update_icon_cache
@@ -1,8 +1,9 @@
 #!/bin/sh
 
 set -e
+
 # update native pixbuf loaders
-gdk-pixbuf-query-loaders --update-cache
+$STAGING_DIR_NATIVE/${libdir_native}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache
 
 for icondir in $D/usr/share/icons/*/ ; do
     if [ -d $icondir ] ; then
diff --git a/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache b/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
index 95bf4f9..a8b4001 100644
--- a/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
+++ b/yocto-poky/scripts/postinst-intercepts/update_pixbuf_cache
@@ -4,8 +4,6 @@
 export GDK_PIXBUF_FATAL_LOADER=1
 
 PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D/${libdir}:$D/${base_libdir}\
-    $D${bindir}/gdk-pixbuf-query-loaders \
+    $D${libdir}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders \
     >$GDK_PIXBUF_MODULEDIR/../loaders.cache && \
     sed -i -e "s:$D::g" $GDK_PIXBUF_MODULEDIR/../loaders.cache
-
-
diff --git a/yocto-poky/scripts/runqemu b/yocto-poky/scripts/runqemu
index 23cf5be..e01d276 100755
--- a/yocto-poky/scripts/runqemu
+++ b/yocto-poky/scripts/runqemu
@@ -33,6 +33,7 @@
     echo "    nographic - disables video console"
     echo "    serial - enables a serial console on /dev/ttyS0"
     echo "    kvm - enables KVM when running qemux86/qemux86-64 (VT-capable CPU required)"
+    echo "    kvm-vhost - enables KVM with vhost support when running qemux86/qemux86-64 (VT-capable CPU required)"
     echo "    publicvnc - enable a VNC server open to all hosts"
     echo "  qemuparams=\"xyz\" - specify custom parameters to QEMU"
     echo "  bootparams=\"xyz\" - specify custom kernel parameters during boot"
@@ -71,6 +72,8 @@
 TCPSERIAL_PORTNUM=""
 KVM_ENABLED="no"
 KVM_ACTIVE="no"
+VHOST_ENABLED="no"
+VHOST_ACTIVE="no"
 
 # Determine whether the file is a kernel or QEMU image, and set the
 # appropriate variables
@@ -111,7 +114,7 @@
     case "$arg" in
         "qemux86" | "qemux86-64" | "qemuarm" | "qemuarm64" | "qemumips" | "qemumipsel" | \
         "qemumips64" | "qemush4"  | "qemuppc" | "qemumicroblaze" | "qemuzynq")
-            [ -z "$MACHINE" ] && MACHINE=$arg || \
+            [ -z "$MACHINE" -o "$MACHINE" = "$arg" ] && MACHINE=$arg || \
                 error "conflicting MACHINE types [$MACHINE] and [$arg]"
             ;;
         "ext2" | "ext3" | "ext4" | "jffs2" | "nfs" | "btrfs" | "hddimg" | "hdddirect" )
@@ -170,6 +173,11 @@
             KVM_ENABLED="yes"
             KVM_CAPABLE=`grep -q 'vmx\|svm' /proc/cpuinfo && echo 1`
             ;;
+        "kvm-vhost")
+            KVM_ENABLED="yes"
+            KVM_CAPABLE=`grep -q 'vmx\|svm' /proc/cpuinfo && echo 1`
+            VHOST_ENABLED="yes"
+            ;;
         "slirp")
             SLIRP_ENABLED="yes"
             ;;
@@ -270,12 +278,6 @@
         echo "$YOCTO_KVM_WIKI";
         exit 1;
     fi
-    if [ ! -e /dev/vhost-net ]; then
-        echo "Missing virtio net device. Have you inserted vhost-net module?"
-        echo "For further help see:"
-        echo "$YOCTO_PARAVIRT_KVM_WIKI";
-        exit 1;
-    fi
     if [ -w /dev/kvm -a -r /dev/kvm ]; then
         SCRIPT_QEMU_OPT="$SCRIPT_QEMU_OPT -enable-kvm"
         KVM_ACTIVE="yes"
@@ -285,13 +287,22 @@
         echo "$YOCTO_KVM_WIKI";
         exit 1;
     fi
-    if [ ! -w /dev/vhost-net -o ! -r /dev/vhost-net ]; then
-	if [ "$SLIRP_ENABLED" != "yes" ] ; then
-            echo "You have no rights on /dev/vhost-net."
-            echo "Please change the ownership of this file as described at:"
+    if [ "x$VHOST_ENABLED" = "xyes" ]; then
+        if [ ! -e /dev/vhost-net ]; then
+            echo "Missing virtio net device. Have you inserted vhost-net module?"
+            echo "For further help see:"
             echo "$YOCTO_PARAVIRT_KVM_WIKI";
             exit 1;
-	fi
+        fi
+
+        if [ -w /dev/vhost-net -a -r /dev/vhost-net ]; then
+            VHOST_ACTIVE="yes"
+        else
+            echo "You have no rights on /dev/vhost-net."
+            echo "Please change the ownership of this file as described at:"
+            echo "$YOCTO_PARAVIRT_KVM_WIKI";
+            exit 1;
+        fi
     fi
 fi
 
diff --git a/yocto-poky/scripts/runqemu-export-rootfs b/yocto-poky/scripts/runqemu-export-rootfs
index 40ab201..3dee131 100755
--- a/yocto-poky/scripts/runqemu-export-rootfs
+++ b/yocto-poky/scripts/runqemu-export-rootfs
@@ -46,8 +46,8 @@
 fi
 . $SYSROOT_SETUP_SCRIPT
 
-if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/sbin/unfsd" ]; then
-	echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/sbin/"
+if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then
+	echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/"
 
 	if [ "x$OECORE_DISTRO_VERSION" = "x" ]; then
 		echo "Have you run 'bitbake meta-ide-support'?"
@@ -115,8 +115,8 @@
 	fi
 
 	echo "Starting User Mode nfsd"
-	echo "  $PSEUDO $PSEUDO_OPTS $OECORE_NATIVE_SYSROOT/usr/sbin/unfsd $UNFSD_OPTS"
-	$PSEUDO $PSEUDO_OPTS $OECORE_NATIVE_SYSROOT/usr/sbin/unfsd $UNFSD_OPTS
+	echo "  $PSEUDO $PSEUDO_OPTS $OECORE_NATIVE_SYSROOT/usr/bin/unfsd $UNFSD_OPTS"
+	$PSEUDO $PSEUDO_OPTS $OECORE_NATIVE_SYSROOT/usr/bin/unfsd $UNFSD_OPTS
 	if [ ! $? = 0 ]; then
 		echo "Error starting nfsd"
 		exit 1
diff --git a/yocto-poky/scripts/runqemu-ifup b/yocto-poky/scripts/runqemu-ifup
index b5a3db9..d9bd894 100755
--- a/yocto-poky/scripts/runqemu-ifup
+++ b/yocto-poky/scripts/runqemu-ifup
@@ -91,10 +91,25 @@
 
 n=$[ (`echo $TAP | sed 's/tap//'` * 2) + 1 ]
 $IFCONFIG addr add 192.168.7.$n/32 broadcast 192.168.7.255 dev $TAP
+STATUS=$?
+if [ $STATUS -ne 0 ]; then
+    echo "Failed to set up IP addressing on $TAP"
+    exit 1
+fi
 $IFCONFIG link set dev $TAP up
+STATUS=$?
+if [ $STATUS -ne 0 ]; then
+    echo "Failed to bring up $TAP"
+    exit 1
+fi
 
 dest=$[ (`echo $TAP | sed 's/tap//'` * 2) + 2 ]
 $IFCONFIG route add to 192.168.7.$dest dev $TAP
+STATUS=$?
+if [ $STATUS -ne 0 ]; then
+    echo "Failed to add route to 192.168.7.$dest using $TAP"
+    exit 1
+fi
 
 # setup NAT for tap0 interface to have internet access in QEMU
 $IPTABLES -A POSTROUTING -t nat -j MASQUERADE -s 192.168.7.$n/32
diff --git a/yocto-poky/scripts/runqemu-internal b/yocto-poky/scripts/runqemu-internal
index 1527268..3b0e54c 100755
--- a/yocto-poky/scripts/runqemu-internal
+++ b/yocto-poky/scripts/runqemu-internal
@@ -54,7 +54,7 @@
             mem_size=512
             ;;
         "qemumicroblaze")
-            mem_size=64
+            mem_size=256
             ;;
         "qemumips"|"qemumips64")
             mem_size=256
@@ -120,16 +120,9 @@
     KERNEL_NETWORK_CMD="ip=dhcp"
     QEMU_TAP_CMD=""
     QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice wacom-tablet"
-    if [ "$KVM_ACTIVE" = "yes" ]; then
-        QEMU_NETWORK_CMD=""
-        DROOT="/dev/vda"
-        ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
-    else
-        QEMU_NETWORK_CMD=""
-        DROOT="/dev/hda"
-        ROOTFS_OPTIONS="-drive file=$ROOTFS,if=ide,format=raw"
-    fi
-
+    QEMU_NETWORK_CMD=""
+    DROOT="/dev/vda"
+    ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
 else
         acquire_lock() {
             lockfile=$1
@@ -266,20 +259,13 @@
 
         KERNEL_NETWORK_CMD="ip=192.168.7.$n2::192.168.7.$n1:255.255.255.0"
         QEMU_TAP_CMD="-net tap,vlan=0,ifname=$TAP,script=no,downscript=no"
-        if [ "$KVM_ACTIVE" = "yes" ]; then
+        if [ "$VHOST_ACTIVE" = "yes" ]; then
             QEMU_NETWORK_CMD="-net nic,model=virtio $QEMU_TAP_CMD,vhost=on"
-            DROOT="/dev/vda"
-            ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
         else
-            QEMU_NETWORK_CMD="-net nic,vlan=0 $QEMU_TAP_CMD"
-            DROOT="/dev/hda"
-            ROOTFS_OPTIONS="-drive file=$ROOTFS,if=ide,format=raw"
+            QEMU_NETWORK_CMD="-net nic,model=virtio $QEMU_TAP_CMD"
         fi
-        if [ "$MACHINE" = "qemuarm64" ]; then
-            QEMU_NETWORK_CMD="-netdev tap,id=net0,ifname=$TAP,script=no,downscript=no -device virtio-net-device,netdev=net0 "
-            DROOT="/dev/vda"
-            ROOTFS_OPTIONS="-drive id=disk0,file=$ROOTFS,if=none,format=raw -device virtio-blk-device,drive=disk0"
-        fi
+        DROOT="/dev/vda"
+        ROOTFS_OPTIONS="-drive file=$ROOTFS,if=virtio,format=raw"
 
         KERNCMDLINE="mem=$QEMU_MEMORY"
         QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice wacom-tablet"
@@ -364,8 +350,8 @@
     QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS"
     # QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS -force-pointer"
     if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
-        KERNCMDLINE="root=/dev/sda rw console=ttyAMA0,115200 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY highres=off"
-        QEMUOPTIONS="$QEMU_NETWORK_CMD -M ${MACHINE_SUBTYPE} -drive file=$ROOTFS,format=raw -no-reboot $QEMU_UI_OPTIONS"
+        KERNCMDLINE="root=$DROOT rw console=ttyAMA0,115200 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY highres=off"
+        QEMUOPTIONS="$QEMU_NETWORK_CMD -M ${MACHINE_SUBTYPE} $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
     fi
     if [ "$FSTYPE" = "nfs" ]; then
         if [ "$NFS_SERVER" = "192.168.7.1" -a ! -d "$NFS_DIR" ]; then
@@ -387,6 +373,10 @@
 if [ "$MACHINE" = "qemuarm64" ]; then
     QEMU=qemu-system-aarch64
 
+    QEMU_NETWORK_CMD="-netdev tap,id=net0,ifname=$TAP,script=no,downscript=no -device virtio-net-device,netdev=net0 "
+    DROOT="/dev/vda"
+    ROOTFS_OPTIONS="-drive id=disk0,file=$ROOTFS,if=none,format=raw -device virtio-blk-device,drive=disk0"
+
     export QEMU_AUDIO_DRV="none"
     if [ "x$SERIALSTDIO" = "x" ] ; then
         QEMU_UI_OPTIONS="-nographic"
@@ -394,7 +384,7 @@
         QEMU_UI_OPTIONS=""
     fi
     if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
-        KERNCMDLINE="root=/dev/vda rw console=ttyAMA0,38400 mem=$QEMU_MEMORY highres=off $KERNEL_NETWORK_CMD"
+        KERNCMDLINE="root=$DROOT rw console=ttyAMA0,38400 mem=$QEMU_MEMORY highres=off $KERNEL_NETWORK_CMD"
         # qemu-system-aarch64 only support '-machine virt -cpu cortex-a57' for now
         QEMUOPTIONS="$QEMU_NETWORK_CMD -machine virt -cpu cortex-a57 $ROOTFS_OPTIONS $QEMU_UI_OPTIONS"
     fi
@@ -409,7 +399,6 @@
     fi
 fi
 
-
 if [ "$MACHINE" = "qemux86" ]; then
     QEMU=qemu-system-i386
     if [ "$KVM_ACTIVE" = "yes" ]; then
@@ -508,8 +497,8 @@
     QEMU_UI_OPTIONS="-vga cirrus $QEMU_UI_OPTIONS"
     if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
         #KERNCMDLINE="root=/dev/hda console=ttyS0 console=tty0 $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
-        KERNCMDLINE="root=/dev/hda rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
-        QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE -drive file=$ROOTFS,format=raw -no-reboot $QEMU_UI_OPTIONS"
+        KERNCMDLINE="root=$DROOT rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
+        QEMUOPTIONS="$QEMU_NETWORK_CMD -M $MACHINE_SUBTYPE $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
     fi
     if [ "$FSTYPE" = "nfs" ]; then
         if [ "$NFS_SERVER" = "192.168.7.1" -a ! -d "$NFS_DIR" ]; then
@@ -533,8 +522,8 @@
         QEMU_NETWORK_CMD="-net nic,model=pcnet $QEMU_TAP_CMD"
     fi
     if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
-        KERNCMDLINE="root=/dev/hda rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
-        QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE -M $MACHINE_SUBTYPE -drive file=$ROOTFS,format=raw -no-reboot $QEMU_UI_OPTIONS"
+        KERNCMDLINE="root=$DROOT rw console=ttyS0 console=tty $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
+        QEMUOPTIONS="$QEMU_NETWORK_CMD -cpu $CPU_SUBTYPE -M $MACHINE_SUBTYPE $ROOTFS_OPTIONS -no-reboot $QEMU_UI_OPTIONS"
     fi
     if [ "$FSTYPE" = "nfs" ]; then
         if [ "$NFS_SERVER" = "192.168.7.1" -a ! -d "$NFS_DIR" ]; then
@@ -587,20 +576,20 @@
 
 if [ "$MACHINE" = "qemumicroblaze" ]; then
     QEMU=qemu-system-microblazeel
-    QEMU_SYSTEM_OPTIONS="-M petalogix-ml605 -serial mon:stdio -dtb $KERNEL-$MACHINE.dtb"
+    QEMU_SYSTEM_OPTIONS="$QEMU_NETWORK_CMD -M petalogix-ml605 -serial mon:stdio"
     if [ "${FSTYPE:0:3}" = "ext" -o "${FSTYPE:0:4}" = "cpio" ]; then
-        KERNCMDLINE="earlyprintk root=/dev/ram rw"
+        KERNCMDLINE="earlyprintk root=/dev/ram rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
         QEMUOPTIONS="$QEMU_SYSTEM_OPTIONS -initrd $ROOTFS"
     fi
 fi
 
 if [ "$MACHINE" = "qemuzynq" ]; then
     QEMU=qemu-system-arm
-    QEMU_SYSTEM_OPTIONS="-M xilinx-zynq-a9 -serial null -serial mon:stdio -dtb $KERNEL-$MACHINE.dtb"
+    QEMU_SYSTEM_OPTIONS="$QEMU_NETWORK_CMD -M xilinx-zynq-a9 -serial null -serial mon:stdio -dtb $KERNEL-$MACHINE.dtb"
     # zynq serial ports are named 'ttyPS0' and 'ttyPS1', fixup the default values
     SCRIPT_KERNEL_OPT=$(echo "$SCRIPT_KERNEL_OPT" | sed 's/console=ttyS/console=ttyPS/g')
     if [ "${FSTYPE:0:3}" = "ext" -o "${FSTYPE:0:4}" = "cpio" ]; then
-        KERNCMDLINE="earlyprintk root=/dev/ram rw"
+        KERNCMDLINE="earlyprintk root=/dev/ram rw $KERNEL_NETWORK_CMD mem=$QEMU_MEMORY"
         QEMUOPTIONS="$QEMU_SYSTEM_OPTIONS -initrd $ROOTFS"
     fi
 fi
@@ -700,6 +689,38 @@
         stty intr ^]
 fi
 
+
+# Preserve the multiplexing behavior for the monitor that would be there based
+# on whether nographic is used.
+if echo "$QEMUOPTIONS $SERIALOPTS $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT" | grep -- "-nographic"; then
+    FIRST_SERIAL_OPT="-serial mon:stdio"
+else
+    FIRST_SERIAL_OPT="-serial mon:vc"
+fi
+
+# qemuarm64 uses virtio for any additional serial ports so the normal mechanism
+# of using -serial will not work
+if [ "$MACHINE" = "qemuarm64" ]; then
+    SECOND_SERIAL_OPT="$SCRIPT_QEMU_EXTRA_OPT -device virtio-serial-device -chardev null,id=virtcon -device virtconsole,chardev=virtcon"
+else
+    SECOND_SERIAL_OPT="-serial null"
+fi
+
+# We always want a ttyS1. Since qemu by default adds a serial port when
+# nodefaults is not specified, it seems that all that would be needed is to
+# make sure a "-serial" is there. However, it appears that when "-serial" is
+# specified, it ignores the default serial port that is normally added.
+# So here we make sure to add two -serial if there are none. And only one
+# if there is one -serial already.
+NUM_SERIAL_OPTS=`echo $QEMUOPTIONS $SERIALOPTS $SCRIPT_QEMU_OPT $SCRIPT_QEMU_EXTRA_OPT | sed -e 's/ /\n/g' | grep --count -- -serial`
+
+if [ "$NUM_SERIAL_OPTS" = "0" ]; then
+    SCRIPT_QEMU_EXTRA_OPT="$SCRIPT_QEMU_EXTRA_OPT $FIRST_SERIAL_OPT $SECOND_SERIAL_OPT"
+elif [ "$NUM_SERIAL_OPTS" = "1" ]; then
+    SCRIPT_QEMU_EXTRA_OPT="$SCRIPT_QEMU_EXTRA_OPT $SECOND_SERIAL_OPT"
+fi
+
+
 echo "Running $QEMU..."
 # -no-reboot is a mandatory option - see bug #100
 if [ "$FSTYPE" = "vmdk" -o "$FSTYPE" = "hddimg" -o "$FSTYPE" = "hdddirect" ]; then
diff --git a/yocto-poky/scripts/yocto-bsp b/yocto-poky/scripts/yocto-bsp
index d269861..2d9453f 100755
--- a/yocto-poky/scripts/yocto-bsp
+++ b/yocto-poky/scripts/yocto-bsp
@@ -140,7 +140,7 @@
         if args[0] == "help":
             if len(args) == 1:
                 parser.print_help()
-                sys.exit(1)
+                sys.exit()
 
     invoke_subcommand(args, parser, yocto_bsp_help_usage, subcommands)