reset upstream subtrees to yocto 2.6

Reset the following subtrees on thud HEAD:

  poky: 87e3a9739d
  meta-openembedded: 6094ae18c8
  meta-security: 31dc4e7532
  meta-raspberrypi: a48743dc36
  meta-xilinx: c42016e2e6

Also re-apply backports that didn't make it into thud:
  poky:
    17726d0 systemd-systemctl-native: handle Install wildcards

  meta-openembedded:
    4321a5d libtinyxml2: update to 7.0.1
    042f0a3 libcereal: Add native and nativesdk classes
    e23284f libcereal: Allow empty package
    030e8d4 rsyslog: curl-less build with fmhttp PACKAGECONFIG
    179a1b9 gtest: update to 1.8.1

Squashed OpenBMC subtree compatibility updates:
  meta-aspeed:
    Brad Bishop (1):
          aspeed: add yocto 2.6 compatibility

  meta-ibm:
    Brad Bishop (1):
          ibm: prepare for yocto 2.6

  meta-ingrasys:
    Brad Bishop (1):
          ingrasys: set layer compatibility to yocto 2.6

  meta-openpower:
    Brad Bishop (1):
          openpower: set layer compatibility to yocto 2.6

  meta-phosphor:
    Brad Bishop (3):
          phosphor: set layer compatibility to thud
          phosphor: libgpg-error: drop patches
          phosphor: react to fitimage artifact rename

    Ed Tanous (4):
          Dropbear: upgrade options for latest upgrade
          yocto2.6: update openssl options
          busybox: remove upstream watchdog patch
          systemd: Rebase CONFIG_CGROUP_BPF patch

Change-Id: I7b1fe71cca880d0372a82d94b5fd785323e3a9e7
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 72d6092..3b8389e 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -256,7 +256,7 @@
 
         # Identify if the URI is relative or not
         if urlp.scheme in self._relative_schemes and \
-           re.compile("^\w+:(?!//)").match(uri):
+           re.compile(r"^\w+:(?!//)").match(uri):
             self.relative = True
 
         if not self.relative:
@@ -383,7 +383,7 @@
         path = location
     else:
         host = location
-        path = ""
+        path = "/"
     if user:
         m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
         if m:
@@ -452,8 +452,8 @@
             # Handle URL parameters
             if i:
                 # Any specified URL parameters must match
-                for k in uri_replace_decoded[loc]:
-                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
+                for k in uri_find_decoded[loc]:
+                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                         return None
             # Overwrite any specified replacement parameters
             for k in uri_replace_decoded[loc]:
@@ -827,6 +827,7 @@
                   'NO_PROXY', 'no_proxy',
                   'ALL_PROXY', 'all_proxy',
                   'GIT_PROXY_COMMAND',
+                  'GIT_SSH',
                   'GIT_SSL_CAINFO',
                   'GIT_SMART_HTTP',
                   'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
@@ -837,14 +838,16 @@
     if not cleanup:
         cleanup = []
 
-    # If PATH contains WORKDIR which contains PV which contains SRCPV we
+    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
     # can end up in circular recursion here so give the option of breaking it
     # in a data store copy.
     try:
         d.getVar("PV")
+        d.getVar("PR")
     except bb.data_smart.ExpansionError:
         d = bb.data.createCopy(d)
         d.setVar("PV", "fetcheravoidrecurse")
+        d.setVar("PR", "fetcheravoidrecurse")
 
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
@@ -1016,16 +1019,7 @@
                     origud.method.build_mirror_data(origud, ld)
             return origud.localpath
         # Otherwise the result is a local file:// and we symlink to it
-        if not os.path.exists(origud.localpath):
-            if os.path.islink(origud.localpath):
-                # Broken symbolic link
-                os.unlink(origud.localpath)
-
-            # As per above, in case two tasks end up here simultaneously.
-            try:
-                os.symlink(ud.localpath, origud.localpath)
-            except FileExistsError:
-                pass
+        ensure_symlink(ud.localpath, origud.localpath)
         update_stamp(origud, ld)
         return ud.localpath
 
@@ -1059,6 +1053,22 @@
             bb.utils.unlockfile(lf)
 
 
+def ensure_symlink(target, link_name):
+    if not os.path.exists(link_name):
+        if os.path.islink(link_name):
+            # Broken symbolic link
+            os.unlink(link_name)
+
+        # In case this is executing without any file locks held (as is
+        # the case for file:// URLs), two tasks may end up here at the
+        # same time, in which case we do not want the second task to
+        # fail when the link has already been created by the first task.
+        try:
+            os.symlink(target, link_name)
+        except FileExistsError:
+            pass
+
+
 def try_mirrors(fetch, d, origud, mirrors, check = False):
     """
     Try to use a mirrored version of the sources.
@@ -1088,7 +1098,9 @@
         return True
 
     pkgname = d.expand(d.getVar('PN', False))
-    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
+    trusted_hosts = None
+    if pkgname:
+        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
 
     if not trusted_hosts:
         trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
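The uri_replace() hunk above fixes mirror parameter matching: URL
parameters named in the *find* expression must match the URI being
rewritten, while parameters in the *replacement* expression are only
written onto the result afterwards. Previously both steps read from the
replacement, so a mirror entry that changed a parameter value could
never match. A rough sketch of the corrected flow, with hypothetical
decoded URIs in the (type, host, path, user, pswd, params) layout:

    uri_decoded = ['git', 'example.com', '/repo', '', '', {'protocol': 'https'}]
    uri_find    = ['git', 'example.com', '/repo', '', '', {'protocol': 'https'}]
    uri_replace = ['git', 'mirror.local', '/repo', '', '', {'protocol': 'http'}]

    # Any parameter in the find expression must match the incoming URI...
    assert all(uri_decoded[5][k] == uri_find[5][k] for k in uri_find[5])
    # ...then the replacement's parameters overwrite the result.
    result_params = dict(uri_decoded[5], **uri_replace[5])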
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
index 16123f8..658502f 100644
--- a/poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -41,8 +41,9 @@
         init bzr specific variable within url data
         """
         # Create paths to bzr checkouts
+        bzrdir = d.getVar("BZRDIR") or (d.getVar("DL_DIR") + "/bzr")
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath)
+        ud.pkgdir = os.path.join(bzrdir, ud.host, relpath)
 
         ud.setup_revisions(d)
 
@@ -57,7 +58,7 @@
         command is "fetch", "update", "revno"
         """
 
-        basecmd = d.expand('${FETCHCMD_bzr}')
+        basecmd = d.getVar("FETCHCMD_bzr") or "/usr/bin/env bzr"
 
         proto =  ud.parm.get('protocol', 'http')
 
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
index 490c954..0e0a319 100644
--- a/poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -110,7 +110,7 @@
         if ud.tag:
             options.append("-r %s" % ud.tag)
 
-        cvsbasecmd = d.getVar("FETCHCMD_cvs")
+        cvsbasecmd = d.getVar("FETCHCMD_cvs") or "/usr/bin/env cvs"
         cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
         cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
 
@@ -121,7 +121,8 @@
         # create module directory
         logger.debug(2, "Fetch: checking for module directory")
         pkg = d.getVar('PN')
-        pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
+        cvsdir = d.getVar("CVSDIR") or (d.getVar("DL_DIR") + "/cvs")
+        pkgdir = os.path.join(cvsdir, pkg)
         moddir = os.path.join(pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
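This change repeats one idiom across the bzr and cvs fetchers above and
the osc, perforce, repo and svn fetchers below: read the configurable
variable and fall back to a built-in default when it is unset, rather
than expanding '${VAR}' (BitBake leaves the reference as literal text
when the variable is undefined). A minimal sketch, assuming a BitBake
datastore d:

    # getVar() returns None for an unset variable, so 'or' supplies the
    # default fetch command and download directory.
    cvsbasecmd = d.getVar("FETCHCMD_cvs") or "/usr/bin/env cvs"
    cvsdir = d.getVar("CVSDIR") or (d.getVar("DL_DIR") + "/cvs")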
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 7b618c6..59a2ee8 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -261,7 +261,7 @@
                 gitsrcname = gitsrcname + '_' + ud.revisions[name]
 
         dl_dir = d.getVar("DL_DIR")
-        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2/")
+        gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
         ud.clonedir = os.path.join(gitdir, gitsrcname)
         ud.localfile = ud.clonedir
 
@@ -299,17 +299,22 @@
         return ud.clonedir
 
     def need_update(self, ud, d):
+        return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)
+
+    def clonedir_need_update(self, ud, d):
         if not os.path.exists(ud.clonedir):
             return True
         for name in ud.names:
             if not self._contains_ref(ud, d, name, ud.clonedir):
                 return True
-        if ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow):
-            return True
-        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
-            return True
         return False
 
+    def shallow_tarball_need_update(self, ud):
+        return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)
+
+    def tarball_need_update(self, ud):
+        return ud.write_tarballs and not os.path.exists(ud.fullmirror)
+
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
@@ -322,16 +327,13 @@
     def download(self, ud, d):
         """Fetch url"""
 
-        no_clone = not os.path.exists(ud.clonedir)
-        need_update = no_clone or self.need_update(ud, d)
-
         # A current clone is preferred to either tarball, a shallow tarball is
         # preferred to an out of date clone, and a missing clone will use
         # either tarball.
-        if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
+        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
             ud.localpath = ud.fullshallow
             return
-        elif os.path.exists(ud.fullmirror) and no_clone:
+        elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
             bb.utils.mkdirhier(ud.clonedir)
             runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
 
@@ -353,6 +355,8 @@
         for name in ud.names:
             if not self._contains_ref(ud, d, name, ud.clonedir):
                 needupdate = True
+                break
+
         if needupdate:
             output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
             if "origin" in output:
@@ -372,6 +376,7 @@
             except OSError as exc:
                 if exc.errno != errno.ENOENT:
                     raise
+
         for name in ud.names:
             if not self._contains_ref(ud, d, name, ud.clonedir):
                 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
@@ -471,11 +476,30 @@
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
 
-        if ud.shallow and (not os.path.exists(ud.clonedir) or self.need_update(ud, d)):
-            bb.utils.mkdirhier(destdir)
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
-        else:
-            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+        source_found = False
+        source_error = []
+
+        if not source_found:
+            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+            if clonedir_is_up_to_date:
+                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+                source_found = True
+            else:
+                source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+
+        if not source_found:
+            if ud.shallow:
+                if os.path.exists(ud.fullshallow):
+                    bb.utils.mkdirhier(destdir)
+                    runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
+                    source_found = True
+                else:
+                    source_error.append("shallow clone not available: " + ud.fullshallow)
+            else:
+                source_error.append("shallow clone not enabled")
+
+        if not source_found:
+            raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
 
         repourl = self._get_repo_url(ud)
         runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 0aff100..35729db 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -31,9 +31,12 @@
 
 import os
 import bb
+import copy
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
+from   bb.fetch2 import Fetch
+from   bb.fetch2 import BBFetchException
 
 class GitSM(Git):
     def supports(self, ud, d):
@@ -42,94 +45,206 @@
         """
         return ud.type in ['gitsm']
 
-    def uses_submodules(self, ud, d, wd):
-        for name in ud.names:
-            try:
-                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
-                return True
-            except bb.fetch.FetchError:
-                pass
-        return False
-
-    def _set_relative_paths(self, repopath):
-        """
-        Fix submodule paths to be relative instead of absolute,
-        so that when we move the repo it doesn't break
-        (In Git 1.7.10+ this is done automatically)
-        """
-        submodules = []
-        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
-            for line in f.readlines():
-                if line.startswith('[submodule'):
-                    submodules.append(line.split('"')[1])
-
-        for module in submodules:
-            repo_conf = os.path.join(repopath, module, '.git')
-            if os.path.exists(repo_conf):
-                with open(repo_conf, 'r') as f:
-                    lines = f.readlines()
-                newpath = ''
-                for i, line in enumerate(lines):
-                    if line.startswith('gitdir:'):
-                        oldpath = line.split(': ')[-1].rstrip()
-                        if oldpath.startswith('/'):
-                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
-                            lines[i] = 'gitdir: %s\n' % newpath
-                            break
-                if newpath:
-                    with open(repo_conf, 'w') as f:
-                        for line in lines:
-                            f.write(line)
-
-            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
-            if os.path.exists(repo_conf2):
-                with open(repo_conf2, 'r') as f:
-                    lines = f.readlines()
-                newpath = ''
-                for i, line in enumerate(lines):
-                    if line.lstrip().startswith('worktree = '):
-                        oldpath = line.split(' = ')[-1].rstrip()
-                        if oldpath.startswith('/'):
-                            newpath = '../' * (module.count('/') + 3) + module
-                            lines[i] = '\tworktree = %s\n' % newpath
-                            break
-                if newpath:
-                    with open(repo_conf2, 'w') as f:
-                        for line in lines:
-                            f.write(line)
+    @staticmethod
+    def parse_gitmodules(gitmodules):
+        modules = {}
+        module = ""
+        for line in gitmodules.splitlines():
+            if line.startswith('[submodule'):
+                module = line.split('"')[1]
+                modules[module] = {}
+            elif module and line.strip().startswith('path'):
+                path = line.split('=')[1].strip()
+                modules[module]['path'] = path
+            elif module and line.strip().startswith('url'):
+                url = line.split('=')[1].strip()
+                modules[module]['url'] = url
+        return modules
 
     def update_submodules(self, ud, d):
-        # We have to convert bare -> full repo, do the submodule bit, then convert back
-        tmpclonedir = ud.clonedir + ".tmp"
-        gitdir = tmpclonedir + os.sep + ".git"
-        bb.utils.remove(tmpclonedir, True)
-        os.mkdir(tmpclonedir)
-        os.rename(ud.clonedir, gitdir)
-        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
-        runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
-        self._set_relative_paths(tmpclonedir)
-        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
-        os.rename(gitdir, ud.clonedir,)
-        bb.utils.remove(tmpclonedir, True)
+        submodules = []
+        paths = {}
+        uris = {}
+        local_paths = {}
+
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
+            except bb.fetch2.FetchError:
+                # No submodules to update
+                continue
+
+            for m, md in self.parse_gitmodules(gitmodules).items():
+                submodules.append(m)
+                paths[m] = md['path']
+                uris[m] = md['url']
+                if uris[m].startswith('..'):
+                    newud = copy.copy(ud)
+                    newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+                    uris[m] = Git._get_repo_url(self, newud)
+
+        for module in submodules:
+            module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
+            module_hash = module_hash.split()[2]
+
+            # Build new SRC_URI
+            proto = uris[module].split(':', 1)[0]
+            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
+            url += ';protocol=%s' % proto
+            url += ";name=%s" % module
+            url += ";bareclone=1;nocheckout=1;nobranch=1"
+
+            ld = d.createCopy()
+            # Not necessary to set SRC_URI, since we're passing the URI to
+            # Fetch.
+            #ld.setVar('SRC_URI', url)
+            ld.setVar('SRCREV_%s' % module, module_hash)
+
+            # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the
+            # errors refer to 'multiple' repositories.  Only the repository
+            # in the original SRC_URI actually matters...
+            ld.setVar('SRCPV', d.getVar('SRCPV'))
+            ld.setVar('SRCREV_FORMAT', module)
+
+            newfetch = Fetch([url], ld, cache=False)
+            newfetch.download()
+            local_paths[module] = newfetch.localpath(url)
+
+            # Correct the submodule references to the local download version...
+            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
+
+            symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
+            if not os.path.exists(symlink_path):
+                try:
+                    os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
+                except OSError:
+                    pass
+                os.symlink(local_paths[module], symlink_path)
+
+        return True
+
+    def need_update(self, ud, d):
+        main_repo_needs_update = Git.need_update(self, ud, d)
+
+        # First check that the main repository has enough history fetched. If it doesn't, then we don't
+        # even have the .gitmodules and gitlinks for the submodules, so we can't ask whether the
+        # submodules' histories are recent enough.
+        if main_repo_needs_update:
+            return True
+
+        # Now check that the submodule histories are new enough. The git-submodule command doesn't have
+        # any clean interface for doing this aside from just attempting the checkout (with network
+        # fetching disabled).
+        return not self.update_submodules(ud, d)
 
     def download(self, ud, d):
         Git.download(self, ud, d)
 
         if not ud.shallow or ud.localpath != ud.fullshallow:
-            submodules = self.uses_submodules(ud, d, ud.clonedir)
-            if submodules:
-                self.update_submodules(ud, d)
+            self.update_submodules(ud, d)
+
+    def copy_submodules(self, submodules, ud, destdir, d):
+        if ud.bareclone:
+            repo_conf = destdir
+        else:
+            repo_conf = os.path.join(destdir, '.git')
+
+        if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
+            os.mkdir(os.path.join(repo_conf, 'modules'))
+
+        for module, md in submodules.items():
+            srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
+            modpath = os.path.join(repo_conf, 'modules', md['path'])
+
+            if os.path.exists(srcpath):
+                if os.path.exists(os.path.join(srcpath, '.git')):
+                    srcpath = os.path.join(srcpath, '.git')
+
+                target = modpath
+                if os.path.exists(modpath):
+                    target = os.path.dirname(modpath)
+
+                os.makedirs(os.path.dirname(target), exist_ok=True)
+                runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
+            elif os.path.exists(modpath):
+                # Module already exists, likely unpacked from a shallow mirror clone
+                pass
+            else:
+                # This is fatal, as we do NOT want git-submodule to hit the network
+                raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
 
     def clone_shallow_local(self, ud, dest, d):
         super(GitSM, self).clone_shallow_local(ud, dest, d)
 
-        runfetchcmd('cp -fpPRH "%s/modules" "%s/"' % (ud.clonedir, os.path.join(dest, '.git')), d)
+        # Copy over the submodules' fetched histories too.
+        repo_conf = os.path.join(dest, '.git')
+
+        submodules = []
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=dest)
+            except:
+                # No submodules to update
+                continue
+
+            submodules = self.parse_gitmodules(gitmodules)
+            self.copy_submodules(submodules, ud, dest, d)
 
     def unpack(self, ud, destdir, d):
         Git.unpack(self, ud, destdir, d)
 
-        if self.uses_submodules(ud, d, ud.destdir):
-            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
-            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
+        # Copy over the submodules' fetched histories too.
+        if ud.bareclone:
+            repo_conf = ud.destdir
+        else:
+            repo_conf = os.path.join(ud.destdir, '.git')
+
+        update_submodules = False
+        paths = {}
+        uris = {}
+        local_paths = {}
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+            except:
+                # No submodules to update
+                continue
+
+            submodules = self.parse_gitmodules(gitmodules)
+            self.copy_submodules(submodules, ud, ud.destdir, d)
+
+            submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
+            while len(submodules_queue) != 0:
+                module, modpath = submodules_queue.pop()
+
+                # Add submodule children recursively
+                try:
+                    gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
+                    for m, md in self.parse_gitmodules(gitmodules).items():
+                        submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
+                except:
+                    # no children
+                    pass
+
+                # There are submodules to update
+                update_submodules = True
+
+                # Determine (from the submodule) the correct url to reference
+                try:
+                    output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
+                except bb.fetch2.FetchError as e:
+                    # No remote url defined in this submodule
+                    continue
+
+                local_paths[module] = output
+
+                # Set up the local URL properly (like git submodule init or sync would do...)
+                runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+
+                # Ensure the submodule repository is NOT set to bare, since we're checking it out...
+                runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+
+        if update_submodules:
+            # Run submodule update; this sets up the directories without touching the config
+            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index d0857e6..936d043 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -80,7 +80,7 @@
         ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
         ud.mirrortarballs = [mirrortarball]
 
-        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
+        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg")
         ud.pkgdir = os.path.join(hgdir, hgsrcname)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
         ud.localfile = ud.moddir
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 2b4f7d9..6c60456 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -32,8 +32,9 @@
         ud.module = ud.parm["module"]
 
         # Create paths to osc checkouts
+        oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
+        ud.pkgdir = os.path.join(oscdir, ud.host)
         ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
 
         if 'rev' in ud.parm:
@@ -54,7 +55,7 @@
         command is "fetch", "update", "info"
         """
 
-        basecmd = d.expand('${FETCHCMD_osc}')
+        basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
 
         proto = ud.parm.get('protocol', 'ocs')
 
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 3debad5..903a8e6 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -43,13 +43,9 @@
         provided by the env, use it.  If P4PORT is specified by the recipe, use
         its values, which may override the settings in P4CONFIG.
         """
-        ud.basecmd = d.getVar('FETCHCMD_p4')
-        if not ud.basecmd:
-            ud.basecmd = "/usr/bin/env p4"
+        ud.basecmd = d.getVar("FETCHCMD_p4") or "/usr/bin/env p4"
 
-        ud.dldir = d.getVar('P4DIR')
-        if not ud.dldir:
-            ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')
+        ud.dldir = d.getVar("P4DIR") or (d.getVar("DL_DIR") + "/p4")
 
         path = ud.url.split('://')[1]
         path = path.split(';')[0]
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
index c22d9b5..8c7e818 100644
--- a/poky/bitbake/lib/bb/fetch2/repo.py
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -45,6 +45,8 @@
         "master".
         """
 
+        ud.basecmd = d.getVar("FETCHCMD_repo") or "/usr/bin/env repo"
+
         ud.proto = ud.parm.get('protocol', 'git')
         ud.branch = ud.parm.get('branch', 'master')
         ud.manifest = ud.parm.get('manifest', 'default.xml')
@@ -60,8 +62,8 @@
             logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return
 
+        repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
         gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
-        repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
         codir = os.path.join(repodir, gitsrcname, ud.manifest)
 
         if ud.user:
@@ -72,11 +74,11 @@
         repodir = os.path.join(codir, "repo")
         bb.utils.mkdirhier(repodir)
         if not os.path.exists(os.path.join(repodir, ".repo")):
-            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
-            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
+            bb.fetch2.check_network_access(d, "%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+            runfetchcmd("%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
 
-        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
-        runfetchcmd("repo sync", d, workdir=repodir)
+        bb.fetch2.check_network_access(d, "%s sync %s" % (ud.basecmd, ud.url), ud.url)
+        runfetchcmd("%s sync" % ud.basecmd, d, workdir=repodir)
 
         scmdata = ud.parm.get("scmdata", "")
         if scmdata == "keep":
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 3f172ee..9dcf3eb 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -49,7 +49,7 @@
         if not "module" in ud.parm:
             raise MissingParameterError('module', ud.url)
 
-        ud.basecmd = d.getVar('FETCHCMD_svn')
+        ud.basecmd = d.getVar("FETCHCMD_svn") or "/usr/bin/env svn --non-interactive --trust-server-cert"
 
         ud.module = ud.parm["module"]
 
@@ -59,9 +59,13 @@
             ud.path_spec = ud.parm["path_spec"]
 
         # Create paths to svn checkouts
+        svndir = d.getVar("SVNDIR") or (d.getVar("DL_DIR") + "/svn")
         relpath = self._strip_leading_slashes(ud.path)
-        ud.pkgdir = os.path.join(d.expand('${SVNDIR}'), ud.host, relpath)
+        ud.pkgdir = os.path.join(svndir, ud.host, relpath)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        # Protects the repository from concurrent updates, e.g. from two
+        # recipes fetching different revisions at the same time
+        ud.svnlock = os.path.join(ud.pkgdir, "svn.lock")
 
         ud.setup_revisions(d)
 
@@ -122,35 +126,40 @@
 
         logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
-            svnupdatecmd = self._buildsvncommand(ud, d, "update")
-            logger.info("Update " + ud.url)
-            # We need to attempt to run svn upgrade first in case its an older working format
-            try:
-                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
-            except FetchError:
-                pass
-            logger.debug(1, "Running %s", svnupdatecmd)
-            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
-            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
-        else:
-            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
-            logger.info("Fetch " + ud.url)
-            # check out sources there
-            bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", svnfetchcmd)
-            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
-            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+        lf = bb.utils.lockfile(ud.svnlock)
 
-        scmdata = ud.parm.get("scmdata", "")
-        if scmdata == "keep":
-            tar_flags = ""
-        else:
-            tar_flags = "--exclude='.svn'"
+        try:
+            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+                svnupdatecmd = self._buildsvncommand(ud, d, "update")
+                logger.info("Update " + ud.url)
+                # We need to attempt to run svn upgrade first in case it's an older working copy format
+                try:
+                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
+                except FetchError:
+                    pass
+                logger.debug(1, "Running %s", svnupdatecmd)
+                bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+                runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+            else:
+                svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+                logger.info("Fetch " + ud.url)
+                # check out sources there
+                bb.utils.mkdirhier(ud.pkgdir)
+                logger.debug(1, "Running %s", svnfetchcmd)
+                bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+                runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
 
-        # tar them up to a defined filename
-        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
-                    cleanup=[ud.localpath], workdir=ud.pkgdir)
+            scmdata = ud.parm.get("scmdata", "")
+            if scmdata == "keep":
+                tar_flags = ""
+            else:
+                tar_flags = "--exclude='.svn'"
+
+            # tar them up to a defined filename
+            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+                        cleanup=[ud.localpath], workdir=ud.pkgdir)
+        finally:
+            bb.utils.unlockfile(lf)
 
     def clean(self, ud, d):
         """ Clean SVN specific files and dirs """