reset upstream subtrees to yocto 2.6

Reset the following subtrees on thud HEAD:

  poky: 87e3a9739d
  meta-openembedded: 6094ae18c8
  meta-security: 31dc4e7532
  meta-raspberrypi: a48743dc36
  meta-xilinx: c42016e2e6

Also re-apply backports that didn't make it into thud:
  poky:
    17726d0 systemd-systemctl-native: handle Install wildcards

  meta-openembedded:
    4321a5d libtinyxml2: update to 7.0.1
    042f0a3 libcereal: Add native and nativesdk classes
    e23284f libcereal: Allow empty package
    030e8d4 rsyslog: curl-less build with fmhttp PACKAGECONFIG
    179a1b9 gtest: update to 1.8.1

Squashed OpenBMC subtree compatibility updates:
  meta-aspeed:
    Brad Bishop (1):
          aspeed: add yocto 2.6 compatibility

  meta-ibm:
    Brad Bishop (1):
          ibm: prepare for yocto 2.6

  meta-ingrasys:
    Brad Bishop (1):
          ingrasys: set layer compatibility to yocto 2.6

  meta-openpower:
    Brad Bishop (1):
          openpower: set layer compatibility to yocto 2.6

  meta-phosphor:
    Brad Bishop (3):
          phosphor: set layer compatibility to thud
          phosphor: libgpg-error: drop patches
          phosphor: react to fitimage artifact rename

    Ed Tanous (4):
          Dropbear: upgrade options for latest upgrade
          yocto2.6: update openssl options
          busybox: remove upstream watchdog patch
          systemd: Rebase CONFIG_CGROUP_BPF patch

Change-Id: I7b1fe71cca880d0372a82d94b5fd785323e3a9e7
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 0aff100..35729db 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -31,9 +31,12 @@
 
 import os
 import bb
+import copy
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
+from   bb.fetch2 import Fetch
+from   bb.fetch2 import BBFetchException
 
 class GitSM(Git):
     def supports(self, ud, d):
@@ -42,94 +45,206 @@
         """
         return ud.type in ['gitsm']
 
-    def uses_submodules(self, ud, d, wd):
-        for name in ud.names:
-            try:
-                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
-                return True
-            except bb.fetch.FetchError:
-                pass
-        return False
-
-    def _set_relative_paths(self, repopath):
-        """
-        Fix submodule paths to be relative instead of absolute,
-        so that when we move the repo it doesn't break
-        (In Git 1.7.10+ this is done automatically)
-        """
-        submodules = []
-        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
-            for line in f.readlines():
-                if line.startswith('[submodule'):
-                    submodules.append(line.split('"')[1])
-
-        for module in submodules:
-            repo_conf = os.path.join(repopath, module, '.git')
-            if os.path.exists(repo_conf):
-                with open(repo_conf, 'r') as f:
-                    lines = f.readlines()
-                newpath = ''
-                for i, line in enumerate(lines):
-                    if line.startswith('gitdir:'):
-                        oldpath = line.split(': ')[-1].rstrip()
-                        if oldpath.startswith('/'):
-                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
-                            lines[i] = 'gitdir: %s\n' % newpath
-                            break
-                if newpath:
-                    with open(repo_conf, 'w') as f:
-                        for line in lines:
-                            f.write(line)
-
-            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
-            if os.path.exists(repo_conf2):
-                with open(repo_conf2, 'r') as f:
-                    lines = f.readlines()
-                newpath = ''
-                for i, line in enumerate(lines):
-                    if line.lstrip().startswith('worktree = '):
-                        oldpath = line.split(' = ')[-1].rstrip()
-                        if oldpath.startswith('/'):
-                            newpath = '../' * (module.count('/') + 3) + module
-                            lines[i] = '\tworktree = %s\n' % newpath
-                            break
-                if newpath:
-                    with open(repo_conf2, 'w') as f:
-                        for line in lines:
-                            f.write(line)
+    @staticmethod
+    def parse_gitmodules(gitmodules):
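+        # Parse the text of a .gitmodules file into a dict keyed by submodule
+        # name, e.g. a stanza such as
+        #   [submodule "libfoo"]
+        #       path = lib/foo
+        #       url = git://example.com/libfoo.git
+        # becomes {'libfoo': {'path': 'lib/foo', 'url': 'git://example.com/libfoo.git'}}.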
+        modules = {}
+        module = ""
+        for line in gitmodules.splitlines():
+            if line.startswith('[submodule'):
+                module = line.split('"')[1]
+                modules[module] = {}
+            elif module and line.strip().startswith('path'):
+                path = line.split('=')[1].strip()
+                modules[module]['path'] = path
+            elif module and line.strip().startswith('url'):
+                url = line.split('=')[1].strip()
+                modules[module]['url'] = url
+        return modules
 
     def update_submodules(self, ud, d):
-        # We have to convert bare -> full repo, do the submodule bit, then convert back
-        tmpclonedir = ud.clonedir + ".tmp"
-        gitdir = tmpclonedir + os.sep + ".git"
-        bb.utils.remove(tmpclonedir, True)
-        os.mkdir(tmpclonedir)
-        os.rename(ud.clonedir, gitdir)
-        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
-        runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
-        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
-        self._set_relative_paths(tmpclonedir)
-        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
-        os.rename(gitdir, ud.clonedir,)
-        bb.utils.remove(tmpclonedir, True)
+        submodules = []
+        paths = {}
+        uris = {}
+        local_paths = {}
+
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
+            except:
+                # No submodules to update
+                continue
+
+            for m, md in self.parse_gitmodules(gitmodules).items():
+                submodules.append(m)
+                paths[m] = md['path']
+                uris[m] = md['url']
+                if uris[m].startswith('..'):
+                    newud = copy.copy(ud)
+                    newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+                    uris[m] = Git._get_repo_url(self, newud)
+
+        for module in submodules:
+            module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
+            module_hash = module_hash.split()[2]
+
+            # Build new SRC_URI
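+            # e.g. 'git://host/repo.git' for module 'foo' becomes
+            # 'gitsm://host/repo.git;protocol=git;name=foo;bareclone=1;nocheckout=1;nobranch=1'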
+            proto = uris[module].split(':', 1)[0]
+            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
+            url += ';protocol=%s' % proto
+            url += ";name=%s" % module
+            url += ";bareclone=1;nocheckout=1;nobranch=1"
+
+            ld = d.createCopy()
+            # Not necessary to set SRC_URI, since we're passing the URI to
+            # Fetch.
+            #ld.setVar('SRC_URI', url)
+            ld.setVar('SRCREV_%s' % module, module_hash)
+
+            # Work around SRCPV/SRCREV_FORMAT errors that refer to 'multiple'
+            # repositories.  Only the repository in the original SRC_URI
+            # actually matters...
+            ld.setVar('SRCPV', d.getVar('SRCPV'))
+            ld.setVar('SRCREV_FORMAT', module)
+
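+            # Fetch the submodule through its own Fetch instance so it is
+            # downloaded and cached like any other git URI.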
+            newfetch = Fetch([url], ld, cache=False)
+            newfetch.download()
+            local_paths[module] = newfetch.localpath(url)
+
+            # Point the submodule reference at the locally downloaded version...
+            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
+
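+            # Link <clonedir>/modules/<path> to the locally fetched submodule
+            # repository so copy_submodules() can find it later.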
+            symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
+            if not os.path.exists(symlink_path):
+                try:
+                    os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
+                except OSError:
+                    pass
+                os.symlink(local_paths[module], symlink_path)
+
+        return True
+
+    def need_update(self, ud, d):
+        main_repo_needs_update = Git.need_update(self, ud, d)
+
+        # First check that the main repository has enough history fetched. If it doesn't, then we don't
+        # even have the .gitmodules and gitlinks for the submodules to attempt asking whether the
+        # submodules' histories are recent enough.
+        if main_repo_needs_update:
+            return True
+
+        # Now check that the submodule histories are new enough. The git-submodule command doesn't have
+        # any clean interface for doing this aside from just attempting the checkout (with network
+        # fetching disabled).
+        return not self.update_submodules(ud, d)
 
     def download(self, ud, d):
         Git.download(self, ud, d)
 
         if not ud.shallow or ud.localpath != ud.fullshallow:
-            submodules = self.uses_submodules(ud, d, ud.clonedir)
-            if submodules:
-                self.update_submodules(ud, d)
+            self.update_submodules(ud, d)
+
+    def copy_submodules(self, submodules, ud, destdir, d):
+        if ud.bareclone:
+            repo_conf = destdir
+        else:
+            repo_conf = os.path.join(destdir, '.git')
+
+        if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
+            os.mkdir(os.path.join(repo_conf, 'modules'))
+
+        for module, md in submodules.items():
+            srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
+            modpath = os.path.join(repo_conf, 'modules', md['path'])
+
+            if os.path.exists(srcpath):
+                if os.path.exists(os.path.join(srcpath, '.git')):
+                    srcpath = os.path.join(srcpath, '.git')
+
+                target = modpath
+                if os.path.exists(modpath):
+                    target = os.path.dirname(modpath)
+
+                os.makedirs(os.path.dirname(target), exist_ok=True)
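+                # -L dereferences the symlinks created by update_submodules(),
+                # so the submodule history is copied rather than the link.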
+                runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
+            elif os.path.exists(modpath):
+                # Module already exists, likely unpacked from a shallow mirror clone
+                pass
+            else:
+                # This is fatal, as we do NOT want git-submodule to hit the network
+                raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
 
     def clone_shallow_local(self, ud, dest, d):
         super(GitSM, self).clone_shallow_local(ud, dest, d)
 
-        runfetchcmd('cp -fpPRH "%s/modules" "%s/"' % (ud.clonedir, os.path.join(dest, '.git')), d)
+        # Copy over the submodules' fetched histories too.
+        repo_conf = os.path.join(dest, '.git')
+
+        submodules = []
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=dest)
+            except:
+                # No submodules to update
+                continue
+
+            submodules = self.parse_gitmodules(gitmodules)
+            self.copy_submodules(submodules, ud, dest, d)
 
     def unpack(self, ud, destdir, d):
         Git.unpack(self, ud, destdir, d)
 
-        if self.uses_submodules(ud, d, ud.destdir):
-            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
-            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
+        # Copy over the submodules' fetched histories too.
+        if ud.bareclone:
+            repo_conf = ud.destdir
+        else:
+            repo_conf = os.path.join(ud.destdir, '.git')
+
+        update_submodules = False
+        paths = {}
+        uris = {}
+        local_paths = {}
+        for name in ud.names:
+            try:
+                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+            except:
+                # No submodules to update
+                continue
+
+            submodules = self.parse_gitmodules(gitmodules)
+            self.copy_submodules(submodules, ud, ud.destdir, d)
+
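+            # Walk the copied submodules (including any nested submodules),
+            # configure their URLs and mark them non-bare before checkout.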
+            submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
+            while len(submodules_queue) != 0:
+                module, modpath = submodules_queue.pop()
+
+                # Add submodule children recursively
+                try:
+                    gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
+                    for m, md in self.parse_gitmodules(gitmodules).items():
+                        submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
+                except:
+                    # no children
+                    pass
+
+                # There are submodules to update
+                update_submodules = True
+
+                # Determine (from the submodule) the correct URL to reference
+                try:
+                    output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
+                except bb.fetch2.FetchError as e:
+                    # No remote url defined in this submodule
+                    continue
+
+                local_paths[module] = output
+
+                # Set up the local URL properly (like git submodule init or sync would do...)
+                runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+
+                # Ensure the submodule repository is NOT set to bare, since we're checking it out...
+                runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+
+        if update_submodules:
+            # Run submodule update; this sets up the directories without touching the config
+            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)