meta-openembedded and poky: subtree updates

Squash of the following subtree updates, combined due to
dependencies among them and OpenBMC changes:

meta-openembedded: subtree update:d0748372d2..9201611135
meta-openembedded: subtree update:9201611135..17fd382f34
poky: subtree update:9052e5b32a..2e11d97b6c
poky: subtree update:2e11d97b6c..a8544811d7

The change log was too large for the Jenkins plugin
to handle, so it has been removed. Here are the
first and last commits of each subtree:

meta-openembedded:d0748372d2
      cppzmq: bump to version 4.6.0
meta-openembedded:17fd382f34
      mpv: Remove X11 dependency
poky:9052e5b32a
      package_ipk: Remove pointless comment to trigger rebuild
poky:a8544811d7
      pbzip2: Fix license warning

Change-Id: If0fc6c37629642ee207a4ca2f7aa501a2c673cd6
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 07de6c2..eb112f0 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -33,6 +33,9 @@
 
 logger = logging.getLogger("BitBake.Fetcher")
 
+CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
+SHOWN_CHECKSUM_LIST = ["sha256"]
+
 class BBFetchException(Exception):
     """Class all fetch exceptions inherit from"""
     def __init__(self, message):
@@ -131,10 +134,9 @@
         Exception.__init__(self)
 
 class MissingChecksumEvent(bb.event.Event):
-    def __init__(self, url, md5sum, sha256sum):
+    def __init__(self, url, **checksums):
         self.url = url
-        self.checksums = {'md5sum': md5sum,
-                          'sha256sum': sha256sum}
+        self.checksums = checksums
         bb.event.Event.__init__(self)
 
 
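The hunk above generalizes MissingChecksumEvent from two fixed arguments to
arbitrary keyword arguments. A minimal stand-alone sketch of what this buys
consumers (the Event base class here is a stand-in for bb.event.Event, for
illustration only):

    class Event:
        """Stand-in for bb.event.Event."""

    class MissingChecksumEvent(Event):
        def __init__(self, url, **checksums):
            self.url = url
            self.checksums = checksums

    evt = MissingChecksumEvent("https://example.com/f.tgz",
                               sha256sum="aa" * 32, sha512sum="bb" * 64)
    # Handlers iterate whatever algorithms were reported instead of
    # hard-coding md5sum/sha256sum:
    for name, value in sorted(evt.checksums.items()):
        print("%s: %s..." % (name, value[:8]))
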
@@ -484,17 +486,22 @@
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
+
+    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+    try:
+        # fetcher_init is called multiple times, so make sure we only save the
+        # revs the first time it is called.
+        if not bb.fetch2.saved_headrevs:
+            bb.fetch2.saved_headrevs = dict(revs)
+    except:
+        pass
+
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
         logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-        try:
-            bb.fetch2.saved_headrevs = revs.items()
-        except:
-            pass
         revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -513,22 +520,12 @@
 
 def fetcher_compare_revisions(d):
     """
-    Compare the revisions in the persistant cache with current values and
-    return true/false on whether they've changed.
+    Compare the revisions in the persistent cache with the saved values from
+    when bitbake was started and return true if they have changed.
     """
 
-    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
-    data2 = bb.fetch2.saved_headrevs
-
-    changed = False
-    for key in data:
-        if key not in data2 or data2[key] != data[key]:
-            logger.debug(1, "%s changed", key)
-            changed = True
-            return True
-        else:
-            logger.debug(2, "%s did not change", key)
-    return False
+    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
+    return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
     mirrors = (data or "").replace('\\n',' ').split()
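
The two hunks above make the head-revision snapshot unconditional (taken on
the first fetcher_init() call regardless of BB_SRCREV_POLICY) and reduce
fetcher_compare_revisions() to a dict comparison against that snapshot. A
reduced sketch of the pattern, with a plain dict standing in for
bb.persist_data:

    saved_headrevs = None

    def fetcher_init(revs):
        # Snapshot only on the first call; later calls must not overwrite it.
        global saved_headrevs
        if not saved_headrevs:
            saved_headrevs = dict(revs)

    def fetcher_compare_revisions(revs):
        # Changed if and only if the cache differs from the startup snapshot.
        return dict(revs) != saved_headrevs

    revs = {"git://example.com/repo": "abc123"}
    fetcher_init(revs)
    assert not fetcher_compare_revisions(revs)
    revs["git://example.com/repo"] = "def456"
    assert fetcher_compare_revisions(revs)
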
@@ -552,71 +549,84 @@
     downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
     """
 
-    _MD5_KEY = "md5"
-    _SHA256_KEY = "sha256"
-
     if ud.ignore_checksums or not ud.method.supports_checksum(ud):
         return {}
 
-    if _MD5_KEY in precomputed:
-        md5data = precomputed[_MD5_KEY]
-    else:
-        md5data = bb.utils.md5_file(ud.localpath)
+    def compute_checksum_info(checksum_id):
+        checksum_name = getattr(ud, "%s_name" % checksum_id)
 
-    if _SHA256_KEY in precomputed:
-        sha256data = precomputed[_SHA256_KEY]
-    else:
-        sha256data = bb.utils.sha256_file(ud.localpath)
+        if checksum_id in precomputed:
+            checksum_data = precomputed[checksum_id]
+        else:
+            checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)
 
-    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
-        # If strict checking enabled and neither sum defined, raise error
+        checksum_expected = getattr(ud, "%s_expected" % checksum_id)
+
+        return {
+            "id": checksum_id,
+            "name": checksum_name,
+            "data": checksum_data,
+            "expected": checksum_expected
+        }
+
+    checksum_infos = []
+    for checksum_id in CHECKSUM_LIST:
+        checksum_infos.append(compute_checksum_info(checksum_id))
+
+    checksum_dict = {ci["id"] : ci["data"] for ci in checksum_infos}
+    checksum_event = {"%ssum" % ci["id"] : ci["data"] for ci in checksum_infos}
+
+    for ci in checksum_infos:
+        if ci["id"] in SHOWN_CHECKSUM_LIST:
+            checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
+
+    # If no checksum has been provided
+    if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
+        messages = []
         strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
-        if strict == "1":
-            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
-                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
-                             (ud.localpath, ud.md5_name, md5data,
-                              ud.sha256_name, sha256data))
-            raise NoChecksumError('Missing SRC_URI checksum', ud.url)
 
-        bb.event.fire(MissingChecksumEvent(ud.url, md5data, sha256data), d)
+        # If strict checking enabled and neither sum defined, raise error
+        if strict == "1":
+            messages.append("No checksum specified for '%s', please add at " \
+                            "least one to the recipe:" % ud.localpath)
+            messages.extend(checksum_lines)
+            logger.error("\n".join(messages))
+            raise NoChecksumError("Missing SRC_URI checksum", ud.url)
+
+        bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
 
         if strict == "ignore":
-            return {
-                _MD5_KEY: md5data,
-                _SHA256_KEY: sha256data
-            }
+            return checksum_dict
 
         # Log missing sums so user can more easily add them
-        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
-                       'SRC_URI[%s] = "%s"',
-                       ud.localpath, ud.md5_name, md5data)
-        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
-                       'SRC_URI[%s] = "%s"',
-                       ud.localpath, ud.sha256_name, sha256data)
+        messages.append("Missing checksum for '%s', consider adding at " \
+                        "least one to the recipe:" % ud.localpath)
+        messages.extend(checksum_lines)
+        logger.warning("\n".join(messages))
 
     # We want to alert the user if a checksum is defined in the recipe but
     # it does not match.
-    msg = ""
-    mismatch = False
-    if ud.md5_expected and ud.md5_expected != md5data:
-        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
-        mismatch = True;
+    messages = []
+    messages.append("Checksum mismatch!")
+    bad_checksum = None
 
-    if ud.sha256_expected and ud.sha256_expected != sha256data:
-        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
-        mismatch = True;
+    for ci in checksum_infos:
+        if ci["expected"] and ci["expected"] != ci["data"]:
+            messages.append("File: '%s' has %s checksum %s when %s was " \
+                            "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
+            bad_checksum = ci["data"]
 
-    if mismatch:
-        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
+    if bad_checksum:
+        messages.append("If this change is expected (e.g. you have upgraded " \
+                        "to a new version without updating the checksums) " \
+                        "then you can use these lines within the recipe:")
+        messages.extend(checksum_lines)
+        messages.append("Otherwise you should retry the download and/or " \
+                        "check with upstream to determine if the file has " \
+                        "become corrupted or otherwise unexpectedly modified.")
+        raise ChecksumError("\n".join(messages), ud.url, bad_checksum)
 
-    if len(msg):
-        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
-
-    return {
-        _MD5_KEY: md5data,
-        _SHA256_KEY: sha256data
-    }
-
+    return checksum_dict
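
The rewritten verify_checksum() above is table-driven: every algorithm in
CHECKSUM_LIST is handled by looking up helpers such as bb.utils.sha256_file
by name. A stand-alone sketch of the same idea using hashlib directly
(illustrative, not the bitbake helpers):

    import hashlib

    CHECKSUM_LIST = ["md5", "sha256", "sha1", "sha384", "sha512"]

    def compute_checksums(path):
        """Return {algorithm: hexdigest} for every supported algorithm."""
        checksums = {}
        for checksum_id in CHECKSUM_LIST:
            h = hashlib.new(checksum_id)
            with open(path, "rb") as f:
                for block in iter(lambda: f.read(65536), b""):
                    h.update(block)
            checksums[checksum_id] = h.hexdigest()
        return checksums
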
 
 def verify_donestamp(ud, d, origud=None):
     """
@@ -1081,7 +1091,7 @@
 
     for index, uri in enumerate(uris):
         ret = try_mirror_url(fetch, origud, uds[index], ld, check)
-        if ret != False:
+        if ret:
             return ret
     return None
 
@@ -1197,14 +1207,14 @@
 
     return " ".join(filelist)
 
-def get_file_checksums(filelist, pn):
+def get_file_checksums(filelist, pn, localdirsexclude):
     """Get a list of the checksums for a list of local files
 
     Returns the checksums for a list of local files, caching the results as
     it proceeds
 
     """
-    return _checksum_cache.get_checksums(filelist, pn)
+    return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
 
 
 class FetchData(object):
@@ -1230,24 +1240,26 @@
             self.pswd = self.parm["pswd"]
         self.setup = False
 
-        if "name" in self.parm:
-            self.md5_name = "%s.md5sum" % self.parm["name"]
-            self.sha256_name = "%s.sha256sum" % self.parm["name"]
-        else:
-            self.md5_name = "md5sum"
-            self.sha256_name = "sha256sum"
-        if self.md5_name in self.parm:
-            self.md5_expected = self.parm[self.md5_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
-            self.md5_expected = None
-        else:
-            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
-        if self.sha256_name in self.parm:
-            self.sha256_expected = self.parm[self.sha256_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
-            self.sha256_expected = None
-        else:
-            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+        def configure_checksum(checksum_id):
+            if "name" in self.parm:
+                checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
+            else:
+                checksum_name = "%ssum" % checksum_id
+
+            setattr(self, "%s_name" % checksum_id, checksum_name)
+
+            if checksum_name in self.parm:
+                checksum_expected = self.parm[checksum_name]
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+                checksum_expected = None
+            else:
+                checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
+
+            setattr(self, "%s_expected" % checksum_id, checksum_expected)
+
+        for checksum_id in CHECKSUM_LIST:
+            configure_checksum(checksum_id)
+
         self.ignore_checksums = False
 
         self.names = self.parm.get("name",'default').split(',')
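
configure_checksum() above derives a <id>_name/<id>_expected attribute pair
per algorithm from the URL parameters. A small sketch of the naming rule it
implements (the helper name is made up for illustration):

    def checksum_param_name(parm, checksum_id):
        # "...;name=foo" in SRC_URI yields keys such as "foo.sha256sum";
        # without a name parameter the key is just "sha256sum".
        if "name" in parm:
            return "%s.%ssum" % (parm["name"], checksum_id)
        return "%ssum" % checksum_id

    assert checksum_param_name({"name": "foo"}, "sha256") == "foo.sha256sum"
    assert checksum_param_name({}, "md5") == "md5sum"
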
@@ -1351,7 +1363,7 @@
         """
 
         # We cannot compute checksums for directories
-        if os.path.isdir(urldata.localpath) == True:
+        if os.path.isdir(urldata.localpath):
             return False
         if urldata.localpath.find("*") != -1:
             return False
@@ -1365,6 +1377,18 @@
         """
         return False
 
+    def verify_donestamp(self, ud, d):
+        """
+        Verify the donestamp file
+        """
+        return verify_donestamp(ud, d)
+
+    def update_donestamp(self, ud, d):
+        """
+        Update the donestamp file
+        """
+        update_stamp(ud, d)
+
     def _strip_leading_slashes(self, relpath):
         """
         Remove leading slash as os.path.join can't cope
@@ -1539,6 +1563,12 @@
         """
         return True
 
+    def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
+        """
+        Try to use a mirror
+        """
+        return bool(try_mirrors(fetch, d, urldata, mirrors, check))
+
     def checkstatus(self, fetch, urldata, d):
         """
         Check the status of a URL
@@ -1567,8 +1597,7 @@
         return True, str(latest_rev)
 
     def generate_revision_key(self, ud, d, name):
-        key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN") or "")
+        return self._revision_key(ud, d, name)
 
     def latest_versionstring(self, ud, d):
         """
@@ -1578,6 +1607,16 @@
         """
         return ('', '')
 
+    def done(self, ud, d):
+        """
+        Is the download done?
+        """
+        if os.path.exists(ud.localpath):
+            return True
+        if ud.localpath.find("*") != -1:
+            return True
+        return False
+
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
         if localonly and cache:
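
The new FetchMethod.done() above turns into an overridable method the path
test that Fetch.download() previously performed inline (see the download()
hunks later in this file). A reduced equivalent of the base implementation:

    import os

    def done(localpath):
        # A download counts as done if the file exists; glob patterns
        # cannot be checked directly and are assumed done.
        return os.path.exists(localpath) or "*" in localpath

    assert done("/")                 # an existing path
    assert done("/downloads/*.tgz")  # a glob pattern
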
@@ -1592,8 +1631,11 @@
 
         fn = d.getVar('FILE')
         mc = d.getVar('__BBMULTICONFIG') or ""
-        if cache and fn and mc + fn in urldata_cache:
-            self.ud = urldata_cache[mc + fn + str(id(d))]
+        key = None
+        if cache and fn:
+            key = mc + fn + str(id(d))
+        if key in urldata_cache:
+            self.ud = urldata_cache[key]
 
         for url in urls:
             if url not in self.ud:
@@ -1604,8 +1646,8 @@
                         self.ud[url] = None
                         pass
 
-        if fn and cache:
-            urldata_cache[mc + fn + str(id(d))] = self.ud
+        if key:
+            urldata_cache[key] = self.ud
 
     def localpath(self, url):
         if url not in self.urls:
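
The two hunks above fix a latent cache bug: membership was tested with
mc + fn but the lookup used mc + fn + str(id(d)), so the test could pass
while the read raised KeyError. Computing the key once keeps test, lookup
and store in step; a reduced illustration:

    urldata_cache = {}

    def cached_ud(cache, fn, mc, d_id):
        key = None
        if cache and fn:
            key = mc + fn + d_id
        # The same key is used for the membership test and the lookup.
        if key in urldata_cache:
            return urldata_cache[key]
        return None
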
@@ -1641,7 +1683,7 @@
             ud = self.ud[u]
             ud.setup_localpath(self.d)
             m = ud.method
-            localpath = ""
+            done = False
 
             if ud.lockfile:
                 lf = bb.utils.lockfile(ud.lockfile)
@@ -1649,28 +1691,28 @@
             try:
                 self.d.setVar("BB_NO_NETWORK", network)
 
-                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
-                    localpath = ud.localpath
+                if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
+                    done = True
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
                     mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
-                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
-                    if localpath:
+                    done = m.try_mirrors(self, ud, self.d, mirrors)
+                    if done:
                         try:
                             # early checksum verification so that if the checksum of the premirror
                             # contents mismatch the fetcher can still try upstream and mirrors
-                            update_stamp(ud, self.d)
+                            m.update_donestamp(ud, self.d)
                         except ChecksumError as e:
                             logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
                             logger.debug(1, str(e))
-                            localpath = ""
+                            done = False
 
                 if premirroronly:
                     self.d.setVar("BB_NO_NETWORK", "1")
 
                 firsterr = None
-                verified_stamp = verify_donestamp(ud, self.d)
-                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
+                verified_stamp = m.verify_donestamp(ud, self.d)
+                if not done and (not verified_stamp or m.need_update(ud, self.d)):
                     try:
                         if not trusted_network(self.d, ud.url):
                             raise UntrustedUrl(ud.url)
@@ -1678,10 +1720,10 @@
                         m.download(ud, self.d)
                         if hasattr(m, "build_mirror_data"):
                             m.build_mirror_data(ud, self.d)
-                        localpath = ud.localpath
+                        done = True
                         # early checksum verify, so that if checksum mismatched,
                         # fetcher still have chance to fetch from mirror
-                        update_stamp(ud, self.d)
+                        m.update_donestamp(ud, self.d)
 
                     except bb.fetch2.NetworkAccess:
                         raise
@@ -1703,14 +1745,14 @@
                             m.clean(ud, self.d)
                         logger.debug(1, "Trying MIRRORS")
                         mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
-                        localpath = try_mirrors(self, self.d, ud, mirrors)
+                        done = m.try_mirrors(self, ud, self.d, mirrors)
 
-                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
+                if not done or not m.done(ud, self.d):
                     if firsterr:
                         logger.error(str(firsterr))
                     raise FetchError("Unable to fetch URL from any source.", u)
 
-                update_stamp(ud, self.d)
+                m.update_donestamp(ud, self.d)
 
             except IOError as e:
                 if e.errno in [errno.ESTALE]:
@@ -1741,14 +1783,14 @@
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
             mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
-            ret = try_mirrors(self, self.d, ud, mirrors, True)
+            ret = m.try_mirrors(self, ud, self.d, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
                 ret = m.checkstatus(self, ud, self.d)
                 if not ret:
                     # Finally, try checking uri, u, from MIRRORS
                     mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
-                    ret = try_mirrors(self, self.d, ud, mirrors, True)
+                    ret = m.try_mirrors(self, ud, self.d, mirrors, True)
 
             if not ret:
                 raise FetchError("URL %s doesn't work" % u, u)
@@ -1853,6 +1895,7 @@
 from . import repo
 from . import clearcase
 from . import npm
+from . import npmsw
 
 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1871,3 +1914,4 @@
 methods.append(repo.Repo())
 methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
+methods.append(npmsw.NpmShrinkWrap())
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
index c56d875..566ace9 100644
--- a/poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -14,8 +14,6 @@
 #
 
 import os
-import sys
-import logging
 import bb
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
index e2934ef..49d7ae1 100644
--- a/poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -49,7 +49,6 @@
 #
 
 import os
-import sys
 import shutil
 import bb
 from   bb.fetch2 import FetchMethod
@@ -238,7 +237,7 @@
 
         # Clean clearcase meta-data before tar
 
-        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
+        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath], workdir = ud.viewdir)
 
         # Clean up so we can create a new view next time
         self.clean(ud, d);
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
index 1b35ba4..29123a4 100644
--- a/poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -14,7 +14,6 @@
 #
 
 import os
-import logging
 import bb
 from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
 from bb.fetch2 import runfetchcmd
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index fa41b07..5b3793a 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -594,7 +594,9 @@
         """
         Return a unique key for the url
         """
-        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
+        # Collapse adjacent slashes
+        slash_re = re.compile(r"/+")
+        return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name]
 
     def _lsremote(self, ud, d, search):
         """
@@ -671,7 +673,7 @@
 
             # search for version in the line
             tag = tagregex.search(tag_head)
-            if tag == None:
+            if tag is None:
                 continue
 
             tag = tag.group('pver')
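
The _revision_key() change above collapses runs of slashes before mapping
them to dots, so equivalent spellings of a repository path share one
revision cache key. A quick check of the substitution:

    import re

    slash_re = re.compile(r"/+")

    # "//foo///bar" and "/foo/bar" now produce the same key fragment:
    assert slash_re.sub(".", "//foo///bar") == ".foo.bar"
    assert slash_re.sub(".", "/foo/bar") == ".foo.bar"
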
diff --git a/poky/bitbake/lib/bb/fetch2/gitannex.py b/poky/bitbake/lib/bb/fetch2/gitannex.py
index 1d497dc..80a808d 100644
--- a/poky/bitbake/lib/bb/fetch2/gitannex.py
+++ b/poky/bitbake/lib/bb/fetch2/gitannex.py
@@ -8,11 +8,9 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import os
 import bb
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
-from   bb.fetch2 import logger
 
 class GitANNEX(Git):
     def supports(self, ud, d):
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index c622771..e708300 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -20,11 +20,12 @@
 import os
 import bb
 import copy
+import shutil
+import tempfile
 from   bb.fetch2.git import Git
 from   bb.fetch2 import runfetchcmd
 from   bb.fetch2 import logger
 from   bb.fetch2 import Fetch
-from   bb.fetch2 import BBFetchException
 
 class GitSM(Git):
     def supports(self, ud, d):
@@ -131,7 +132,7 @@
             ld.setVar('SRCPV', d.getVar('SRCPV'))
             ld.setVar('SRCREV_FORMAT', module)
 
-            function(ud, url, module, paths[module], ld)
+            function(ud, url, module, paths[module], workdir, ld)
 
         return submodules != []
 
@@ -153,7 +154,7 @@
         return False
 
     def download(self, ud, d):
-        def download_submodule(ud, url, module, modpath, d):
+        def download_submodule(ud, url, module, modpath, workdir, d):
             url += ";bareclone=1;nobranch=1"
 
             # Is the following still needed?
@@ -164,16 +165,25 @@
                 newfetch.download()
                 # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
                 runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=workdir)
             except Exception as e:
                 logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
                 raise
 
         Git.download(self, ud, d)
-        self.process_submodules(ud, ud.clonedir, download_submodule, d)
+
+        # If we're using a shallow mirror tarball it needs to be unpacked
+        # temporarily so that we can examine the .gitmodules file
+        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
+            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+            self.process_submodules(ud, tmpdir, download_submodule, d)
+            shutil.rmtree(tmpdir)
+        else:
+            self.process_submodules(ud, ud.clonedir, download_submodule, d)
 
     def unpack(self, ud, destdir, d):
-        def unpack_submodules(ud, url, module, modpath, d):
+        def unpack_submodules(ud, url, module, modpath, workdir, d):
             url += ";bareclone=1;nobranch=1"
 
             # Figure out where we clone over the bare submodules...
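
When a shallow mirror tarball is in use, the download() hunk above unpacks
it into a throwaway directory just long enough to walk the submodules, then
removes it. A sketch of that pattern with stdlib calls standing in for
runfetchcmd (cleanup made exception-safe here):

    import shutil
    import tempfile

    def with_unpacked_tarball(tarball, dl_dir, process):
        # Unpack into a temporary directory, run the callback against it,
        # then remove the directory again.
        tmpdir = tempfile.mkdtemp(dir=dl_dir)
        try:
            shutil.unpack_archive(tarball, tmpdir)
            return process(tmpdir)
        finally:
            shutil.rmtree(tmpdir)
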
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index e21115d..8f50370 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -13,8 +13,6 @@
 #
 
 import os
-import sys
-import logging
 import bb
 import errno
 from bb.fetch2 import FetchMethod
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 9700e61..4789850 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -1,301 +1,296 @@
+# Copyright (C) 2020 Savoir-Faire Linux
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """
-BitBake 'Fetch' NPM implementation
+BitBake 'Fetch' npm implementation
 
-The NPM fetcher is used to retrieve files from the npmjs repository
+The npm fetcher supports SRC_URI entries of the form:
+SRC_URI = "npm://some.registry.url;OptionA=xxx;OptionB=xxx;..."
 
-Usage in the recipe:
+Supported SRC_URI options are:
 
-    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
-    Suported SRC_URI options are:
+- package
+    The npm package name. This is a mandatory parameter.
 
-    - name
-    - version
+- version
+    The npm package version. This is a mandatory parameter.
 
-    npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz  would become npm://registry.npmjs.org;name=${PN};version=${PV}
-    The fetcher all triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, its assumed the fetch is good/done
+- downloadfilename
+    Specifies the filename used when storing the downloaded file.
 
+- destsuffix
+    Specifies the directory to use to unpack the package (default: npm).
 """
 
-import os
-import sys
-import urllib.request, urllib.parse, urllib.error
+import base64
 import json
-import subprocess
-import signal
+import os
+import re
+import shlex
+import tempfile
 import bb
-from   bb.fetch2 import FetchMethod
-from   bb.fetch2 import FetchError
-from   bb.fetch2 import ChecksumError
-from   bb.fetch2 import runfetchcmd
-from   bb.fetch2 import logger
-from   bb.fetch2 import UnpackError
-from   bb.fetch2 import ParameterError
+from bb.fetch2 import Fetch
+from bb.fetch2 import FetchError
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import ParameterError
+from bb.fetch2 import URI
+from bb.fetch2 import check_network_access
+from bb.fetch2 import runfetchcmd
+from bb.utils import is_semver
 
-def subprocess_setup():
-    # Python installs a SIGPIPE handler by default. This is usually not what
-    # non-Python subprocesses expect.
-    # SIGPIPE errors are known issues with gzip/bash
-    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+def npm_package(package):
+    """Convert the npm package name to remove unsupported characters"""
+    # Scoped package names (with the @) use the same naming convention
+    # as the 'npm pack' command.
+    if package.startswith("@"):
+        return re.sub("/", "-", package[1:])
+    return package
+
+def npm_filename(package, version):
+    """Get the filename of an npm package"""
+    return npm_package(package) + "-" + version + ".tgz"
+
+def npm_localfile(package, version):
+    """Get the local filename of an npm package"""
+    return os.path.join("npm2", npm_filename(package, version))
+
+def npm_integrity(integrity):
+    """
+    Get the checksum name and expected value from the subresource integrity
+        https://www.w3.org/TR/SRI/
+    """
+    algo, value = integrity.split("-", maxsplit=1)
+    return "%ssum" % algo, base64.b64decode(value).hex()
+
+def npm_unpack(tarball, destdir, d):
+    """Unpack an npm tarball"""
+    bb.utils.mkdirhier(destdir)
+    cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
+    cmd += " --no-same-owner"
+    cmd += " --strip-components=1"
+    runfetchcmd(cmd, d, workdir=destdir)
+
+class NpmEnvironment(object):
+    """
+    Using an npm config file seems more reliable than using CLI arguments.
+    This class allows creating a controlled environment for npm commands.
+    """
+    def __init__(self, d, configs=None):
+        self.d = d
+        self.configs = configs
+
+    def run(self, cmd, args=None, configs=None, workdir=None):
+        """Run npm command in a controlled environment"""
+        with tempfile.TemporaryDirectory() as tmpdir:
+            d = bb.data.createCopy(self.d)
+            d.setVar("HOME", tmpdir)
+
+            cfgfile = os.path.join(tmpdir, "npmrc")
+
+            if not workdir:
+                workdir = tmpdir
+
+            def _run(cmd):
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                return runfetchcmd(cmd, d, workdir=workdir)
+
+            if self.configs:
+                for key, value in self.configs:
+                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+
+            if configs:
+                for key, value in configs:
+                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+
+            if args:
+                for key, value in args:
+                    cmd += " --%s=%s" % (key, shlex.quote(value))
+
+            return _run(cmd)
 
 class Npm(FetchMethod):
-
-    """Class to fetch urls via 'npm'"""
-    def init(self, d):
-        pass
+    """Class to fetch a package from an npm registry"""
 
     def supports(self, ud, d):
-        """
-        Check to see if a given url can be fetched with npm
-        """
-        return ud.type in ['npm']
-
-    def debug(self, msg):
-        logger.debug(1, "NpmFetch: %s", msg)
-
-    def clean(self, ud, d):
-        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
-        bb.utils.remove(ud.localpath, False)
-        bb.utils.remove(ud.pkgdatadir, True)
-        bb.utils.remove(ud.fullmirror, False)
+        """Check if a given url can be fetched with npm"""
+        return ud.type in ["npm"]
 
     def urldata_init(self, ud, d):
-        """
-        init NPM specific variable within url data
-        """
-        if 'downloadfilename' in ud.parm:
-            ud.basename = ud.parm['downloadfilename']
-        else:
-            ud.basename = os.path.basename(ud.path)
+        """Init npm specific variables within url data"""
+        ud.package = None
+        ud.version = None
+        ud.registry = None
 
-        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
-        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
-        ud.pkgname = ud.parm.get("name", None)
-        if not ud.pkgname:
-            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
-        ud.version = ud.parm.get("version", None)
+        # Get the 'package' parameter
+        if "package" in ud.parm:
+            ud.package = ud.parm.get("package")
+
+        if not ud.package:
+            raise MissingParameterError("Parameter 'package' required", ud.url)
+
+        # Get the 'version' parameter
+        if "version" in ud.parm:
+            ud.version = ud.parm.get("version")
+
         if not ud.version:
-            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
-        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
-        ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-')
-        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
-        prefixdir = "npm/%s" % ud.pkgname
-        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
-        if not os.path.exists(ud.pkgdatadir):
-            bb.utils.mkdirhier(ud.pkgdatadir)
-        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
+            raise MissingParameterError("Parameter 'version' required", ud.url)
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
-        ud.prefixdir = prefixdir
+        if not is_semver(ud.version) and not ud.version == "latest":
+            raise ParameterError("Invalid 'version' parameter", ud.url)
 
-        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
-        mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
-        mirrortarball = mirrortarball.replace('/', '-')
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
-        ud.mirrortarballs = [mirrortarball]
+        # Extract the 'registry' part of the url
+        ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+
+        # Using the 'downloadfilename' parameter as local filename
+        # or the npm package name.
+        if "downloadfilename" in ud.parm:
+            ud.localfile = d.expand(ud.parm["downloadfilename"])
+        else:
+            ud.localfile = npm_localfile(ud.package, ud.version)
+
+        # Get the base 'npm' command
+        ud.basecmd = d.getVar("FETCHCMD_npm") or "npm"
+
+        # This fetcher resolves a URI from an npm package name and version and
+        # then forwards it to a proxy fetcher. A resolve file containing the
+        # resolved URI is created so that unwanted network access is avoided
+        # when the file already exists. The management of the donestamp file,
+        # the lockfile and the checksums is forwarded to the proxy fetcher.
+        ud.proxy = None
+        ud.needdonestamp = False
+        ud.resolvefile = self.localpath(ud, d) + ".resolved"
+
+    def _resolve_proxy_url(self, ud, d):
+        def _npm_view():
+            configs = []
+            configs.append(("json", "true"))
+            configs.append(("registry", ud.registry))
+            pkgver = shlex.quote(ud.package + "@" + ud.version)
+            cmd = ud.basecmd + " view %s" % pkgver
+            env = NpmEnvironment(d)
+            check_network_access(d, cmd, ud.registry)
+            view_string = env.run(cmd, configs=configs)
+
+            if not view_string:
+                raise FetchError("Unavailable package %s" % pkgver, ud.url)
+
+            try:
+                view = json.loads(view_string)
+
+                error = view.get("error")
+                if error is not None:
+                    raise FetchError(error.get("summary"), ud.url)
+
+                if ud.version == "latest":
+                    bb.warn("The npm package %s is using the latest " \
+                            "version available. This could lead to " \
+                            "non-reproducible builds." % pkgver)
+                elif ud.version != view.get("version"):
+                    raise ParameterError("Invalid 'version' parameter", ud.url)
+
+                return view
+
+            except Exception as e:
+                raise FetchError("Invalid view from npm: %s" % str(e), ud.url)
+
+        def _get_url(view):
+            tarball_url = view.get("dist", {}).get("tarball")
+
+            if tarball_url is None:
+                raise FetchError("Invalid 'dist.tarball' in view", ud.url)
+
+            uri = URI(tarball_url)
+            uri.params["downloadfilename"] = ud.localfile
+
+            integrity = view.get("dist", {}).get("integrity")
+            shasum = view.get("dist", {}).get("shasum")
+
+            if integrity is not None:
+                checksum_name, checksum_expected = npm_integrity(integrity)
+                uri.params[checksum_name] = checksum_expected
+            elif shasum is not None:
+                uri.params["sha1sum"] = shasum
+            else:
+                raise FetchError("Invalid 'dist.integrity' in view", ud.url)
+
+            return str(uri)
+
+        url = _get_url(_npm_view())
+
+        bb.utils.mkdirhier(os.path.dirname(ud.resolvefile))
+        with open(ud.resolvefile, "w") as f:
+            f.write(url)
+
+    def _setup_proxy(self, ud, d):
+        if ud.proxy is None:
+            if not os.path.exists(ud.resolvefile):
+                self._resolve_proxy_url(ud, d)
+
+            with open(ud.resolvefile, "r") as f:
+                url = f.read()
+
+            # Avoid conflicts between the environment data and:
+            # - the proxy url checksum
+            data = bb.data.createCopy(d)
+            data.delVarFlags("SRC_URI")
+            ud.proxy = Fetch([url], data)
+
+    def _get_proxy_method(self, ud, d):
+        self._setup_proxy(ud, d)
+        proxy_url = ud.proxy.urls[0]
+        proxy_ud = ud.proxy.ud[proxy_url]
+        proxy_d = ud.proxy.d
+        proxy_ud.setup_localpath(proxy_d)
+        return proxy_ud.method, proxy_ud, proxy_d
+
+    def verify_donestamp(self, ud, d):
+        """Verify the donestamp file"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.verify_donestamp(proxy_ud, proxy_d)
+
+    def update_donestamp(self, ud, d):
+        """Update the donestamp file"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        proxy_m.update_donestamp(proxy_ud, proxy_d)
 
     def need_update(self, ud, d):
-        if os.path.exists(ud.localpath):
-            return False
-        return True
+        """Force a fetch, even if localpath exists?"""
+        if not os.path.exists(ud.resolvefile):
+            return True
+        if ud.version == "latest":
+            return True
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.need_update(proxy_ud, proxy_d)
 
-    def _runpack(self, ud, d, pkgfullname: str, quiet=False) -> str:
-        """
-        Runs npm pack on a full package name.
-        Returns the filename of the downloaded package
-        """
-        bb.fetch2.check_network_access(d, pkgfullname, ud.registry)
-        dldir = d.getVar("DL_DIR")
-        dldir = os.path.join(dldir, ud.prefixdir)
-
-        command = "npm pack {} --registry {}".format(pkgfullname, ud.registry)
-        logger.debug(2, "Fetching {} using command '{}' in {}".format(pkgfullname, command, dldir))
-        filename = runfetchcmd(command, d, quiet, workdir=dldir)
-        return filename.rstrip()
-
-    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
-        file = data[pkg]['tgz']
-        logger.debug(2, "file to extract is %s" % file)
-        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
-        else:
-            bb.fatal("NPM package %s downloaded not a tarball!" % file)
-
-        # Change to subdir before executing command
-        if not os.path.exists(destdir):
-            os.makedirs(destdir)
-        path = d.getVar('PATH')
-        if path:
-            cmd = "PATH=\"%s\" %s" % (path, cmd)
-        bb.note("Unpacking %s to %s/" % (file, destdir))
-        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
-
-        if ret != 0:
-            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
-
-        if 'deps' not in data[pkg]:
-            return
-        for dep in data[pkg]['deps']:
-            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
-
-
-    def unpack(self, ud, destdir, d):
-        dldir = d.getVar("DL_DIR")
-        with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile:
-            workobj = json.load(datafile)
-        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
-
-        if 'subdir' in ud.parm:
-            unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
-        else:
-            unpackdir = '%s/npmpkg' % destdir
-
-        self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)
-
-    def _parse_view(self, output):
-        '''
-        Parse the output of npm view --json; the last JSON result
-        is assumed to be the one that we're interested in.
-        '''
-        pdata = json.loads(output);
-        try:
-            return pdata[-1]
-        except:
-            return pdata
-
-    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
-        if fetchedlist is None:
-            fetchedlist = []
-        pkgfullname = pkg
-        if version != '*' and not '/' in version:
-            pkgfullname += "@'%s'" % version
-        if pkgfullname in fetchedlist:
-            return
-
-        logger.debug(2, "Calling getdeps on %s" % pkg)
-        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
-        output = runfetchcmd(fetchcmd, d, True)
-        pdata = self._parse_view(output)
-        if not pdata:
-            raise FetchError("The command '%s' returned no output" % fetchcmd)
-        if optional:
-            pkg_os = pdata.get('os', None)
-            if pkg_os:
-                if not isinstance(pkg_os, list):
-                    pkg_os = [pkg_os]
-                blacklist = False
-                for item in pkg_os:
-                    if item.startswith('!'):
-                        blacklist = True
-                        break
-                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
-                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
-                    return
-        filename = self._runpack(ud, d, pkgfullname)
-        data[pkg] = {}
-        data[pkg]['tgz'] = filename
-        fetchedlist.append(pkgfullname)
-
-        dependencies = pdata.get('dependencies', {})
-        optionalDependencies = pdata.get('optionalDependencies', {})
-        dependencies.update(optionalDependencies)
-        depsfound = {}
-        optdepsfound = {}
-        data[pkg]['deps'] = {}
-        for dep in dependencies:
-            if dep in optionalDependencies:
-                optdepsfound[dep] = dependencies[dep]
-            else:
-                depsfound[dep] = dependencies[dep]
-        for dep, version in optdepsfound.items():
-            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
-        for dep, version in depsfound.items():
-            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
-
-    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
-        logger.debug(2, "NPM shrinkwrap file is %s" % data)
-        if toplevel:
-            name = data.get('name', None)
-            if name and name != pkg:
-                for obj in data.get('dependencies', []):
-                    if obj == pkg:
-                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
-                        return
-
-        pkgnameWithVersion = "{}@{}".format(pkg, version)
-        logger.debug(2, "Get dependencies for {}".format(pkgnameWithVersion))
-        filename = self._runpack(ud, d, pkgnameWithVersion)
-        manifest[pkg] = {}
-        manifest[pkg]['tgz'] = filename
-        manifest[pkg]['deps'] = {}
-
-        if pkg in lockdown:
-            sha1_expected = lockdown[pkg][version]
-            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
-            if sha1_expected != sha1_data:
-                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
-                raise ChecksumError('Checksum mismatch!%s' % msg)
-        else:
-            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))
-
-        if 'dependencies' in data:
-            for obj in data['dependencies']:
-                logger.debug(2, "Found dep is %s" % str(obj))
-                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
+    def try_mirrors(self, fetch, ud, d, mirrors):
+        """Try to use a mirror"""
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.try_mirrors(fetch, proxy_ud, proxy_d, mirrors)
 
     def download(self, ud, d):
         """Fetch url"""
-        jsondepobj = {}
-        shrinkobj = {}
-        lockdown = {}
+        self._setup_proxy(ud, d)
+        ud.proxy.download()
 
-        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
-            dest = d.getVar("DL_DIR")
-            bb.utils.mkdirhier(dest)
-            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
-            return
+    def unpack(self, ud, rootdir, d):
+        """Unpack the downloaded archive"""
+        destsuffix = ud.parm.get("destsuffix", "npm")
+        destdir = os.path.join(rootdir, destsuffix)
+        npm_unpack(ud.localpath, destdir, d)
 
-        if ud.parm.get("noverify", None) != '1':
-            shwrf = d.getVar('NPM_SHRINKWRAP')
-            logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
-            if shwrf:
-                try:
-                    with open(shwrf) as datafile:
-                        shrinkobj = json.load(datafile)
-                except Exception as e:
-                    raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
-            elif not ud.ignore_checksums:
-                logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
-            lckdf = d.getVar('NPM_LOCKDOWN')
-            logger.debug(2, "NPM lockdown file is %s" % lckdf)
-            if lckdf:
-                try:
-                    with open(lckdf) as datafile:
-                        lockdown = json.load(datafile)
-                except Exception as e:
-                    raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
-            elif not ud.ignore_checksums:
-                logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
+    def clean(self, ud, d):
+        """Clean any existing full or partial download"""
+        if os.path.exists(ud.resolvefile):
+            self._setup_proxy(ud, d)
+            ud.proxy.clean()
+            bb.utils.remove(ud.resolvefile)
 
-        if ('name' not in shrinkobj):
-            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
-        else:
-            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
-
-        with open(ud.localpath, 'w') as outfile:
-            json.dump(jsondepobj, outfile)
-
-    def build_mirror_data(self, ud, d):
-        # Generate a mirror tarball if needed
-        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
-            # it's possible that this symlink points to read-only filesystem with PREMIRROR
-            if os.path.islink(ud.fullmirror):
-                os.unlink(ud.fullmirror)
-
-            dldir = d.getVar("DL_DIR")
-            logger.info("Creating tarball of npm data")
-            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
-                        workdir=dldir)
-            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
+    def done(self, ud, d):
+        """Is the download done?"""
+        if not os.path.exists(ud.resolvefile):
+            return False
+        proxy_m, proxy_ud, proxy_d = self._get_proxy_method(ud, d)
+        return proxy_m.done(proxy_ud, proxy_d)
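
The rewritten fetcher above delegates the real download to a proxy fetcher
and converts npm subresource-integrity strings into the checksum parameters
the generic fetcher understands (npm_integrity()). A stand-alone round-trip
check of that conversion using only the stdlib:

    import base64
    import hashlib

    def npm_integrity(integrity):
        # Same conversion as above: "sha512-<base64>" -> ("sha512sum", hex).
        algo, value = integrity.split("-", maxsplit=1)
        return "%ssum" % algo, base64.b64decode(value).hex()

    digest = hashlib.sha512(b"example").digest()
    integrity = "sha512-" + base64.b64encode(digest).decode()
    name, expected = npm_integrity(integrity)
    assert name == "sha512sum"
    assert expected == hashlib.sha512(b"example").hexdigest()
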
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
new file mode 100644
index 0000000..0c3511d
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -0,0 +1,255 @@
+# Copyright (C) 2020 Savoir-Faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""
+BitBake 'Fetch' npm shrinkwrap implementation
+
+The npmsw fetcher supports SRC_URI entries of the form:
+SRC_URI = "npmsw://some.registry.url;OptionA=xxx;OptionB=xxx;..."
+
+Supported SRC_URI options are:
+
+- dev
+    Set to 1 to also install devDependencies.
+
+- destsuffix
+    Specifies the directory to use to unpack the dependencies (default: ${S}).
+"""
+
+import json
+import os
+import re
+import bb
+from bb.fetch2 import Fetch
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import ParameterError
+from bb.fetch2 import URI
+from bb.fetch2.npm import npm_integrity
+from bb.fetch2.npm import npm_localfile
+from bb.fetch2.npm import npm_unpack
+from bb.utils import is_semver
+
+def foreach_dependencies(shrinkwrap, callback=None, dev=False):
+    """
+        Run a callback for each dependency of a shrinkwrap file.
+        The callback uses the signature:
+            callback(name, params, deptree)
+        with:
+            name = the package name (string)
+            params = the package parameters (dictionary)
+            deptree = the package dependency tree (array of strings)
+    """
+    def _walk_deps(deps, deptree):
+        for name in deps:
+            subtree = [*deptree, name]
+            _walk_deps(deps[name].get("dependencies", {}), subtree)
+            if callback is not None:
+                if deps[name].get("dev", False) and not dev:
+                    continue
+                elif deps[name].get("bundled", False):
+                    continue
+                callback(name, deps[name], subtree)
+
+    _walk_deps(shrinkwrap.get("dependencies", {}), [])
+
+class NpmShrinkWrap(FetchMethod):
+    """Class to fetch all packages from a shrinkwrap file"""
+
+    def supports(self, ud, d):
+        """Check if a given url can be fetched with npmsw"""
+        return ud.type in ["npmsw"]
+
+    def urldata_init(self, ud, d):
+        """Init npmsw specific variables within url data"""
+
+        # Get the 'shrinkwrap' parameter
+        ud.shrinkwrap_file = re.sub(r"^npmsw://", "", ud.url.split(";")[0])
+
+        # Get the 'dev' parameter
+        ud.dev = bb.utils.to_boolean(ud.parm.get("dev"), False)
+
+        # Resolve the dependencies
+        ud.deps = []
+
+        def _resolve_dependency(name, params, deptree):
+            url = None
+            localpath = None
+            extrapaths = []
+            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
+            destsuffix = os.path.join(*destsubdirs)
+
+            integrity = params.get("integrity", None)
+            resolved = params.get("resolved", None)
+            version = params.get("version", None)
+
+            # Handle registry sources
+            if is_semver(version) and resolved and integrity:
+                localfile = npm_localfile(name, version)
+
+                uri = URI(resolved)
+                uri.params["downloadfilename"] = localfile
+
+                checksum_name, checksum_expected = npm_integrity(integrity)
+                uri.params[checksum_name] = checksum_expected
+
+                url = str(uri)
+
+                localpath = os.path.join(d.getVar("DL_DIR"), localfile)
+
+                # Create a resolve file to mimic the npm fetcher and allow
+                # re-usability of the downloaded file.
+                resolvefile = localpath + ".resolved"
+
+                bb.utils.mkdirhier(os.path.dirname(resolvefile))
+                with open(resolvefile, "w") as f:
+                    f.write(url)
+
+                extrapaths.append(resolvefile)
+
+            # Handle http tarball sources
+            elif version.startswith("http") and integrity:
+                localfile = os.path.join("npm2", os.path.basename(version))
+
+                uri = URI(version)
+                uri.params["downloadfilename"] = localfile
+
+                checksum_name, checksum_expected = npm_integrity(integrity)
+                uri.params[checksum_name] = checksum_expected
+
+                url = str(uri)
+
+                localpath = os.path.join(d.getVar("DL_DIR"), localfile)
+
+            # Handle git sources
+            elif version.startswith("git"):
+                regex = re.compile(r"""
+                    ^
+                    git\+
+                    (?P<protocol>[a-z]+)
+                    ://
+                    (?P<url>[^#]+)
+                    \#
+                    (?P<rev>[0-9a-f]+)
+                    $
+                    """, re.VERBOSE)
+
+                match = regex.match(version)
+
+                if not match:
+                    raise ParameterError("Invalid git url: %s" % version, ud.url)
+
+                groups = match.groupdict()
+
+                uri = URI("git://" + str(groups["url"]))
+                uri.params["protocol"] = str(groups["protocol"])
+                uri.params["rev"] = str(groups["rev"])
+                uri.params["destsuffix"] = destsuffix
+
+                url = str(uri)
+
+            # local tarball sources and local link sources are unsupported
+            else:
+                raise ParameterError("Unsupported dependency: %s" % name, ud.url)
+
+            ud.deps.append({
+                "url": url,
+                "localpath": localpath,
+                "extrapaths": extrapaths,
+                "destsuffix": destsuffix,
+            })
+
+        try:
+            with open(ud.shrinkwrap_file, "r") as f:
+                shrinkwrap = json.load(f)
+        except Exception as e:
+            raise ParameterError("Invalid shrinkwrap file: %s" % str(e), ud.url)
+
+        foreach_dependencies(shrinkwrap, _resolve_dependency, ud.dev)
+
+        # Avoid conflicts between the environment data and:
+        # - the proxy url revision
+        # - the proxy url checksum
+        data = bb.data.createCopy(d)
+        data.delVar("SRCREV")
+        data.delVarFlags("SRC_URI")
+
+        # This fetcher resolves multiple URIs from a shrinkwrap file and then
+        # forwards it to a proxy fetcher. The management of the donestamp file,
+        # the lockfile and the checksums are forwarded to the proxy fetcher.
+        ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
+        ud.needdonestamp = False
+
+    @staticmethod
+    def _foreach_proxy_method(ud, handle):
+        returns = []
+        for proxy_url in ud.proxy.urls:
+            proxy_ud = ud.proxy.ud[proxy_url]
+            proxy_d = ud.proxy.d
+            proxy_ud.setup_localpath(proxy_d)
+            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+        return returns
+
+    def verify_donestamp(self, ud, d):
+        """Verify the donestamp file"""
+        def _handle(m, ud, d):
+            return m.verify_donestamp(ud, d)
+        return all(self._foreach_proxy_method(ud, _handle))
+
+    def update_donestamp(self, ud, d):
+        """Update the donestamp file"""
+        def _handle(m, ud, d):
+            m.update_donestamp(ud, d)
+        self._foreach_proxy_method(ud, _handle)
+
+    def need_update(self, ud, d):
+        """Force a fetch, even if localpath exists?"""
+        def _handle(m, ud, d):
+            return m.need_update(ud, d)
+        return all(self._foreach_proxy_method(ud, _handle))
+
+    def try_mirrors(self, fetch, ud, d, mirrors):
+        """Try to use a mirror"""
+        def _handle(m, ud, d):
+            return m.try_mirrors(fetch, ud, d, mirrors)
+        return all(self._foreach_proxy_method(ud, _handle))
+
+    def download(self, ud, d):
+        """Fetch url"""
+        ud.proxy.download()
+
+    def unpack(self, ud, rootdir, d):
+        """Unpack the downloaded dependencies"""
+        destdir = d.getVar("S")
+        destsuffix = ud.parm.get("destsuffix")
+        if destsuffix:
+            destdir = os.path.join(rootdir, destsuffix)
+
+        bb.utils.mkdirhier(destdir)
+        bb.utils.copyfile(ud.shrinkwrap_file,
+                          os.path.join(destdir, "npm-shrinkwrap.json"))
+
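+        # Dependencies resolved to a local tarball are unpacked manually with
+        # npm_unpack; everything else is delegated to the proxy fetcher.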
+        auto = [dep["url"] for dep in ud.deps if not dep["localpath"]]
+        manual = [dep for dep in ud.deps if dep["localpath"]]
+
+        if auto:
+            ud.proxy.unpack(destdir, auto)
+
+        for dep in manual:
+            depdestdir = os.path.join(destdir, dep["destsuffix"])
+            npm_unpack(dep["localpath"], depdestdir, d)
+
+    def clean(self, ud, d):
+        """Clean any existing full or partial download"""
+        ud.proxy.clean()
+
+        # Clean extra files
+        for dep in ud.deps:
+            for path in dep["extrapaths"]:
+                bb.utils.remove(path)
+
+    def done(self, ud, d):
+        """Is the download done ?"""
+        def _handle(m, ud, d):
+            return m.done(ud, d)
+        return all(self._foreach_proxy_method(ud, _handle))
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 3e56715..8f091ef 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -7,8 +7,6 @@
 
 """
 
-import  os
-import  sys
 import logging
 import  bb
 from    bb.fetch2 import FetchMethod
@@ -43,7 +41,7 @@
         else:
             pv = d.getVar("PV", False)
             rev = bb.fetch2.srcrev_internal_helper(ud, d)
-            if rev and rev != True:
+            if rev:
                 ud.revision = rev
             else:
                 ud.revision = ""
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 54d001e..f57c2a4 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -11,7 +11,6 @@
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import os
-import logging
 import bb
 from   bb.fetch2 import FetchMethod
 from   bb.fetch2 import FetchError
@@ -105,7 +104,7 @@
         if command == 'changes':
             p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
         elif command == 'print':
-            if depot_filename != None:
+            if depot_filename is not None:
                 p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
             else:
                 raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
diff --git a/poky/bitbake/lib/bb/fetch2/ssh.py b/poky/bitbake/lib/bb/fetch2/ssh.py
index f5be060..5e982ec 100644
--- a/poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/poky/bitbake/lib/bb/fetch2/ssh.py
@@ -32,8 +32,6 @@
 
 import re, os
 from   bb.fetch2 import FetchMethod
-from   bb.fetch2 import FetchError
-from   bb.fetch2 import logger
 from   bb.fetch2 import runfetchcmd
 
 
@@ -60,7 +58,7 @@
     '''Class to fetch a module or modules via Secure Shell'''
 
     def supports(self, urldata, d):
-        return __pattern__.match(urldata.url) != None
+        return __pattern__.match(urldata.url) is not None
 
     def supports_checksum(self, urldata):
         return False
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 96d666b..971a5ad 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -11,8 +11,6 @@
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import os
-import sys
-import logging
 import bb
 import re
 from   bb.fetch2 import FetchMethod
@@ -49,7 +47,7 @@
         svndir = d.getVar("SVNDIR") or (d.getVar("DL_DIR") + "/svn")
         relpath = self._strip_leading_slashes(ud.path)
         ud.pkgdir = os.path.join(svndir, ud.host, relpath)
-        ud.moddir = os.path.join(ud.pkgdir, ud.module)
+        ud.moddir = os.path.join(ud.pkgdir, ud.path_spec)
         # Protects the repository from concurrent updates, e.g. from two
         # recipes fetching different revisions at the same time
         ud.svnlock = os.path.join(ud.pkgdir, "svn.lock")
@@ -124,30 +122,30 @@
 
         try:
             if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
-                svnupdatecmd = self._buildsvncommand(ud, d, "update")
+                svncmd = self._buildsvncommand(ud, d, "update")
                 logger.info("Update " + ud.url)
                 # We need to attempt to run svn upgrade first in case it's an older working format
                 try:
                     runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                 except FetchError:
                     pass
-                logger.debug(1, "Running %s", svnupdatecmd)
-                bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
-                runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+                logger.debug(1, "Running %s", svncmd)
+                bb.fetch2.check_network_access(d, svncmd, ud.url)
+                runfetchcmd(svncmd, d, workdir=ud.moddir)
             else:
-                svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+                svncmd = self._buildsvncommand(ud, d, "fetch")
                 logger.info("Fetch " + ud.url)
                 # check out sources there
                 bb.utils.mkdirhier(ud.pkgdir)
-                logger.debug(1, "Running %s", svnfetchcmd)
-                bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
-                runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+                logger.debug(1, "Running %s", svncmd)
+                bb.fetch2.check_network_access(d, svncmd, ud.url)
+                runfetchcmd(svncmd, d, workdir=ud.pkgdir)
 
             if not ("externals" in ud.parm and ud.parm["externals"] == "nowarn"):
                 # Warn the user if this had externals (won't catch them all)
                 output = runfetchcmd("svn propget svn:externals || true", d, workdir=ud.moddir)
                 if output:
-                    if "--ignore-externals" in svnfetchcmd.split():
+                    if "--ignore-externals" in svncmd.split():
                         bb.warn("%s contains svn:externals." % ud.url)
                         bb.warn("These should be added to the recipe SRC_URI as necessary.")
                         bb.warn("svn fetch has ignored externals:\n%s" % output)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 725586d..f7d1de2 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -12,11 +12,10 @@
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
+import shlex
 import re
 import tempfile
-import subprocess
 import os
-import logging
 import errno
 import bb
 import bb.progress
@@ -27,7 +26,6 @@
 from   bb.fetch2 import FetchError
 from   bb.fetch2 import logger
 from   bb.fetch2 import runfetchcmd
-from   bb.fetch2 import FetchConnectionCache
 from   bb.utils import export_proxies
 from   bs4 import BeautifulSoup
 from   bs4 import SoupStrainer
@@ -94,9 +92,9 @@
         fetchcmd = self.basecmd
 
         if 'downloadfilename' in ud.parm:
-            dldir = d.getVar("DL_DIR")
-            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
-            fetchcmd += " -O " + dldir + os.sep + ud.localfile
+            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
+            bb.utils.mkdirhier(os.path.dirname(localpath))
+            fetchcmd += " -O %s" % shlex.quote(localpath)
 
         if ud.user and ud.pswd:
             fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
@@ -302,6 +300,7 @@
             # Some servers (FusionForge, as used on Alioth) require that the
             # optional Accept header is set.
             r.add_header("Accept", "*/*")
+            r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
             def add_basic_auth(login_str, request):
                 '''Adds Basic auth to http request, pass in login:password as string'''
                 import base64