poky: subtree update:b66b9f7548..26ae42ded7

Adrian Bunk (1):
      dpkg: Remove workaround patch for host tar < 1.27

Alexander Kanavin (39):
      linux-yocto: exclude from version checks/automated version updates
      pciutils: upgrade 3.6.4 -> 3.7.0
      createrepo-c: upgrade 0.15.10 -> 0.15.11
      librepo: upgrade 1.11.3 -> 1.12.0
      pkgconf: upgrade 1.6.3 -> 1.7.3
      python3-numpy: upgrade 1.18.4 -> 1.18.5
      python3-git: upgrade 3.1.2 -> 3.1.3
      strace: upgrade 5.6 -> 5.7
      acpica: upgrade 20200430 -> 20200528
      man-db: upgrade 2.9.1 -> 2.9.2
      msmtp: upgrade 1.8.10 -> 1.8.11
      epiphany: upgrade 3.36.1 -> 3.36.2
      cogl-1.0: upgrade 1.22.6 -> 1.22.8
      libdrm: upgrade 2.4.101 -> 2.4.102
      vulkan-demos: upgrade to latest revision
      xkeyboard-config: upgrade 2.29 -> 2.30
      linux-firmware: upgrade 20200421 -> 20200519
      babeltrace2: upgrade 2.0.2 -> 2.0.3
      lttng-tools: upgrade 2.12.0 -> 2.12.1
      ffmpeg: upgrade 4.2.2 -> 4.2.3
      wpebackend-fdo: upgrade 1.6.0 -> 1.6.1
      gnutls: upgrade 3.6.13 -> 3.6.14
      libcap: upgrade 2.34 -> 2.36
      bison: upgrade 3.6.2 -> 3.6.3
      asciidoc: 8.6.10 -> 9.0.0
      debianutils: 4.9.1 -> 4.11
      git: upgrade 2.26.2 -> 2.27.0
      go: 1.14.3 -> 1.14.4
      iproute2: upgrade 5.6.0 -> 5.7.0
      libksba: 1.3.5 -> 1.4.0
      lttng-modules: update to 2.12.1
      mpg123: update to 1.26.1
      ovmf: update to 202005
      shared-mime-info: upgrade 1.15 -> 2.0
      subversion: upgrade 1.13.0 -> 1.14.0
      xinetd: 2.3.15 -> 2.3.15.4
      init-system-helpers: use https for fetching
      ca-certificates: correct upstream version check
      build-sysroots: add sysroot paths with native binaries to PATH

Andreas Müller (4):
      vte: tiny cleanup / renumber patch
      vte: upgrade 0.60.2 -> 0.60.3
      harfbuzz: upgrade 2.6.4 -> 2.6.7
      sqlite3: upgrade 3.32.1 -> 3.32.2

Changqing Li (1):
      cups.inc: remove template service from SYSTEMD_SERVICE

Chen Qi (2):
      db: do not install db_verify if 'verify' is not enabled
      vim: restore the 'chmod -x' workaround in do_install

Hongxu Jia (1):
      glib-networking/btrfs-tools/dosfstools/parted/bmap-tools/libsoup-2.4: add nativesdk support

Jacob Kroon (4):
      features_check: Factorize code for checking features
      meta: Don't inherit 'features_check' in recipes that don't utilize it
      features_check: Warn if not used
      insane: Check for feature check variables not being used

Joe Slater (2):
      qemu: force build type to production
      vim: _FORTIFY_SOURCE=2 be gone

Joshua Watt (12):
      bitbake: bitbake: cooker: Split file collections per multiconfig
      bitbake: bitbake: cache: Use multiconfig aware caches
      bitbake: bitbake: lib: Add support for Logging Adapters
      bitbake: bitbake: lib: Add PrefixLoggerAdapter helper
      bitbake: bitbake: cache: Improve logging
      bitbake: bitbake: cache: Cache size optimization
      bitbake: bitbake: tests: Add tests for BBMASK in multiconfig
      bitbake: bitbake: command: Move split_mc_pn to runqueue
      bitbake: bitbake: cache: Fix error when cache is rebuilt
      wic: Fix --extra-space argument handling
      bitbake: bitbake: siggen: Pass all data caches to hash functions
      bitbake: bitbake: tests: Add mcdepends test

Kai Kang (4):
      mdadm: remove service template from SYSTEMD_SERVICE
      wpa-supplicant: remove service templates from SYSTEMD_SERVICE
      encodings: clear postinst script
      avahi-dnsconfd: rdepends on avahi-daemon

Khem Raj (2):
      libunwind: Fix build on aarch64/musl
      stress-ng: Fix build on musl

Lee Chee Yang (1):
      qemu: fix CVE-2020-13361

Ming Liu (1):
      u-boot: support merging .cfg files for UBOOT_CONFIG

Mingli Yu (2):
      python3-magic: add the missing rdepends
      python3-setuptools: add missing rdepends for python3-pkg-resources

Paul Barker (5):
      selftest: git-submodule-test: New recipe for testing a gitsm SRC_URI
      archiver: Capture git submodules in mirror archiver
      selftest-ed: Support native builds
      selftest-nopackages: New recipe in meta-selftest
      archiver: Speed up tests

Pierre-Jean Texier (2):
      libarchive: upgrade 3.4.2 -> 3.4.3
      iptables: upgrade 1.8.4 -> 1.8.5

Rasmus Villemoes (1):
      glibc: move ld.so.conf back to main package

Richard Purdie (1):
      Revert "bitbake.conf: Remove unused DEPLOY_DIR_TOOLS variable"

Stefan Agner (1):
      initramfs-framework: check successful mount using mountpoint

Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Change-Id: I047d0fa664dcc2864fd7c1a09d124e3d8c197e9f
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index d1be836..be5ea6a 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -19,16 +19,20 @@
 import os
 import logging
 import pickle
-from collections import defaultdict
+from collections import defaultdict
+from collections.abc import Mapping
 import bb.utils
+from bb import PrefixLoggerAdapter
 import re
 
 logger = logging.getLogger("BitBake.Cache")
 
 __cache_version__ = "152"
 
-def getCacheFile(path, filename, data_hash):
-    return os.path.join(path, filename + "." + data_hash)
+def getCacheFile(path, filename, mc, data_hash):
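+    # Each multiconfig gets its own cache file, <filename>.<mc>.<data_hash>;
+    # the default configuration omits the <mc> component.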
+    mcspec = ''
+    if mc:
+        mcspec = ".%s" % mc
+    return os.path.join(path, filename + mcspec + "." + data_hash)
 
 # RecipeInfoCommon defines common data retrieving methods
 # from meta data for caches. CoreRecipeInfo as well as other
@@ -324,7 +328,7 @@
         bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
         return bb_data[virtual]
 
-    def load_bbfile(self, bbfile, appends, virtonly = False):
+    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
         """
         Load and parse one .bb build file
         Return the data and whether parsing resulted in the file being skipped
@@ -337,6 +341,10 @@
             datastores = parse_recipe(bb_data, bbfile, appends, mc)
             return datastores
 
+        if mc is not None:
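+            # Parse against the named multiconfig's datastore rather than
+            # the default configuration's data.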
+            bb_data = self.databuilder.mcdata[mc].createCopy()
+            return parse_recipe(bb_data, bbfile, appends, mc)
+
         bb_data = self.data.createCopy()
         datastores = parse_recipe(bb_data, bbfile, appends)
 
@@ -354,14 +362,15 @@
     """
     BitBake Cache implementation
     """
-
-    def __init__(self, databuilder, data_hash, caches_array):
+    def __init__(self, databuilder, mc, data_hash, caches_array):
         super().__init__(databuilder)
         data = databuilder.data
 
         # Pass caches_array information into Cache Constructor
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
+        self.mc = mc
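+        # Tag log messages with the multiconfig name (or "default") so
+        # output from different configurations can be told apart.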
+        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
         self.caches_array = caches_array
         self.cachedir = data.getVar("CACHE")
         self.clean = set()
@@ -374,31 +383,47 @@
 
         if self.cachedir in [None, '']:
             self.has_cache = False
-            logger.info("Not using a cache. "
-                        "Set CACHE = <directory> to enable.")
+            self.logger.info("Not using a cache. "
+                             "Set CACHE = <directory> to enable.")
             return
 
         self.has_cache = True
-        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
 
-        logger.debug(1, "Cache dir: %s", self.cachedir)
+    def getCacheFile(self, cachefile):
+        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
+
+    def prepare_cache(self, progress):
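+        # Load this multiconfig's existing cache file (if valid), reporting
+        # progress through the supplied callback; returns the number of
+        # entries loaded.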
+        if not self.has_cache:
+            return 0
+
+        loaded = 0
+
+        self.cachefile = self.getCacheFile("bb_cache.dat")
+
+        self.logger.debug(1, "Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
 
         cache_ok = True
         if self.caches_array:
             for cache_class in self.caches_array:
-                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
-                cache_ok = cache_ok and os.path.exists(cachefile)
+                cachefile = self.getCacheFile(cache_class.cachefile)
+                cache_exists = os.path.exists(cachefile)
+                self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+                cache_ok = cache_ok and cache_exists
                 cache_class.init_cacheData(self)
         if cache_ok:
-            self.load_cachefile()
+            loaded = self.load_cachefile(progress)
         elif os.path.isfile(self.cachefile):
-            logger.info("Out of date cache found, rebuilding...")
+            self.logger.info("Out of date cache found, rebuilding...")
         else:
-            logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+            self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
 
         # We don't use the symlink, it's just for debugging convenience
-        symlink = os.path.join(self.cachedir, "bb_cache.dat")
+        if self.mc:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
+        else:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat")
+
         if os.path.exists(symlink):
             bb.utils.remove(symlink)
         try:
@@ -406,22 +431,31 @@
         except OSError:
             pass
 
-    def load_cachefile(self):
+        return loaded
+
+    def cachesize(self):
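+        # Sum the on-disk sizes of all cache files so callers can size
+        # progress reporting before any unpickling begins.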
+        if not self.has_cache:
+            return 0
+
         cachesize = 0
+        for cache_class in self.caches_array:
+            cachefile = self.getCacheFile(cache_class.cachefile)
+            try:
+                with open(cachefile, "rb") as cachefile:
+                    cachesize += os.fstat(cachefile.fileno()).st_size
+            except FileNotFoundError:
+                pass
+
+        return cachesize
+
+    def load_cachefile(self, progress):
+        cachesize = self.cachesize()
         previous_progress = 0
         previous_percent = 0
 
-        # Calculate the correct cachesize of all those cache files
         for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
-            with open(cachefile, "rb") as cachefile:
-                cachesize += os.fstat(cachefile.fileno()).st_size
-
-        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
-
-        for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
-            logger.debug(1, 'Loading cache file: %s' % cachefile)
+            cachefile = self.getCacheFile(cache_class.cachefile)
+            self.logger.debug(1, 'Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
                 # Check cache version information
@@ -429,15 +463,15 @@
                     cache_ver = pickled.load()
                     bitbake_ver = pickled.load()
                 except Exception:
-                    logger.info('Invalid cache, rebuilding...')
-                    return
+                    self.logger.info('Invalid cache, rebuilding...')
+                    return 0
 
                 if cache_ver != __cache_version__:
-                    logger.info('Cache version mismatch, rebuilding...')
-                    return
+                    self.logger.info('Cache version mismatch, rebuilding...')
+                    return 0
                 elif bitbake_ver != bb.__version__:
-                    logger.info('Bitbake version mismatch, rebuilding...')
-                    return
+                    self.logger.info('Bitbake version mismatch, rebuilding...')
+                    return 0
 
                 # Load the rest of the cache file
                 current_progress = 0
@@ -460,29 +494,17 @@
                         self.depends_cache[key] = [value]
                     # only fire events on even percentage boundaries
                     current_progress = cachefile.tell() + previous_progress
-                    if current_progress > cachesize:
-                        # we might have calculated incorrect total size because a file
-                        # might've been written out just after we checked its size
-                        cachesize = current_progress
-                    current_percent = 100 * current_progress / cachesize
-                    if current_percent > previous_percent:
-                        previous_percent = current_percent
-                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
-                                      self.data)
+                    progress(cachefile.tell() + previous_progress)
 
                 previous_progress += current_progress
 
-        # Note: depends cache number is corresponding to the parsing file numbers.
-        # The same file has several caches, still regarded as one item in the cache
-        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
-                                                  len(self.depends_cache)),
-                      self.data)
+        return len(self.depends_cache)
 
     def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
-        logger.debug(1, "Parsing %s", filename)
+        self.logger.debug(1, "Parsing %s", filename)
         infos = []
-        datastores = self.load_bbfile(filename, appends)
+        datastores = self.load_bbfile(filename, appends, mc=self.mc)
         depends = []
         variants = []
         # Process the "real" fn last so we can store variants list
@@ -534,7 +556,7 @@
         cached, infos = self.load(fn, appends)
         for virtualfn, info_array in infos:
             if info_array[0].skipped:
-                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+                self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                 skipped += 1
             else:
                 self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -570,21 +592,21 @@
 
         # File isn't in depends_cache
         if not fn in self.depends_cache:
-            logger.debug(2, "Cache: %s is not cached", fn)
+            self.logger.debug(2, "%s is not cached", fn)
             return False
 
         mtime = bb.parse.cached_mtime_noerror(fn)
 
         # Check file still exists
         if mtime == 0:
-            logger.debug(2, "Cache: %s no longer exists", fn)
+            self.logger.debug(2, "%s no longer exists", fn)
             self.remove(fn)
             return False
 
         info_array = self.depends_cache[fn]
         # Check the file's timestamp
         if mtime != info_array[0].timestamp:
-            logger.debug(2, "Cache: %s changed", fn)
+            self.logger.debug(2, "%s changed", fn)
             self.remove(fn)
             return False
 
@@ -595,14 +617,14 @@
                 fmtime = bb.parse.cached_mtime_noerror(f)
                 # Check if file still exists
                 if old_mtime != 0 and fmtime == 0:
-                    logger.debug(2, "Cache: %s's dependency %s was removed",
-                                    fn, f)
+                    self.logger.debug(2, "%s's dependency %s was removed",
+                                         fn, f)
                     self.remove(fn)
                     return False
 
                 if (fmtime != old_mtime):
-                    logger.debug(2, "Cache: %s's dependency %s changed",
-                                    fn, f)
+                    self.logger.debug(2, "%s's dependency %s changed",
+                                         fn, f)
                     self.remove(fn)
                     return False
 
@@ -618,14 +640,14 @@
                         continue
                     f, exist = f.split(":")
                     if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
-                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
-                                        fn, f)
+                        self.logger.debug(2, "%s's file checksum list file %s changed",
+                                             fn, f)
                         self.remove(fn)
                         return False
 
-        if appends != info_array[0].appends:
-            logger.debug(2, "Cache: appends for %s changed", fn)
-            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
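+        # Compare as tuples so differing sequence types (list vs tuple)
+        # don't make identical appends look changed.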
+        if tuple(appends) != tuple(info_array[0].appends):
+            self.logger.debug(2, "appends for %s changed", fn)
+            self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
             self.remove(fn)
             return False
 
@@ -634,10 +656,10 @@
             virtualfn = variant2virtual(fn, cls)
             self.clean.add(virtualfn)
             if virtualfn not in self.depends_cache:
-                logger.debug(2, "Cache: %s is not cached", virtualfn)
+                self.logger.debug(2, "%s is not cached", virtualfn)
                 invalid = True
             elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
-                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
+                self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
                 invalid = True
 
         # If any one of the variants is not present, mark as invalid for all
@@ -645,10 +667,10 @@
             for cls in info_array[0].variants:
                 virtualfn = variant2virtual(fn, cls)
                 if virtualfn in self.clean:
-                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
+                    self.logger.debug(2, "Removing %s from cache", virtualfn)
                     self.clean.remove(virtualfn)
             if fn in self.clean:
-                logger.debug(2, "Cache: Marking %s as not clean", fn)
+                self.logger.debug(2, "Marking %s as not clean", fn)
                 self.clean.remove(fn)
             return False
 
@@ -661,10 +683,10 @@
         Called from the parser in error cases
         """
         if fn in self.depends_cache:
-            logger.debug(1, "Removing %s from cache", fn)
+            self.logger.debug(1, "Removing %s from cache", fn)
             del self.depends_cache[fn]
         if fn in self.clean:
-            logger.debug(1, "Marking %s as unclean", fn)
+            self.logger.debug(1, "Marking %s as unclean", fn)
             self.clean.remove(fn)
 
     def sync(self):
@@ -677,12 +699,13 @@
             return
 
         if self.cacheclean:
-            logger.debug(2, "Cache is clean, not saving.")
+            self.logger.debug(2, "Cache is clean, not saving.")
             return
 
         for cache_class in self.caches_array:
             cache_class_name = cache_class.__name__
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            cachefile = self.getCacheFile(cache_class.cachefile)
+            self.logger.debug(2, "Writing %s", cachefile)
             with open(cachefile, "wb") as f:
                 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                 p.dump(__cache_version__)
@@ -701,8 +724,18 @@
         return bb.parse.cached_mtime_noerror(cachefile)
 
     def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
+        if self.mc is not None:
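+            # A per-multiconfig cache should only see plain filenames;
+            # re-key the entry under this cache's own multiconfig.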
+            (fn, cls, mc) = virtualfn2realfn(filename)
+            if mc:
+                self.logger.error("Unexpected multiconfig %s", filename)
+                return
+
+            vfn = realfn2virtual(fn, cls, self.mc)
+        else:
+            vfn = filename
+
         if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
-            cacheData.add_from_recipeinfo(filename, info_array)
+            cacheData.add_from_recipeinfo(vfn, info_array)
 
             if watcher:
                 watcher(info_array[0].file_depends)
@@ -727,6 +760,65 @@
             info_array.append(cache_class(realfn, data))
         self.add_info(file_name, info_array, cacheData, parsed)
 
+class MulticonfigCache(Mapping):
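+    """Read-only mapping of multiconfig name to its per-config Cache."""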
+    def __init__(self, databuilder, data_hash, caches_array):
+        def progress(p):
+            nonlocal current_progress
+            nonlocal previous_progress
+            nonlocal previous_percent
+            nonlocal cachesize
+
+            current_progress = previous_progress + p
+
+            if current_progress > cachesize:
+                # we might have calculated incorrect total size because a file
+                # might've been written out just after we checked its size
+                cachesize = current_progress
+            current_percent = 100 * current_progress / cachesize
+            if current_percent > previous_percent:
+                previous_percent = current_percent
+                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+                                databuilder.data)
+
+
+        cachesize = 0
+        current_progress = 0
+        previous_progress = 0
+        previous_percent = 0
+        self.__caches = {}
+
+        for mc, mcdata in databuilder.mcdata.items():
+            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
+
+            cachesize += self.__caches[mc].cachesize()
+
+        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
+        loaded = 0
+
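+        # Load each per-multiconfig cache in turn, carrying the running
+        # progress offset forward so one progress stream spans all files.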
+        for c in self.__caches.values():
+            loaded += c.prepare_cache(progress)
+            previous_progress = current_progress
+
+        # Note: the depends cache count corresponds to the number of files
+        # parsed; a file with several caches still counts as one item.
+        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
+
+    def __len__(self):
+        return len(self.__caches)
+
+    def __getitem__(self, key):
+        return self.__caches[key]
+
+    def __contains__(self, key):
+        return key in self.__caches
+
+    def __iter__(self):
+        for k in self.__caches:
+            yield k
+
+    def keys(self):
+        return self.__caches.keys()
+
 
 def init(cooker):
     """