subtree updates

poky: 80f2b56ad8..d78650b980:
  Ahmed Hossam (1):
        go.bbclass: Allow adding parameters to go ldflags

  Alejandro Hernandez Samaniego (1):
        baremetal-image: Fix do_image dependencies

  Alexander Kanavin (153):
        meson: update 0.58.1 -> 0.59.1
        libcap: update 2.51 -> 2.54
        lua: add a recipe from meta-oe
        lua: update 5.3.6 -> 5.4.3
        rpm: update 4.16.1.3 -> 4.17.0
        libdnf: fix the rpm sqlite-only target setup
        libsolv: disable rpm bdb format support
        perl: do not build berkeley db module by default
        package_rpm: use zstd instead of xz
        qemu: update 6.0.0 -> 6.1.0
        runqemu: correct vga-virtio option to keep virgl enabled
        gnupg: update 2.3.1 -> 2.3.2
        pinentry: update 1.1.1 -> 1.2.0
        spirv-tools: update 2021.2 -> 2021.3
        glslang: update 11.5.0 -> 11.6.0
        shaderc: update 2021.1 -> 2021.2
        inetutils: update 2.1 -> 2.2
        systemd: update 249.3 -> 249.4
        lsof: update 4.91 -> 4.94.0
        libpam: update 1.5.1 -> 1.5.2
        rt-tests: update 2.1 -> 2.2
        libgit2: update 1.1.1 -> 1.2.0
        libssh2: update 1.9.0 -> 1.10.0
        libhandy: update 1.2.3 -> 1.4.0
        qemu: install qmp module without hardcoding the python version in oeqa scripts
        lttng-tools: replace ad hoc ptest fixup with upstream fixes
        rust: drop PV from include file names
        rust: update 1.54.0 -> 1.55.0
        librsvg: update 2.40.21 -> 2.52.0 (transition to rust!)
        librsvg: do not enable nativesdk
        librsvg: add backports to fix big endian targets (e.g. mips)
        librsvg: use only the target architecture to determine availability of atomic primitives
        librsvg: restore reproducibility
        adwaita-icon-theme: update 3.34/38 -> 41.0
        gstreamer1.0-plugins-bad: disable rsvg on x32
        rust/cargo: exclude UNINATIVE_LOADER from task signature
        rust-common.bbclass: rewrite toolchain wrappers in (native) python
        rust: do not write ar into target json definitions
        rust: generate target definitions from (arch, abi), not just arch
        openssl: update 1.1.1l -> 3.0.0
        cryptodev-tests: do not use -Werror with openssl 3
        serf: add an openssl 3 compatibility fix
        ruby: disable openssl extension
        glib-2.0: update 2.68.4 -> 2.70.0
        glib-networking: update 2.68.2 -> 2.70.0
        bison: update 3.7.6 -> 3.8.1
        libdnf: update 0.63.1 -> 0.64.0
        libexif: update 0.6.22 -> 0.6.23
        sudo: update 1.9.7p2 -> 1.9.8p1
        wget: update 1.21.1 -> 1.21.2
        coreutils: update 8.32 -> 9.0
        itstool: update 2.0.6 -> 2.0.7
        nghttp2: add recipe from meta-oe
        libsoup: add a 3.x recipe
        webkitgtk: trim down DEPENDS
        epiphany: trim down DEPENDS
        webkitgtk: update 2.32.3 -> 2.34.0
        epiphany: update 40.3 -> 41.0
        python3: update 3.9.7 -> 3.10.0
        libjitterentropy: update 3.1.0 -> 3.3.0
        kea: update 1.8.2 -> 2.0.0
        ghostscript: update 9.54.0 -> 9.55.0
        lighttpd: update 1.4.59 -> 1.4.60
        bluez5: update 5.61 -> 5.62
        ovmf: update 202105 -> 202108
        systemd: update 249.4 -> 249.5
        meson: update 0.59.1 -> 0.59.2
        python3-pip: update 21.2.4 -> 21.3
        valgrind: update 3.17.0 -> 3.18.1
        librsvg: update 2.52.0 -> 2.52.2
        libva: update 2.12.0 -> 2.13.0
        liberation-fonts: update 2.1.4 -> 2.1.5
        ca-certificates: update 20210119 -> 20211016
        curl: update 7.78.0 -> 7.79.1
        libgit2: update 1.2.0 -> 1.3.0
        libxcrypt: upgrade 4.4.25 -> 4.4.26
        bison: upgrade 3.8.1 -> 3.8.2
        cmake: update 3.21.2 -> 3.21.3
        git: upgrade 2.33.0 -> 2.33.1
        tzdata: upgrade 2021a -> 2021d
        ofono: upgrade 1.32 -> 1.33
        openssh: upgrade 8.7p1 -> 8.8p1
        sysvinit: upgrade 2.99 -> 3.00
        btrfs-tools: upgrade 5.13.1 -> 5.14.2
        ccache: upgrade 4.4 -> 4.4.2
        createrepo-c: upgrade 0.17.5 -> 0.17.6
        libcomps: upgrade 0.1.17 -> 0.1.18
        libedit: upgrade 20210714-3.1 -> 20210910-3.1
        librepo: upgrade 1.14.1 -> 1.14.2
        python3-jinja2: upgrade 3.0.1 -> 3.0.2
        python3-pygobject: upgrade 3.40.1 -> 3.42.0
        python3-setuptools: upgrade 58.0.4 -> 58.2.0
        vala: upgrade 0.52.5 -> 0.54.2
        acpica: upgrade 20210730 -> 20210930
        asciidoc: upgrade 9.1.0 -> 9.1.1
        libarchive: upgrade 3.5.1 -> 3.5.2
        msmtp: upgrade 1.8.15 -> 1.8.17
        sudo: upgrade 1.9.8p1 -> 1.9.8p2
        gobject-introspection: upgrade 1.68.0 -> 1.70.0
        gsettings-desktop-schemas: upgrade 40.0 -> 41.0
        json-glib: upgrade 1.6.4 -> 1.6.6
        libdazzle: upgrade 3.40.0 -> 3.42.0
        harfbuzz: upgrade 2.9.1 -> 3.0.0
        pango: upgrade 1.48.9 -> 1.48.10
        libinput: upgrade 1.19.0 -> 1.19.1
        linux-firmware: upgrade 20210818 -> 20210919
        wireless-regdb: upgrade 2021.07.14 -> 2021.08.28
        mpg123: upgrade 1.28.2 -> 1.29.0
        puzzles: upgrade to latest revision
        libwpe: upgrade 1.10.1 -> 1.12.0
        diffoscope: upgrade 182 -> 187
        fribidi: upgrade 1.0.10 -> 1.0.11
        iso-codes: upgrade 4.6.0 -> 4.7.0
        libatomic-ops: upgrade 7.6.10 -> 7.6.12
        libcap: upgrade 2.54 -> 2.59
        libmd: upgrade 1.0.3 -> 1.0.4
        libsoup-2.4: upgrade 2.72.0 -> 2.74.0
        gnupg: update 2.3.2 -> 2.3.3
        libssh2: skip one of the ptests until openssh 8.8 compatibility is fixed
        systemd: disable seccomp on mips32
        waffle: convert to git, website is down
        u-boot: upgrade 2021.07 -> 2021.10
        psplash: upgrade to latest revision
        stress-ng: convert to git, website is down
        json-c: correct upstream version check
        createrepo-c: upgrade 0.17.6 -> 0.17.7
        python3-numpy: upgrade 1.21.2 -> 1.21.3
        python3-pip: upgrade 21.3 -> 21.3.1
        python3-setuptools: upgrade 58.2.0 -> 58.3.0
        msmtp: upgrade 1.8.17 -> 1.8.18
        gi-docgen: upgrade 2021.7 -> 2021.8
        piglit: upgrade to latest revision
        libinput: upgrade 1.19.1 -> 1.19.2
        mpg123: upgrade 1.29.0 -> 1.29.2
        puzzles: upgrade to latest revision
        webkitgtk: upgrade 2.34.0 -> 2.34.1
        wpebackend-fdo: upgrade 1.10.0 -> 1.12.0
        diffoscope: upgrade 187 -> 188
        libcap: upgrade 2.59 -> 2.60
        vte: upgrade 0.64.2 -> 0.66.0
        ncurses: update 6.2 -> 6.3
        tzdata: update 2021d -> 2021e
        asciidoc: update 9.1.1 -> 10.0.0
        waffle: update 1.6.1 -> 1.7.0
        virgl: skip headless test on specific older distros and fail otherwise
        gptfdisk: address ncurses 6.3 compatibility
        powertop: address ncurses 6.3 compatibility
        systemtap: address ncurses 6.3 compatibility
        igt-gpu-tools: address meson 0.60 compatibility
        python3-pygobject: do not supply unknown g-i options
        gstreamer1.0-python: do not supply unknown g-i options
        webkitgtk: drop unnecessary SSE disabling patch
        cups: exclude beta/rc versions from version check

  Alexandre Belloni (5):
        inetutils: drop CVE-2021-40491 patch
        oeqa/selftest/sstatetests: fix typo ware -> were
        maintainers.inc: fix up rust-cross entry
        libevent,btrfs-tools: fix Upstream-Status tag
        ruby: fix Upstream-Status

  Andreas Müller (1):
        webkitgtk: add gperf-native to DEPENDS to fix build

  Andrei Gherzan (1):
        qemu: Define libnfs PACKAGECONFIG

  Andrej Valek (3):
        busybox: 1.34.0 -> 1.34.1
        featimage: refactor style
        kernel-fitimage: use correct kernel image

  Andres Beltran (1):
        buildhistory: Fix package output files for SDKs

  Bruce Ashfield (15):
        linux-yocto/5.14: scripts/gcc-plugins: consistently use HOSTCC
        linux-yocto/5.14: update to v5.14.8
        linux-yocto/5.14: bsp/qemuarm*-gfx: use virtio graphics
        linux-yocto/5.10: update to v5.10.69
        linux-yocto/5.10: update to v5.10.70
        linux-yocto/5.14: update to v5.14.9
        kernel-yocto: don't apply config metadata patches twice
        linux-yocto/5.14: revert: scripts/gcc-plugins: consistently use HOSTCC
        linux-yocto/5.10: update to v5.10.73
        linux-yocto/5.14: update to v5.14.12
        linux-yocto/5.14: update to v5.14.13
        linux-yocto/5.10: update to v5.10.74
        linux-yocto/5.14: common-pc: enable CONFIG_ATA_PIIX as built-in
        linux-yocto/5.14: update to v5.14.14
        linux-yocto/5.10: update to v5.10.75

  Caner Altinbasak (2):
        bitbake: npmsw: Avoid race condition with multiple npm fetchers
        bitbake: fetch2: Do not attempt to verify donestamp if download is not complete

  Changhyeok Bae (2):
        iproute2: update to 5.14.0
        ethtool: update to 5.14

  Changqing Li (1):
        lttng-ust: fix do_compile failure for arm32 with DEBUG_BUILD enabled

  Daiane Angolini (2):
        machine/qemuarm*: Fix UBOOT_MACHINE value
        ref-manual: Update how to set a useradd password

  Daniel McGregor (3):
        bison: prevent checking for textstyle.
        bitbake.conf: Add gpg-agent as a host tool
        sstate: Allow validation of sstate signatures against list of keys

  David Joyner (1):
        rust: add support for big endian 64-bit PowerPC

  Denys Dmytriyenko (2):
        wayland-protocols: upgrade 1.21 -> 1.22
        wayland-protocols: upgrade 1.22 -> 1.23

  Fred Liu (1):
        glibc: Drop libcidn package

  Henry Kleynhans (1):
        sstate: Switch to ZStandard compressor support

  Hsia-Jun(Randy) Li (1):
        meson: install native file in sdk

  Ian Ray (1):
        archiver: Configurable tarball compression

  Jon Mason (10):
        oeqa/manual: Fix no longer valid URLs
        bitbake: bitbake:toaster:test: Update SSTATE URL
        yocto-bsp/poky: replace http with https for URLs
        bitbake: bitbake: replace http with https for URLs
        documentation: update sources mirror URL
        documentation: replace http with https for URLs
        documentation: use YOCTO_DL_URL
        dev-manual: remove errant /
        runqemu: add DEPLOY_DIR_IMAGE replacement in QB_OPT_APPEND
        bitbake: bitbake-user-manual: update sources mirror URL

  Jose Quaresma (15):
        gstreamer1.0: 1.18.4 -> 1.18.5
        gstreamer1.0-plugins-base: 1.18.4 -> 1.18.5
        gstreamer1.0-plugins-good: 1.18.4 -> 1.18.5
        gstreamer1.0-plugins-bad: 1.18.4 -> 1.18.5
        gstreamer1.0-plugins-ugly: 1.18.4 -> 1.18.5
        gstreamer1.0-rtsp-server: 1.18.4 -> 1.18.5
        gstreamer1.0-libav: 1.18.4 -> 1.18.5
        gstreamer1.0-vaapi: 1.18.4 -> 1.18.5
        gstreamer1.0-omx: 1.18.4 -> 1.18.5
        gstreamer1.0-python: 1.18.4 -> 1.18.5
        gst-devtools: 1.18.4 -> 1.18.5
        gst-examples: 1.18.4 -> 1.18.5
        patch.bbclass: when the patch fails show more info on the fatal error
        sstate.bbclass: count the files on mirrors using the pre local files
        sstate: fix touching files inside pseudo

  Joshua Watt (4):
        bitbake: async: Close sync client event loop
        bitbake: hashserv: Add tests for diverging reports
        bitbake: hashserv: Fix diverging report race condition
        classes/populate_sdk_base: Add setscene tasks

  Kai Kang (1):
        sudo: update multilib patch for sudo.conf

  Khem Raj (11):
        pkgconfig: Update to latest
        libseccomp: Upgrade to 2.5.2 and beyond
        openssl: Drop riscv32 upstreamed patches
        opensbi-payloads.inc: Use strings for fallback
        mesa: Enable svga for x86 only
        qemu: Add knob for enabling PMDK pmem support
        opensbi-payloads: Add dependency on kernel if fdt is set
        librsvg: Fix vendored libc to work on riscv and musl
        librsvg: Bump to 2.52.0 -> 2.52.2
        rust: Upgrade to 1.56.0
        librsvg: Disable 64bit atomics in crossbeam for rv32

  Kiran Surendran (1):
        ffmpeg: fix CVE-2021-38114

  Konrad Weihmann (1):
        libical: fix append in DEPENDS

  Lukasz Majewski (1):
        glibc: Extract common code to build tests to glibc-tests.inc

  Marek Vasut (1):
        piglit: upgrade to latest revision

  Martin Jansa (2):
        webkitgtk: add PACKAGECONFIG for opengl-or-es
        boost: allow searching for python310

  Maximilian Blenk (1):
        mount-copybind: add rootcontext mountoption for overlayfs

  Michael Halstead (2):
        scripts/autobuilder-worker-prereq-tests: jinja2 check
        releases: update to include 3.4 honister

  Michael Opdenacker (18):
        manuals: font fixes
        ref-manual: document DEBUG_PREFIX_MAP
        manuals: replace "apt-get" by "apt"
        Makefile: allow epub and latexpdf outputs to use diagrams from SVG sources
        conf.py: use PNG first in EPUB output
        overview-manual: SVG diagram for the git workflow
        docs: add "make all" to build old types of docs (html, epub, pdf) at once
        manuals: introduce 'yocto_sstate' extlink
        overview-manual: simplify expression
        dev-manual: correct NO_GENERIC_LICENSE section title
        dev-manual: warn about license compliance issues when static libraries are used
        devpyshell: rename to pydevshell
        ref-manual: document TOOLCHAIN_HOST_TASK_ESDK
        manuals: rename "devpyshell" to "pydevshell"
        ref-manual: document SOURCE_DATE_EPOCH
        ref-manual: fix description of SOURCE_DATE_EPOCH
        releases.rst: fix release number for 3.3.3
        docs: poky.yaml: updates for 3.4

  Mike Crowe (2):
        lib/oe/qa,insane: Move extra error handling functions to library
        insane,license,license_image: Allow treating license problems as errors

  Mingli Yu (1):
        packagedata.py: silence a DeprecationWarning

  Oleh Matiusha (1):
        findutils: add ptest

  Oleksandr Kravchuk (15):
        python3: update to 3.9.7
        python3-git: update to 3.1.24
        python3-dbusmock: update to 0.24.0
        python3-setuptools: update to 58.0.4
        python3-setuptools: minor cleanup
        xf86-input-libinput: update to 1.2.0
        libinput: update to 1.19.0
        libxi: update to 1.8
        xorgproto: update to 2021.5
        xkeyboard-config: update to 2.34
        libxkbcommon: update to 1.3.1
        mirrors.bbclass: remove dead infozip mirrors
        man-pages: update to 5.13
        python3-smmap: update to 5.0.0
        python3-pyparsing: update to 3.0.0

  Pablo Saavedra Rodiño (1):
        mesa: upgrade 21.2.1 -> 21.2.4

  Paul Barker (1):
        bitbake: doc: bitbake-user-manual: Document BB_GIT_SHALLOW and friends

  Paul Eggleton (7):
        migration-3.4: tweak overrides change section
        ref-manual: remove meta class
        poky.yaml: add lz4 and zstd to essential host packages
        migration-3.4: add additional migration info
        migration: tweak introduction section
        poky.yaml: fix lz4 package name for older Ubuntu versions
        migration-3.4: add some extra packaging notes

  Pavel Zhukov (1):
        weston: wrapper for weston modules argument

  Peter Hoyes (2):
        u-boot: Convert ${UBOOT_ENV}.cmd into ${UBOOT_ENV}.scr
        u-boot: Fix syntax error in ${UBOOT_ENV}.scr compilation

  Peter Kjellerstedt (2):
        meson.bbclass: Remove empty egg-info directories before running meson
        qemu.inc: Remove empty egg-info directories before running meson

  Petr Vorel (1):
        ltp: Update to 20210927

  Pgowda (2):
        Fix rust-native build issue when debug is enabled
        rust-cross: Fix directory not deleted for race glibc vs. musl

  Ralph Siemsen (1):
        tar: filter CVEs using vendor name

  Randy Li (1):
        meson: move lang args to the right section

  Richard Purdie (54):
        sstatesig: Only apply group/other permissions to pseudo files
        rpm: Deterministically set vendor macro entry
        abi_version/sstate: Bump to fix rpm corruption issues
        multilib: Avoid sysroot race issues when multilib enabled
        bitbake: knotty/uihelper: Show setscene task progress in summary output
        bitbake: bitbake-worker: Handle pseudo shutdown in Ctrl+C case
        poky.conf: Update tested distros list with recent changes
        bitbake: hashserv: Improve behaviour for better determinism/sstate reuse
        poky.conf: Bump version for 3.4 honister release
        build-appliance-image: Update to honister head revision
        bitbake: bitbake: Bump to version 1.52.0
        build-appliance-image: Update to honister head revision
        layer.conf: Extend recipes not to install without explicit dependencies
        libxml2: Use python3targetconfig to fix reproducibility issue
        libnewt: Use python3targetconfig to fix reproducibility issue
        bootchart2: Don't compile python modules
        linux-yocto-dev: Ensure DEPENDS matches recent 5.14 kernel changes
        base/insane: Import oe.qa by default
        base: Clean up unneeded len() calls
        base: Use repr() for printing exceptions
        bitbake.conf: Add BB_CURRENTTASK to BB_HASHEXCLUDE
        reproducible_build: Drop obsolete sstate workaround
        python: Update now reproducible builds are the default
        bitbake: bitbake-worker: Set BB_CURRENTTASK earlier
        bitbake: compress: Allow to operate on file descriptor
        bitbake: siggen: Change file format of siginfo files to use zstd compressed json
        sstate: Fixes for eSDK generation after zstd switch
        patch: Use repr() with exceptions instead of str()
        reproducible_build: Drop now unneeded compiler warning
        reproducible: Move class function code into library
        reproducible: Move variable definitions to bitbake.conf
        reproducible: Merge code into base.bbclass
        kernel: Add KERNEL_DEBUG_TIMESTAMPS variable
        reproducible: Drop BUILD_REPRODUCIBLE_BINARIES variable
        kernel: Rework kernel make flag to variable mappings
        oeqa: Update cleanup code to wait for hashserv exit
        poky: Reproducible builds are now the default
        bitbake: tests/runqueue: Ensure hashserv exits before deleting files
        bitbake: bitbake-worker: Add debug when unpickle fails
        bitbake: siggen: Fix type conversion issues
        bitbake: test/fetch: Update urls to match upstream branch name changes
        libtool: Update patchset to match those submitted upstream
        staging: Fix autoconf-native rebuild failure
        patch: Fix exception reporting with repr()
        bitbake: fetch2/perforce: Fix typo
        bitbake: bitbake: Bump to post release version 1.53.0
        poky.conf: Post release version bump
        gcc: Merge three related patches together
        gcc: Drop sdt (dtrace) header patch
        gcc: Drop broken/unneeded patch
        bitbake: tests/runqueue: Drop python version test decorators
        gcc: Update patches submitted/merged upstream
        gcc: Drop testsuite failure fix patch
        gcc: Add missing patch Upstream-Status

  Ross Burton (21):
        bitbake: tests/utils: mark a regex as a raw string
        bitbake: tests/fetch: prefix the FetcherTest temporary directory
        bitbake: fetch2: clarify the command-no-found error message
        bitbake: fetch2/gitsm: remove the 'nugget' SRCREV caching
        linux-yocto: add libmpc-native to DEPENDS
        bitbake: fetch2: document checkstatus API
        curl: fix CVE-2021-22945 through -22947
        oe/license: implement ast.NodeVisitor.visit_Constant
        license.bbclass: implement ast.NodeVisitor.visit_Constant
        bitbake: codegen: implement ast.NodeVisitor.visit_Constant
        testimage: fix unclosed testdata file
        oe/utils: log exceptions in ThreadedWorker functions
        sstate: don't silently handle all exceptions in sstate_checkhashes
        gawk: replace AR patch with upstreamed patch
        gawk: reduce strictness of the time test
        strace: remove obsolete patch
        strace: remove TOOLCHAIN assignment
        oeqa/runtime: load modules using importlib
        oeqa/runtime: search sys.path explicitly for modules
        testimage: remove target_modules_path
        strace: show test suite log on failure

  Sakib Sajal (1):
        go: upgrade 1.16.7 -> 1.16.8

  Saul Wold (3):
        spdx.py: Add annotation to relationship
        create-spdx: add create_annotation function
        create-spdx: cross recipes are native also

  Stefan Herbrechtsmeier (17):
        bitbake: fetch2: npm: Support npm archives with missing search directory mode
        bitbake: fetch2: npm: Create config npmrc in environment instantiation
        bitbake: fetch2: npmsw: Add support for local tarball and link sources
        npm: Add support for EXTRA_OENPM arguments
        recipetool: Move license md5sums into CSV files
        recipetool: Skip common source files in guess_license
        recipetool: Change default parameter fallback_licenses of function split_pkg_licenses from None to []
        recipetool: ignore empty license files
        recipetool: Add logger info for missing license entries
        recipetool: Add support for linenumbers to licenses.csv
        recipetool: npm: Do not add package.json files to LIC_FILES_CHKSUM
        recipetool: npm: Use README as license fallback
        npm: Add variable NPM_NODEDIR with default value
        npm: Use configs for npm environment and args for npm run command
        recipetool: Rework crunch_license to recognize more variants
        recipetool: Simplify common source files skip in guess_license
        npm: Remove unnecessary configs argument from npm run command

  Teoh Jay Shen (1):
        oeqa/runtime/parselogs: modified drm error in common errors list

  Thomas Perrot (1):
        image_types: allow the creation of block devices on top of UBI volumes

  Tim Orling (17):
        ptest-runner: install -collect-system-data script
        python3-hypothesis: upgrade 6.15.0 -> 6.23.2
        python3-importlib-metadata: upgrade 4.6.4 -> 4.8.1
        python3-more-itertools: upgrade 8.9.0 -> 8.10.0
        python3-zipp: upgrade 3.5.0 -> 3.6.0
        libconvert-asn1-perl: upgrade 0.31 -> 0.33
        python3-pytest: upgrade 6.2.4 -> 6.2.5
        at-spi2-core: upgrade 2.40.3 -> 2.42.0
        python3-packaging: DEPENDS on python3-setuptools-native
        python3-packaging: BBCLASSEXTEND nativesdk
        python3-tomli: add recipe for 1.2.1
        python3-setuptools-scm: upgrade 6.0.1 -> 6.3.2
        python3-setuptools: _distutils/sysconfig fix
        python3-pyyaml: upgrade 5.4.1 -> 6.0
        python3-manifest: -pprint RDEPENDS on -profile
        python3-hypothesis: upgrade 6.23.2 -> 6.24.0
        python3-tomli: upgrade 1.2.1 -> 1.2.2

  Tobias Kaufmann (1):
        mount-copybind: add SELinux support

  Yureka (1):
        systemd: add missing include for musl

  Zoltán Böszörményi (1):
        libpam: Fix build with DISTRO_FEATURES usrmerge

  hongxu (1):
        libcap: fix nativesdk-libcap relocate failure

  wangmy (20):
        diffoscope: upgrade 181 -> 182
        cmake: upgrade 3.21.1 -> 3.21.2
        gzip: upgrade 1.10 -> 1.11
        harfbuzz: upgrade 2.9.0 -> 2.9.1
        vulkan-headers: upgrade 1.2.182 -> 1.2.191
        vulkan-loader: upgrade 1.2.182 -> 1.2.191
        vulkan-tools: upgrade 1.2.182 -> 1.2.191
        help2man: upgrade 1.48.4 -> 1.48.5
        python3-more-itertools: upgrade 8.8.0 -> 8.9.0
        acpid: upgrade 2.0.32 -> 2.0.33
        bind: upgrade 9.16.20 -> 9.16.21
        createrepo-c: upgrade 0.17.4 -> 0.17.5
        dnf: upgrade 4.8.0 -> 4.9.0
        ell: upgrade 0.43 -> 0.44
        libical: upgrade 3.0.10 -> 3.0.11
        dhcpcd: upgrade 9.4.0 -> 9.4.1
        dnf: upgrade 4.9.0 -> 4.10.0
        file: upgrade 5.40 -> 5.41
        libdnf: upgrade 0.64.0 -> 0.65.0
        lttng-tools: upgrade 2.13.0 -> 2.13.1

  zhengruoqin (2):
        libsolv: upgrade 0.7.19 -> 0.7.20
        python3-pycryptodome: upgrade 3.10.1 -> 3.11.0

meta-security: a85fbe980e..e81c15f851:
  Armin Kuster (1):
        python3-fail2ban: fix build failure and cleanup

  Kai Kang (1):
        sssd: re-package to fix QA issues

  Kristian Klausen (1):
        swtpm: update to 0.6.1

  Liwei Song (1):
        recipes-security/chipsec: platform security assessment framework

meta-openembedded: 23dc4f060f..2f6797d8d6:
  Alexander Kanavin (23):
        python3-yappi: add python 3.10 fix
        python3-gmpy2: update 2.0.8 -> 2.1.0rc1
        python3-m2crypto: address build failure with openssl 3.x
        lua: remove recipe as it is now in oe-core
        nghttp2: remove recipe as it is now in oe-core
        kronosnet: update 1.20 -> 1.22
        polkit: update 0.116 -> 0.119
        openflow: disable openssl PACKAGECONFIG
        openipmi: add a python 3.10 compatibility patch
        strongswan: disable openssl PACKAGECONFIG
        pkcs11-helper: backport an openssl 3.x patch
        nodejs: use -fpermissive
        nodejs: add a python 3.10 compatibility patch
        php: disable openssl PACKAGECONFIG
        libsigrokdecode: add a python 3.10 compatibility patch
        freerdp: backport openssl 3.x patches
        opensc: do not use -Werror
        cfengine: blacklist due to openssl 3.x incompatibility
        netplan: do not use -Werror
        boinc-client: blacklist due to openssl 3.x incompatibility
        ldns: blacklist due to openssl 3.x incompatibility
        surf: add a webkit 2.34 compatibility patch
        mozjs: update 60.9.0 -> 91.1.0

  Alexander Thoma (1):
        keyutils: fix install path

  Armin Kuster (1):
        Apache: Several CVE fixes

  Chen Qi (1):
        ntfs-3g-ntfsprogs: upgrade to 2021.8.22

  Gianfranco (2):
        vboxguestdrivers: upgrade 6.1.26 -> 6.1.28
        vboxguestdrivers: Fix build failure due to the last update.

  Ivan Maidanski (1):
        bdwgc: upgrade 8.0.4 -> 8.0.6

  Jeremy Kerr (1):
        mctp: Add MCTP control utilities

  Joshua Watt (2):
        colord-native: Inherit pkgconfig
        mutter: Move gsettings to a separate package

  Justin Bronder (2):
        python3-mccabe: fix LICENSE and RDEPENDS
        python3-pyflakes: fix LICENSE, HOMEPAGE and RDEPENDS

  Khem Raj (44):
        packagegroup-meta-oe: Add lv-drivers and lvgl
        python3-pystache: Remove need for python2x
        python3-behave: Switch to using github src_uri
        python3-simpleeval: Remove use_2to3
        python3-parse-type: Do not ask for 2to3
        python3-anyjson: Do not enable 2to3
        python3-pybluez: Do not set 2to3
        packagegroup-meta-networking: Remove nghttp2
        packagegroup-meta-oe: Remove lua
        gjs: Inherit pkgconfig
        luaposix: Use lua 5.4
        devilspie2: Switch to using github repo
        devilspie2: Add missing dependency on libxinerama
        kronosnet: Correct path to poll.h
        luaposix: Upgrade to 35.1
        xfce.bbclass: Mark string as raw in a regexp
        nodejs: add -fpermissive BUILD_CXXFLAGS
        pcimem: Add riscv64 into COMPATIBLE_HOST
        luaposix: Depend on libxcrypt-native
        libcrypt-openssl-rsa-perl: Upgrade to 0.32
        xrdp: Upgrade to 0.9.17
        yelp,yelp-xsl,yelp-tools: Upgrade to 41 release
        mozjs: Disable JIT on mips
        libcrypt-openssl-rsa-perl: Fix build with openssl3+
        nodejs: Upgrade to 14.18.1
        librest: Use master with libsoup3 support
        gnome-online-accounts: Upgrade to 3.43.1
        iwd: Fix build with musl
        xrdp: Exclude from builds
        xorgxrdp: Add to exclusion list for now
        bluepy: Set PV and correct syntax for RDEPENDS
        python3-kivy: Use new override syntax in RDEPENDS
        bluepy: Fix permissions on tarball
        nodejs: Upgrade to 16.11.1
        mozjs: Fix build on mips/musl
        mozjs: Fix musl miscompiles with HAVE_THREAD_TLS_KEYWORD
        luaposix: Use virtual/crypt instead of hardcoding libxcrypt
        packagegroup-meta-oe: Add libjs-jquery-globalize and libjs-jquery-cookie
        mozjs: Add riscv32 support
        driverctl: Rename to have PV in recipe name
        tbb: Link with libatomic when using clang compiler on rv32
        gimp: Disable vector icons on mips/rv32 using clang
        mozjs: Update to 91.2.0
        mozjs: Fix build for 32bit systems disabling JIT

  Kurt Kiefer (2):
        mozjs: move large static library into staticdev package
        gyp: fix for compatibility with Python 3.10

  Leon Anavi (6):
        python3-aiohue: Upgrade 2.6.1 -> 2.6.3
        python3-paho-mqtt: Upgrade 1.5.1 -> 1.6.1
        python3-thrift: Upgrade 0.13.0 -> 0.15.0
        python3-prompt-toolkit: Upgrade 3.0.19 -> 3.0.21
        python3-traitlets: Upgrade 5.1.0 -> 5.1.1
        python3-prettytable: Upgrade 2.1.0 -> 2.2.1

  LiweiSong (1):
        Revert "chipsec: platform security assessment framework"

  Marco Cavallini (1):
        bluepy_git.bb: Added recipe for BluePy Python interface to Bluetooth LE on Linux (v2)

  Marek Vasut (3):
        apitrace: Upgrade to latest version
        apitrace: Conditionally enable X11 support
        yavta: Upgrade to latest version and drop downstream patch

  Martin Jansa (4):
        protobuf: import a patch from meta-webosose to fix building protobuf-c with gold linker
        grpc: revert GRPCPP_ABSEIL_SYNC change from 1.40.0 to fix undefined references to absl::lts_20210324::Mutex::~Mutex()
        luaposix: depend on target libxcrypt instead of native
        lv-lib-png: add wayland to REQUIRED_DISTRO_FEATURES

  Matthias Klein (1):
        gpsd: Install also the generated parts of the Python library

  Matthias Schiffer (1):
        byacc: switch to official HTTPS mirror

  Mingli Yu (1):
        opencv: remove setup_vars_opencv4.sh

  Oleksandr Kravchuk (72):
        iwd: update to 1.18
        nano: update to 5.9
        ostree: update to 2021.5
        pegtl: update to 3.2.1
        pmdk: update to 1.11.1
        protobuf-c: update to 1.4.0
        thingsboard-gateway: update to 2.8
        libbpf: fix PV
        libcdio-paranoia: update to 10.2+2.0.1
        ttf-abyssinica: update to 2.100
        ttf-dejavu: add UPSTREAM_CHECK_URI
        ttf-vlgothic: update to 20200720
        networkmanager: update to 1.32.12
        nng: update to 1.4.0
        opensaf: update to 5.21.09
        python3-absl: update 0.14.1
        python3-alembic: update to 1.7.4
        python3-astroid: update to 2.8.2
        python3-cantools: update to 36.4.0
        python3-certifi: update to 2021.10.8
        python3-cffi: update to 1.15.0
        python3-colorlog: update to 6.5.0
        python3-coverage: update to 6.0.2
        python3-cryptography-vectors: update to 35.0.0
        python3-dateparser: update to 1.1.0
        python3-elementpath: update to 2.3.2
        python3-eventlet: update to 0.32.0
        python3-google-api-core: update to 2.1.1
        python3-google-api-python-client: update to 2.26.1
        python3-google-auth: update to 2.3.0
        python3-grpcio-tools: update to 1.41.0
        python3-grpcio: update to 1.41.0
        python3-h2: update to 4.1.0
        python3-haversine: update to 2.5.1
        python3-httplib2: update to 0.20.1
        python3-idna: update to 3.3
        python3-iso3166: update to 2.0.2
        python3-joblib: update to 1.1.0
        python3-jsonrpcserver: update to 5.0.3
        python3-paramiko: update to 2.8.0
        python3-portion: update to 2.2.0
        python3-protobuf: update to 3.18.1
        python3-pulsectl: update to 21.10.4
        python3-pycodestyle: update to 2.8.0
        python3-pyephem: update to 4.1
        python3-pyflakes: update to 2.4.0
        python3-pyjwt: update to 2.2.0
        python3-pykickstart: update to 3.34
        python3-pyopenssl: update to 21.0.0
        python3-pyperf: update to 2.3.0
        python3-pytest-timeout: update 2.0.1
        python3-pytz: update to 2021.3
        python3-regex: update to 2021.10.8
        python3-sentry-sdk: update to 1.4.3
        python3-sympy: update to 1.9
        python3-twitter: update to 4.1.0
        python3-uritemplate: update to 4.1.1
        python3-websockets: update to 10.0
        python3-wrapt: update to 1.13.2
        python3-xlsxwriter: update to 3.0.1
        python3-xmlschema: update to 1.8.0
        python3-yarl: update to 1.7.0
        python3-typeguard: update to 2.13.0
        python3-pyatspi: update to 2.38.1
        python3-h5py: update to 3.5.0
        python3-pybind11-json: update to 0.2.11
        python3-pychromecast: update to 9.3.1
        python3-tzlocal: update to 4.0.1
        python3-zeroconf: update to 0.36.9
        leptonica: update to 1.82.0
        redis-plus-plus: update to 1.3.2
        hiredis: update to 1.0.2

  Peter Bergin (3):
        pipewire: remove manpages class
        pipewire: also handle pipewire-media-session in systemd class
        pipewire: rework PACKAGECONFIG for systemd service files

  Philippe Coval (5):
        lvgl: Add recipe for Lightweight Graphics Library
        lv-drivers: Add recipe for lvgl lib
        lv-lib-png: Add recipe for LVGL for handling PNG format
        lvgl: Add lv_lib_png to packagegroup
        lvgl: Lint recipe

  Roland Hieber (1):
        rapidjson: override hard-coded lib install dir

  Shiping Ji (1):
        Add recipe for driverctl

  Tim Orling (6):
        unicode-ucd: upgrade 12.1.0 -> 14.0.0
        gnome-screenshot: add recipe for 40.0
        libgweather: upgrade 3.36.2 -> 40.0
        colord: inherit pkgconfig
        gfbgraph: patch to build with latest rest
        gnome-calendar: upgrade 3.38.1 -> 41.0

  Trevor Gamblin (3):
        python3-posix-ipc: Add recipe for version 1.0.5
        python3-pyinotify: Add fcntl, logging to RDEPENDS
        python3-cvxopt: upgrade 1.2.6 -> 1.2.7

  Vyacheslav Yurkov (3):
        grpc: fix cross-compilation of grpc applications
        grpc: fix cross-compilation of grpc applications
        grpc: upgrade 1.38.1 -> 1.41.0

  Wang Mingyu (1):
        poppler: upgrade 21.09.0 -> 21.10.0

  Yi Zhao (3):
        samba: upgrade 4.14.7 -> 4.14.8
        audit: upgrade 3.0.5 -> 3.0.6
        strongswan: upgrade 5.9.3 -> 5.9.4

  Zoltán Böszörményi (1):
        python3-jsmin: Upgrade to version 3.0.0

  wangmy (11):
        openvpn: upgrade 2.5.3 -> 2.5.4
        redis: upgrade 6.2.5 -> 6.2.6
        span-lite: upgrade 0.10.1 -> 0.10.3
        bdwgc: upgrade 8.0.6 -> 8.2.0
        icewm: upgrade 2.7.0 -> 2.8.0
        ifenslave: upgrade 2.12 -> 2.13
        asio: upgrade 1.18.1.bb -> 1.20.0
        libgusb: upgrade 0.3.7 -> 0.3.8
        libxmlb: upgrade 0.3.2 -> 0.3.3
        xdebug: upgrade 2.9.5 -> 3.1.1
        cryptsetup: upgrade 2.3.6 -> 2.4.1

  zangrc (25):
        python3-ipython: upgrade 7.27.0 -> 7.28.0
        python3-jdatetime: upgrade 3.6.2 -> 3.6.4
        python3-multidict: upgrade 5.1.0 -> 5.2.0
        python3-openpyxl: upgrade 3.0.8 -> 3.0.9
        python3-pyscaffold: upgrade 4.0.2 -> 4.1
        python3-transitions: upgrade 0.8.9 -> 0.8.10
        networkmanager-openvpn: upgrade 1.8.14 -> 1.8.16
        ser2net: upgrade 4.3.3 -> 4.3.4
        python3-humanize: upgrade 3.11.0 -> 3.12.0
        python3-nmap: upgrade 1.5.0 -> 1.5.1
        libjs-jquery-cookie: Add recipe
        libjs-jquery-globalize: Add recipe
        python3-cachetools: upgrade 4.2.2 -> 4.2.4
        python3-cbor2: upgrade 5.4.1 -> 5.4.2
        python3-click: upgrade 8.0.1 -> 8.0.3
        python3-configargparse: upgrade 1.5.2 -> 1.5.3
        python3-flask: upgrade 2.0.1 -> 2.0.2
        python3-pyscaffold: upgrade 4.1 -> 4.1.1
        python3-werkzeug: upgrade 2.0.1 -> 2.0.2
        python3-absl: upgrade 0.14.1 -> 0.15.0
        python3-pandas: upgrade 1.3.3 -> 1.3.4
        python3-pulsectl: upgrade 21.10.4 -> 21.10.5
        python3-pyjwt: upgrade 2.2.0 -> 2.3.0
        python3-pytest-asyncio: upgrade 0.15.1 -> 0.16.0
        python3-robotframework: upgrade 4.1.1 -> 4.1.2

  zhengruoqin (15):
        python3-ujson: upgrade 4.1.0 -> 4.2.0
        python3-urllib3: upgrade 1.26.6 -> 1.26.7
        python3-watchdog: upgrade 2.1.5 -> 2.1.6
        gensio: upgrade 2.2.9 -> 2.3.1
        nlohmann-json: upgrade 3.10.2 -> 3.10.4
        libencode-perl: upgrade 3.12 -> 3.16
        python3-socketio: upgrade 5.4.0 -> 5.4.1
        python3-sqlalchemy: upgrade 1.4.23 -> 1.4.26
        python3-stevedore: upgrade 3.4.0 -> 3.5.0
        autofs: upgrade 5.1.7 -> 5.1.8
        links: upgrade 2.22 -> 2.25
        atftp: upgrade 0.7.4 -> 0.7.5
        python3-gmqtt: upgrade 0.6.10 -> 0.6.11
        python3-google-api-python-client: upgrade 2.26.1 -> 2.27.0
        python3-greenlet: upgrade 1.1.1 -> 1.1.2

meta-raspberrypi: 9eb4879cf4..90b3ac6fb3:
  Zygmunt Krynicki (1):
        rpi-config: warn on config.txt lines exceeding 80 bytes
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Change-Id: I9e75d5bd606a913fbe69e6735c9ecafc436441ba
diff --git a/poky/bitbake/README b/poky/bitbake/README
index 2d5cd25..80a9711 100644
--- a/poky/bitbake/README
+++ b/poky/bitbake/README
@@ -7,7 +7,7 @@
 stacks using a task-oriented approach.
 
 For information about Bitbake, see the OpenEmbedded website:
-    http://www.openembedded.org/
+    https://www.openembedded.org/
 
 Bitbake plain documentation can be found under the doc directory or its integrated
 html version at the Yocto Project website:
@@ -17,7 +17,7 @@
 ------------
 
 Please refer to
-http://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
+https://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
 for guidelines on how to submit patches, just note that the latter documentation is intended
 for OpenEmbedded (and its core) not bitbake patches (bitbake-devel@lists.openembedded.org)
 but in general main guidelines apply. Once the commit(s) have been created, the way to send
@@ -28,11 +28,11 @@
 
 Mailing list:
 
-    http://lists.openembedded.org/mailman/listinfo/bitbake-devel
+    https://lists.openembedded.org/g/bitbake-devel
 
 Source code:
 
-    http://git.openembedded.org/bitbake/
+    https://git.openembedded.org/bitbake/
 
 Testing:
 
diff --git a/poky/bitbake/bin/bitbake b/poky/bitbake/bin/bitbake
index d3ee8e9..dcabeae 100755
--- a/poky/bitbake/bin/bitbake
+++ b/poky/bitbake/bin/bitbake
@@ -28,7 +28,7 @@
 if sys.getfilesystemencoding() != "utf-8":
     sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
 
-__version__ = "1.51.1"
+__version__ = "1.53.0"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
diff --git a/poky/bitbake/bin/bitbake-worker b/poky/bitbake/bin/bitbake-worker
index 115bc1d..bf96207 100755
--- a/poky/bitbake/bin/bitbake-worker
+++ b/poky/bitbake/bin/bitbake-worker
@@ -237,6 +237,7 @@
                 the_data = databuilder.mcdata[mc]
                 the_data.setVar("BB_WORKERCONTEXT", "1")
                 the_data.setVar("BB_TASKDEPDATA", taskdepdata)
+                the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
                 if cfg.limited_deps:
                     the_data.setVar("BB_LIMITEDDEPS", "1")
                 the_data.setVar("BUILDNAME", workerdata["buildname"])
@@ -287,10 +288,12 @@
             try:
                 if dry_run:
                     return 0
-                ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
-                if fakeroot:
-                    fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
-                    subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
+                try:
+                    ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+                finally:
+                    if fakeroot:
+                        fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
+                        subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
                 return ret
             except:
                 os._exit(1)
@@ -415,7 +418,11 @@
         if self.queue.startswith(b"<" + item + b">"):
             index = self.queue.find(b"</" + item + b">")
             while index != -1:
-                func(self.queue[(len(item) + 2):index])
+                try:
+                    func(self.queue[(len(item) + 2):index])
+                except pickle.UnpicklingError:
+                    workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
+                    raise
                 self.queue = self.queue[(index + len(item) + 3):]
                 index = self.queue.find(b"</" + item + b">")
 
diff --git a/poky/bitbake/doc/README b/poky/bitbake/doc/README
index 6259582..16e6cdf 100644
--- a/poky/bitbake/doc/README
+++ b/poky/bitbake/doc/README
@@ -13,7 +13,7 @@
 Each folder is self-contained regarding content and figures.
 
 If you want to find HTML versions of the BitBake manuals on the web, 
-go to http://www.openembedded.org/wiki/Documentation. 
+go to https://www.openembedded.org/wiki/Documentation.
 
 Sphinx
 ======
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index a944d0f..4396830 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -74,7 +74,7 @@
 some confusion when you are providing URLs for the :term:`SRC_URI` variable.
 Consider the following two URLs::
 
-   http://git.yoctoproject.org/git/poky;protocol=git
+   https://git.yoctoproject.org/git/poky;protocol=git
    git://git.yoctoproject.org/git/poky;protocol=http
 
 In the former case, the URL is passed to the ``wget`` fetcher, which does not
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
index 83a415d..722dc5a 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
@@ -99,7 +99,7 @@
 
    This example was inspired by and drew heavily from
    `Mailing List post - The BitBake equivalent of "Hello, World!"
-   <http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_.
+   <https://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_.
 
 As stated earlier, the goal of this example is to eventually compile
 "Hello World". However, it is unknown what BitBake needs and what you
@@ -205,7 +205,7 @@
     recipe files. For this example, you need to create the file in your
     project directory and define some key BitBake variables. For more
     information on the ``bitbake.conf`` file, see
-    http://git.openembedded.org/bitbake/tree/conf/bitbake.conf.
+    https://git.openembedded.org/bitbake/tree/conf/bitbake.conf.
 
     Use the following commands to create the ``conf`` directory in the
     project directory::
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
index 76c8e3d..42263ce 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
@@ -60,11 +60,11 @@
 -  OpenEmbedded, a metadata set utilized by BitBake
 
 Today, BitBake is the primary basis of the
-`OpenEmbedded <http://www.openembedded.org/>`__ project, which is being
+`OpenEmbedded <https://www.openembedded.org/>`__ project, which is being
 used to build and maintain Linux distributions such as the `Angstrom
 Distribution <http://www.angstrom-distribution.org/>`__, and which is
 also being used as the build tool for Linux projects such as the `Yocto
-Project <http://www.yoctoproject.org>`__.
+Project <https://www.yoctoproject.org>`__.
 
 Prior to BitBake, no other build tool adequately met the needs of an
 aspiring embedded Linux distribution. All of the build systems used by
@@ -319,7 +319,7 @@
 
    The following example downloads a snapshot of BitBake version 1.17.0::
 
-     $ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
+     $ wget https://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
      $ tar zxpvf bitbake-1.17.0.tar.gz
 
    After extraction of the tarball using
diff --git a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index 8862e16..e955beb 100644
--- a/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -281,6 +281,62 @@
 
          BB_GENERATE_MIRROR_TARBALLS = "1"
 
+   :term:`BB_GENERATE_SHALLOW_TARBALLS`
+      Setting this variable to "1" when :term:`BB_GIT_SHALLOW` is also set to
+      "1" causes bitbake to generate shallow mirror tarballs when fetching git
+      repositories. The number of commits included in the shallow mirror
+      tarballs is controlled by :term:`BB_GIT_SHALLOW_DEPTH`.
+
+      If both :term:`BB_GIT_SHALLOW` and :term:`BB_GENERATE_MIRROR_TARBALLS` are
+      enabled, bitbake will generate shallow mirror tarballs by default for git
+      repositories. This separate variable exists so that shallow tarball
+      generation can be enabled without needing to also enable normal mirror
+      generation if it is not desired.
+
+      For example usage, see :term:`BB_GIT_SHALLOW`.
+
+   :term:`BB_GIT_SHALLOW`
+      Setting this variable to "1" enables the support for fetching, using and
+      generating mirror tarballs of `shallow git repositories <https://riptutorial.com/git/example/4584/shallow-clone>`_.
+      The external `git-make-shallow <https://git.openembedded.org/bitbake/tree/bin/git-make-shallow>`_
+      script is used for shallow mirror tarball creation.
+
+      When :term:`BB_GIT_SHALLOW` is enabled, bitbake will attempt to fetch a shallow
+      mirror tarball. If the shallow mirror tarball cannot be fetched, it will
+      try to fetch the full mirror tarball and use that.
+
+      When a mirror tarball is not available, a full git clone will be performed
+      regardless of whether this variable is set or not. Support for shallow
+      clones is not currently implemented as git does not directly support
+      shallow cloning a particular git commit hash (it only supports cloning
+      from a tag or branch reference).
+
+      See also :term:`BB_GIT_SHALLOW_DEPTH` and
+      :term:`BB_GENERATE_SHALLOW_TARBALLS`.
+
+      Example usage::
+
+         BB_GIT_SHALLOW ?= "1"
+
+         # Keep only the top commit
+         BB_GIT_SHALLOW_DEPTH ?= "1"
+
+         # This defaults to enabled if both BB_GIT_SHALLOW and
+         # BB_GENERATE_MIRROR_TARBALLS are enabled
+         BB_GENERATE_SHALLOW_TARBALLS ?= "1"
+
+   :term:`BB_GIT_SHALLOW_DEPTH`
+      When used with :term:`BB_GENERATE_SHALLOW_TARBALLS`, this variable sets
+      the number of commits to include in generated shallow mirror tarballs.
+      With a depth of 1, only the commit referenced in :term:`SRCREV` is
+      included in the shallow mirror tarball. Increasing the depth includes
+      additional parent commits, working back through the commit history.
+
+      If this variable is unset, bitbake will default to a depth of 1 when
+      generating shallow mirror tarballs.
+
+      For example usage, see :term:`BB_GIT_SHALLOW`.
+
    :term:`BB_HASHBASE_WHITELIST`
       Lists variables that are excluded from checksum and dependency data.
       Variables that are excluded can therefore change without affecting
@@ -538,7 +594,7 @@
 
       You can use this variable in combination with task overrides to raise
       or lower priorities of specific tasks. For example, on the `Yocto
-      Project <http://www.yoctoproject.org>`__ autobuilder, QEMU emulation
+      Project <https://www.yoctoproject.org>`__ autobuilder, QEMU emulation
       in images is given a higher priority as compared to build tasks to
       ensure that images do not suffer timeouts on loaded systems.
 
@@ -1123,10 +1179,10 @@
       your configuration::
 
          PREMIRRORS:prepend = "\
-         git://.*/.* http://www.yoctoproject.org/sources/ \n \
-         ftp://.*/.* http://www.yoctoproject.org/sources/ \n \
-         http://.*/.* http://www.yoctoproject.org/sources/ \n \
-         https://.*/.* http://www.yoctoproject.org/sources/ \n"
+         git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \
+         ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \
+         http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \
+         https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n"
 
       These changes cause the build system to intercept Git, FTP, HTTP, and
       HTTPS requests and direct them to the ``http://`` sources mirror. You can
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 5c248d3..e01b8d5 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "1.51.1"
+__version__ = "1.53.0"
 
 import sys
 if sys.version_info < (3, 6, 0):
diff --git a/poky/bitbake/lib/bb/asyncrpc/client.py b/poky/bitbake/lib/bb/asyncrpc/client.py
index 50e60d5..3496019 100644
--- a/poky/bitbake/lib/bb/asyncrpc/client.py
+++ b/poky/bitbake/lib/bb/asyncrpc/client.py
@@ -7,6 +7,7 @@
 import json
 import os
 import socket
+import sys
 from . import chunkify, DEFAULT_MAX_CHUNK
 
 
@@ -129,7 +130,7 @@
         # required (but harmless) with it.
         asyncio.set_event_loop(self.loop)
 
-        self._add_methods('connect_tcp', 'close', 'ping')
+        self._add_methods('connect_tcp', 'ping')
 
     @abc.abstractmethod
     def _get_async_client(self):
@@ -163,3 +164,9 @@
     @max_chunk.setter
     def max_chunk(self, value):
         self.client.max_chunk = value
+
+    def close(self):
+        self.loop.run_until_complete(self.client.close())
+        if sys.version_info >= (3, 6):
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+        self.loop.close()
diff --git a/poky/bitbake/lib/bb/build.py b/poky/bitbake/lib/bb/build.py
index 7e4ab9f..d6418e4 100644
--- a/poky/bitbake/lib/bb/build.py
+++ b/poky/bitbake/lib/bb/build.py
@@ -569,7 +569,6 @@
 def _task_data(fn, task, d):
     localdata = bb.data.createCopy(d)
     localdata.setVar('BB_FILENAME', fn)
-    localdata.setVar('BB_CURRENTTASK', task[3:])
     localdata.setVar('OVERRIDES', 'task-%s:%s' %
                      (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
     localdata.finalize()
diff --git a/poky/bitbake/lib/bb/compress/_pipecompress.py b/poky/bitbake/lib/bb/compress/_pipecompress.py
index 4b9f662..5de17a8 100644
--- a/poky/bitbake/lib/bb/compress/_pipecompress.py
+++ b/poky/bitbake/lib/bb/compress/_pipecompress.py
@@ -49,7 +49,7 @@
             raise ValueError("Argument 'newline' not supported in binary mode")
 
     file_mode = mode.replace("t", "")
-    if isinstance(filename, (str, bytes, os.PathLike)):
+    if isinstance(filename, (str, bytes, os.PathLike, int)):
         binary_file = cls(filename, file_mode, **kwargs)
     elif hasattr(filename, "read") or hasattr(filename, "write"):
         binary_file = cls(None, file_mode, fileobj=filename, **kwargs)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index ee29d89..666cc13 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -884,7 +884,7 @@
         (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
         success = True
     except bb.process.NotFoundError as e:
-        error_message = "Fetch command %s" % (e.command)
+        error_message = "Fetch command %s not found" % (e.command)
     except bb.process.ExecutionError as e:
         if e.stdout:
             output = "output:\n%s\n%s" % (e.stdout, e.stderr)
@@ -1721,7 +1721,9 @@
                     self.d.setVar("BB_NO_NETWORK", "1")
 
                 firsterr = None
-                verified_stamp = m.verify_donestamp(ud, self.d)
+                verified_stamp = False
+                if done:
+                    verified_stamp = m.verify_donestamp(ud, self.d)
                 if not done and (not verified_stamp or m.need_update(ud, self.d)):
                     try:
                         if not trusted_network(self.d, ud.url):
@@ -1780,7 +1782,11 @@
 
     def checkstatus(self, urls=None):
         """
-        Check all urls exist upstream
+        Check all URLs exist upstream.
+
+        Returns None if the URLs exist, raises FetchError if the check wasn't
+        successful but there wasn't an error (such as file not found), and
+        raises other exceptions in error cases.
         """
 
         if not urls:
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index a4527bf..a7110a9 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -140,16 +140,6 @@
         if Git.need_update(self, ud, d):
             return True
 
-        try:
-            # Check for the nugget dropped by the download operation
-            known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
-                                        (ud.basecmd), d, workdir=ud.clonedir)
-
-            if ud.revisions[ud.names[0]] in known_srcrevs.split():
-                return False
-        except bb.fetch2.FetchError:
-            pass
-
         need_update_list = []
         def need_update_submodule(ud, url, module, modpath, workdir, d):
             url += ";bareclone=1;nobranch=1"
@@ -172,11 +162,6 @@
             shutil.rmtree(tmpdir)
         else:
             self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
-            if len(need_update_list) == 0:
-                # We already have the required commits of all submodules. Drop
-                # a nugget so we don't need to check again.
-                runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
         if len(need_update_list) > 0:
             logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -209,9 +194,6 @@
             shutil.rmtree(tmpdir)
         else:
             self.process_submodules(ud, ud.clonedir, download_submodule, d)
-            # Drop a nugget for the srcrev we've fetched (used by need_update)
-            runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                        (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 4789850..e497c38 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -69,17 +69,35 @@
     bb.utils.mkdirhier(destdir)
     cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
     cmd += " --no-same-owner"
+    cmd += " --delay-directory-restore"
     cmd += " --strip-components=1"
     runfetchcmd(cmd, d, workdir=destdir)
+    runfetchcmd("chmod -R +X %s" % (destdir), d, quiet=True, workdir=destdir)
 
 class NpmEnvironment(object):
     """
     Using a npm config file seems more reliable than using cli arguments.
     This class allows to create a controlled environment for npm commands.
     """
-    def __init__(self, d, configs=None):
+    def __init__(self, d, configs=None, npmrc=None):
         self.d = d
-        self.configs = configs
+
+        if configs:
+            self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
+            self.user_config_name = self.user_config.name
+            for key, value in configs:
+                self.user_config.write("%s=%s\n" % (key, value))
+        else:
+            self.user_config_name = "/dev/null"
+
+        if npmrc:
+            self.global_config_name = npmrc
+        else:
+            self.global_config_name = "/dev/null"
+
+    def __del__(self):
+        if self.user_config:
+            self.user_config.close()
 
     def run(self, cmd, args=None, configs=None, workdir=None):
         """Run npm command in a controlled environment"""
@@ -87,23 +105,19 @@
             d = bb.data.createCopy(self.d)
             d.setVar("HOME", tmpdir)
 
-            cfgfile = os.path.join(tmpdir, "npmrc")
-
             if not workdir:
                 workdir = tmpdir
 
             def _run(cmd):
-                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
-                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config_name) + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
                 return runfetchcmd(cmd, d, workdir=workdir)
 
-            if self.configs:
-                for key, value in self.configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
-
             if configs:
+                bb.warn("Use of configs argument of NpmEnvironment.run() function"
+                    " is deprecated. Please use args argument instead.")
                 for key, value in configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+                    cmd += " --%s=%s" % (key, shlex.quote(value))
 
             if args:
                 for key, value in args:
@@ -165,14 +179,14 @@
 
     def _resolve_proxy_url(self, ud, d):
         def _npm_view():
-            configs = []
-            configs.append(("json", "true"))
-            configs.append(("registry", ud.registry))
+            args = []
+            args.append(("json", "true"))
+            args.append(("registry", ud.registry))
             pkgver = shlex.quote(ud.package + "@" + ud.version)
             cmd = ud.basecmd + " view %s" % pkgver
             env = NpmEnvironment(d)
             check_network_access(d, cmd, ud.registry)
-            view_string = env.run(cmd, configs=configs)
+            view_string = env.run(cmd, args=args)
 
             if not view_string:
                 raise FetchError("Unavailable package %s" % pkgver, ud.url)
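
Aside: the NpmEnvironment rework above no longer runs "npm config set" for every
invocation. Persistent settings are written once to a temporary user-level npmrc
exported via NPM_CONFIG_USERCONFIG, the global config is pinned to /dev/null, and
one-off options travel as --key=value arguments. A minimal standalone sketch of
that approach follows; run_npm and its parameters are illustrative names, not
bitbake API.

    import os
    import subprocess
    import tempfile

    def run_npm(argv, configs=None, args=None, workdir=None):
        """Run an npm command with an isolated, file-based configuration."""
        with tempfile.NamedTemporaryFile(mode="w", suffix=".npmrc") as npmrc:
            # Persistent settings go into a throwaway user npmrc.
            for key, value in (configs or []):
                npmrc.write("%s=%s\n" % (key, value))
            npmrc.flush()

            env = dict(os.environ,
                       NPM_CONFIG_USERCONFIG=npmrc.name,
                       NPM_CONFIG_GLOBALCONFIG="/dev/null")

            # One-off options become --key=value command-line arguments.
            argv = list(argv) + ["--%s=%s" % (k, v) for k, v in (args or [])]
            return subprocess.run(argv, env=env, cwd=workdir, check=True,
                                  capture_output=True, text=True).stdout

    # e.g. (requires npm on PATH):
    #   run_npm(["npm", "view", "chalk"], args=[("json", "true")])
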
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index 0c3511d..426a139 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -24,11 +24,14 @@
 from bb.fetch2 import Fetch
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import ParameterError
+from bb.fetch2 import runfetchcmd
 from bb.fetch2 import URI
 from bb.fetch2.npm import npm_integrity
 from bb.fetch2.npm import npm_localfile
 from bb.fetch2.npm import npm_unpack
 from bb.utils import is_semver
+from bb.utils import lockfile
+from bb.utils import unlockfile
 
 def foreach_dependencies(shrinkwrap, callback=None, dev=False):
     """
@@ -78,6 +81,7 @@
             extrapaths = []
             destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
             destsuffix = os.path.join(*destsubdirs)
+            unpack = True
 
             integrity = params.get("integrity", None)
             resolved = params.get("resolved", None)
@@ -148,7 +152,12 @@
 
                 url = str(uri)
 
-            # local tarball sources and local link sources are unsupported
+            # Handle local tarball and link sources
+            elif version.startswith("file"):
+                localpath = version[5:]
+                if not version.endswith(".tgz"):
+                    unpack = False
+
             else:
                 raise ParameterError("Unsupported dependency: %s" % name, ud.url)
 
@@ -157,6 +166,7 @@
                 "localpath": localpath,
                 "extrapaths": extrapaths,
                 "destsuffix": destsuffix,
+                "unpack": unpack,
             })
 
         try:
@@ -177,7 +187,7 @@
         # This fetcher resolves multiple URIs from a shrinkwrap file and then
         # forwards it to a proxy fetcher. The management of the donestamp file,
         # the lockfile and the checksums are forwarded to the proxy fetcher.
-        ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
+        ud.proxy = Fetch([dep["url"] for dep in ud.deps if dep["url"]], data)
         ud.needdonestamp = False
 
     @staticmethod
@@ -187,7 +197,9 @@
             proxy_ud = ud.proxy.ud[proxy_url]
             proxy_d = ud.proxy.d
             proxy_ud.setup_localpath(proxy_d)
+            lf = lockfile(proxy_ud.lockfile)
             returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+            unlockfile(lf)
         return returns
 
     def verify_donestamp(self, ud, d):
@@ -237,7 +249,16 @@
 
         for dep in manual:
             depdestdir = os.path.join(destdir, dep["destsuffix"])
-            npm_unpack(dep["localpath"], depdestdir, d)
+            if dep["url"]:
+                npm_unpack(dep["localpath"], depdestdir, d)
+            else:
+                depsrcdir = os.path.join(destdir, dep["localpath"])
+                if dep["unpack"]:
+                    npm_unpack(depsrcdir, depdestdir, d)
+                else:
+                    bb.utils.mkdirhier(depdestdir)
+                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
+                    runfetchcmd(cmd, d, workdir=depdestdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
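
Aside: the npmsw change above adds local "file:" sources from a shrinkwrap file.
The path after the "file:" prefix becomes the dependency's localpath; ".tgz" paths
are unpacked like registry tarballs, while plain directories are copied into the
node_modules tree during unpack. A small helper mirroring just that decision
(classify_local_dependency is a hypothetical name, not part of the fetcher):

    def classify_local_dependency(version):
        """Return (localpath, unpack) for a shrinkwrap version field.

        Mirrors only the local-source branch added above; registry and git
        versions are handled elsewhere in the fetcher.
        """
        if not version.startswith("file"):
            raise ValueError("not a local dependency: %s" % version)
        localpath = version[5:]            # strip the "file:" prefix
        unpack = version.endswith(".tgz")  # tarballs are unpacked, dirs are copied
        return localpath, unpack

    # classify_local_dependency("file:deps/foo-1.0.0.tgz") -> ("deps/foo-1.0.0.tgz", True)
    # classify_local_dependency("file:deps/foo")           -> ("deps/foo", False)
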
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index e2a41a4..3b6fa4b 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@
 
         ud.setup_revisions(d)
 
-        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
+        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 625a9cf..578ba5d 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -11,6 +11,8 @@
 import bb.data
 import difflib
 import simplediff
+import json
+import bb.compress.zstd
 from bb.checksum import FileChecksumCache
 from bb import runqueue
 import hashserv
@@ -19,6 +21,17 @@
 logger = logging.getLogger('BitBake.SigGen')
 hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
 
+class SetEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, set):
+            return dict(_set_object=list(sorted(obj)))
+        return json.JSONEncoder.default(self, obj)
+
+def SetDecoder(dct):
+    if '_set_object' in dct:
+        return set(dct['_set_object'])
+    return dct
+
 def init(d):
     siggens = [obj for obj in globals().values()
                       if type(obj) is type and issubclass(obj, SignatureGenerator)]
@@ -398,9 +411,9 @@
 
         fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
         try:
-            with os.fdopen(fd, "wb") as stream:
-                p = pickle.dump(data, stream, -1)
-                stream.flush()
+            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
+                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
+                f.flush()
             os.chmod(tmpfile, 0o664)
             bb.utils.rename(tmpfile, sigfile)
         except (OSError, IOError) as err:
@@ -794,12 +807,10 @@
         formatparams.update(values)
         return formatstr.format(**formatparams)
 
-    with open(a, 'rb') as f:
-        p1 = pickle.Unpickler(f)
-        a_data = p1.load()
-    with open(b, 'rb') as f:
-        p2 = pickle.Unpickler(f)
-        b_data = p2.load()
+    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+        a_data = json.load(f, object_hook=SetDecoder)
+    with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
+        b_data = json.load(f, object_hook=SetDecoder)
 
     def dict_diff(a, b, whitelist=set()):
         sa = set(a.keys())
@@ -815,11 +826,11 @@
 
     def file_checksums_diff(a, b):
         from collections import Counter
-        # Handle old siginfo format
-        if isinstance(a, dict):
-            a = [(os.path.basename(f), cs) for f, cs in a.items()]
-        if isinstance(b, dict):
-            b = [(os.path.basename(f), cs) for f, cs in b.items()]
+
+        # Convert lists back to tuples
+        a = [(f[0], f[1]) for f in a]
+        b = [(f[0], f[1]) for f in b]
+
         # Compare lists, ensuring we can handle duplicate filenames if they exist
         removedcount = Counter(a)
         removedcount.subtract(b)
@@ -902,9 +913,9 @@
                 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
 
     if not 'file_checksum_values' in a_data:
-         a_data['file_checksum_values'] = {}
+         a_data['file_checksum_values'] = []
     if not 'file_checksum_values' in b_data:
-         b_data['file_checksum_values'] = {}
+         b_data['file_checksum_values'] = []
 
     changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
     if changed:
@@ -1031,9 +1042,8 @@
 def dump_sigfile(a):
     output = []
 
-    with open(a, 'rb') as f:
-        p1 = pickle.Unpickler(f)
-        a_data = p1.load()
+    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+        a_data = json.load(f, object_hook=SetDecoder)
 
     output.append("basewhitelist: %s" % (a_data['basewhitelist']))
 
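
Aside: with the siggen changes above, siginfo files move from pickle to
zstd-compressed JSON, and Python sets survive the round trip as a
{"_set_object": [...]} wrapper. The set handling can be exercised on its own with
the standard json module (bb.compress.zstd only adds stream compression and is
omitted here):

    import json

    class SetEncoder(json.JSONEncoder):
        def default(self, obj):
            # Encode sets as a sorted list under a reserved key.
            if isinstance(obj, set):
                return {"_set_object": sorted(obj)}
            return super().default(obj)

    def set_decoder(dct):
        # Undo the wrapper when loading.
        if "_set_object" in dct:
            return set(dct["_set_object"])
        return dct

    data = {"taskdeps": {"do_configure", "do_compile"}, "varvals": {"PV": "1.0"}}
    text = json.dumps(data, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
    assert json.loads(text, object_hook=set_decoder) == data
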
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index af292a2..8ad1c85 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -376,7 +376,7 @@
     def setUp(self):
         self.origdir = os.getcwd()
         self.d = bb.data.init()
-        self.tempdir = tempfile.mkdtemp()
+        self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
         self.dldir = os.path.join(self.tempdir, "download")
         os.mkdir(self.dldir)
         self.d.setVar("DL_DIR", self.dldir)
@@ -826,12 +826,12 @@
 class FetcherNetworkTest(FetcherTest):
     @skipIfNoNetwork()
     def test_fetch(self):
-        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+        fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
         self.d.setVar("BB_NO_NETWORK", "1")
-        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+        fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
         fetcher.download()
         fetcher.unpack(self.unpackdir)
         self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
@@ -839,21 +839,21 @@
 
     @skipIfNoNetwork()
     def test_fetch_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_mirror_of_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_file_mirror_of_mirror(self):
-        self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         os.mkdir(self.dldir + "/some2where")
         fetcher.download()
@@ -861,20 +861,20 @@
 
     @skipIfNoNetwork()
     def test_fetch_premirror(self):
-        self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_specify_downloadfilename(self):
-        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+        fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
 
     @skipIfNoNetwork()
     def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
-        self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake/")
+        self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
@@ -882,7 +882,7 @@
     @skipIfNoNetwork()
     # BZ13039
     def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
-        self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake http://downloads.yoctoproject.org/releases/bitbake")
+        self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
         fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
         fetcher.download()
         self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
@@ -1012,7 +1012,7 @@
 
     @skipIfNoNetwork()
     def test_git_submodule_CLI11(self):
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1027,12 +1027,12 @@
     @skipIfNoNetwork()
     def test_git_submodule_update_CLI11(self):
         """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
 
         # CLI11 that pulls in a newer nlohmann-json
-        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+        url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         # Previous cwd has been deleted
@@ -1291,10 +1291,10 @@
         #
         # packages with versions only in current directory
         #
-        # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
+        # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
         ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
             : "2.19",
-        # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
+        # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
         ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
             : "20120814",
         #
@@ -1357,13 +1357,13 @@
 
 
 class FetchCheckStatusTest(FetcherTest):
-    test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
-                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
-                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
+    test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
+                      "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
+                      "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
                       "https://yoctoproject.org/",
                       "https://docs.yoctoproject.org",
-                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
-                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
+                      "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
+                      "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
                       "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
                       "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
                       "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
diff --git a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index efebf00..2645c0e 100644
--- a/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/poky/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -12,6 +12,6 @@
 T = "${TMPDIR}/workdir/${PN}/temp"
 BB_NUMBER_THREADS = "4"
 
-BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE"
+BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK"
 
 include conf/multiconfig/${BB_CURRENT_MC}.conf
diff --git a/poky/bitbake/lib/bb/tests/runqueue.py b/poky/bitbake/lib/bb/tests/runqueue.py
index 3d51779..5b6ada8 100644
--- a/poky/bitbake/lib/bb/tests/runqueue.py
+++ b/poky/bitbake/lib/bb/tests/runqueue.py
@@ -278,7 +278,6 @@
                        ["mc_2:a1:%s" % t for t in rerun_tasks]
             self.assertEqual(set(tasks), set(expected))
 
-    @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
     def test_hashserv_single(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             extraenv = {
@@ -304,7 +303,6 @@
 
             self.shutdown(tempdir)
 
-    @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
     def test_hashserv_double(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             extraenv = {
@@ -329,7 +327,6 @@
 
             self.shutdown(tempdir)
 
-    @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
     def test_hashserv_multiple_setscene(self):
         # Runs e1:do_package_setscene twice
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -361,7 +358,7 @@
 
     def shutdown(self, tempdir):
         # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
-        while os.path.exists(tempdir + "/hashserve.sock"):
+        while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal")):
             time.sleep(0.5)
 
 
diff --git a/poky/bitbake/lib/bb/tests/utils.py b/poky/bitbake/lib/bb/tests/utils.py
index 4d5e21b..c363f62 100644
--- a/poky/bitbake/lib/bb/tests/utils.py
+++ b/poky/bitbake/lib/bb/tests/utils.py
@@ -418,7 +418,7 @@
                                ['MULTILINE'],
                                handle_var)
 
-        testvalue = re.sub('\s+', ' ', value_in_callback.strip())
+        testvalue = re.sub(r'\s+', ' ', value_in_callback.strip())
         self.assertEqual(expected_value, testvalue)
 
 class EditBbLayersConf(unittest.TestCase):
diff --git a/poky/bitbake/lib/bb/ui/knotty.py b/poky/bitbake/lib/bb/ui/knotty.py
index 8df745d..484545a 100644
--- a/poky/bitbake/lib/bb/ui/knotty.py
+++ b/poky/bitbake/lib/bb/ui/knotty.py
@@ -276,11 +276,11 @@
             print(content)
         else:
             if self.quiet:
-                content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+                content = "Running tasks (%s of %s/%s of %s)" % (self.helper.setscene_current, self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total)
             elif not len(activetasks):
-                content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+                content = "No currently running tasks (%s of %s/%s of %s)" % (self.helper.setscene_current, self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total)
             else:
-                content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
+                content = "Currently %2s running tasks (%s of %s/%s of %s)" % (len(activetasks), self.helper.setscene_current, self.helper.setscene_total, self.helper.tasknumber_current, self.helper.tasknumber_total)
             maxtask = self.helper.tasknumber_total
             if not self.main_progress or self.main_progress.maxval != maxtask:
                 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
diff --git a/poky/bitbake/lib/bb/ui/uihelper.py b/poky/bitbake/lib/bb/ui/uihelper.py
index 52fdae3..82913e0 100644
--- a/poky/bitbake/lib/bb/ui/uihelper.py
+++ b/poky/bitbake/lib/bb/ui/uihelper.py
@@ -50,8 +50,10 @@
             removetid(event.pid, tid)
             self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
         elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
-            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + event.stats.setscene_active + 1
+            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed
             self.tasknumber_total = event.stats.total
+            self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered
+            self.setscene_total = event.stats.setscene_total
             self.needUpdate = True
         elif isinstance(event, bb.build.TaskProgress):
             if event.pid > 0 and event.pid in self.pidmap:
diff --git a/poky/bitbake/lib/codegen.py b/poky/bitbake/lib/codegen.py
index 62a6748..6955a7a 100644
--- a/poky/bitbake/lib/codegen.py
+++ b/poky/bitbake/lib/codegen.py
@@ -401,6 +401,12 @@
     def visit_Num(self, node):
         self.write(repr(node.n))
 
+    def visit_Constant(self, node):
+        # Python 3.8 deprecated visit_Num(), visit_Str(), visit_Bytes(),
+        # visit_NameConstant() and visit_Ellipsis(). They can be removed once we
+        # require 3.8+.
+        self.write(repr(node.value))
+
     def visit_Tuple(self, node):
         self.write('(')
         idx = -1
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
index 5f2e101..9cb3fd5 100644
--- a/poky/bitbake/lib/hashserv/__init__.py
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -22,46 +22,68 @@
 # is necessary
 DEFAULT_MAX_CHUNK = 32 * 1024
 
-TABLE_DEFINITION = (
-    ("method", "TEXT NOT NULL"),
-    ("outhash", "TEXT NOT NULL"),
-    ("taskhash", "TEXT NOT NULL"),
-    ("unihash", "TEXT NOT NULL"),
-    ("created", "DATETIME"),
-
-    # Optional fields
-    ("owner", "TEXT"),
-    ("PN", "TEXT"),
-    ("PV", "TEXT"),
-    ("PR", "TEXT"),
-    ("task", "TEXT"),
-    ("outhash_siginfo", "TEXT"),
+UNIHASH_TABLE_DEFINITION = (
+    ("method", "TEXT NOT NULL", "UNIQUE"),
+    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+    ("unihash", "TEXT NOT NULL", ""),
 )
 
-TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION)
+UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
+
+OUTHASH_TABLE_DEFINITION = (
+    ("method", "TEXT NOT NULL", "UNIQUE"),
+    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+    ("outhash", "TEXT NOT NULL", "UNIQUE"),
+    ("created", "DATETIME", ""),
+
+    # Optional fields
+    ("owner", "TEXT", ""),
+    ("PN", "TEXT", ""),
+    ("PV", "TEXT", ""),
+    ("PR", "TEXT", ""),
+    ("task", "TEXT", ""),
+    ("outhash_siginfo", "TEXT", ""),
+)
+
+OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
+
+def _make_table(cursor, name, definition):
+    cursor.execute('''
+        CREATE TABLE IF NOT EXISTS {name} (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            {fields}
+            UNIQUE({unique})
+            )
+        '''.format(
+            name=name,
+            fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
+            unique=", ".join(name for name, _, flags in definition if "UNIQUE" in flags)
+    ))
+
 
 def setup_database(database, sync=True):
     db = sqlite3.connect(database)
     db.row_factory = sqlite3.Row
 
     with closing(db.cursor()) as cursor:
-        cursor.execute('''
-            CREATE TABLE IF NOT EXISTS tasks_v2 (
-                id INTEGER PRIMARY KEY AUTOINCREMENT,
-                %s
-                UNIQUE(method, outhash, taskhash)
-                )
-            ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION))
+        _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
+        _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
+
         cursor.execute('PRAGMA journal_mode = WAL')
         cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
 
         # Drop old indexes
         cursor.execute('DROP INDEX IF EXISTS taskhash_lookup')
         cursor.execute('DROP INDEX IF EXISTS outhash_lookup')
+        cursor.execute('DROP INDEX IF EXISTS taskhash_lookup_v2')
+        cursor.execute('DROP INDEX IF EXISTS outhash_lookup_v2')
+
+        # TODO: Upgrade from tasks_v2?
+        cursor.execute('DROP TABLE IF EXISTS tasks_v2')
 
         # Create new indexes
-        cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v2 ON tasks_v2 (method, taskhash, created)')
-        cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v2 ON tasks_v2 (method, outhash)')
+        cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)')
+        cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)')
 
     return db
 
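
Aside: the schema change above splits the old tasks_v2 table into unihashes_v2
(the method/taskhash -> unihash mapping) and outhashes_v2 (output hashes plus
metadata), each with a composite UNIQUE constraint assembled by _make_table().
A condensed look at the SQL this produces for the unihash table, using an
in-memory SQLite database (make_table_sql is an illustrative stand-in, not the
hashserv helper itself):

    import sqlite3

    UNIHASH_TABLE_DEFINITION = (
        ("method", "TEXT NOT NULL", "UNIQUE"),
        ("taskhash", "TEXT NOT NULL", "UNIQUE"),
        ("unihash", "TEXT NOT NULL", ""),
    )

    def make_table_sql(name, definition):
        # One column per entry plus a composite UNIQUE over the flagged columns,
        # the same shape _make_table() generates above.
        fields = " ".join("%s %s," % (col, typ) for col, typ, _ in definition)
        unique = ", ".join(col for col, _, flags in definition if "UNIQUE" in flags)
        return ("CREATE TABLE IF NOT EXISTS %s ("
                "id INTEGER PRIMARY KEY AUTOINCREMENT, %s UNIQUE(%s))"
                % (name, fields, unique))

    db = sqlite3.connect(":memory:")
    db.execute(make_table_sql("unihashes_v2", UNIHASH_TABLE_DEFINITION))
    # INSERT OR IGNORE keeps the first unihash recorded for a (method, taskhash) pair.
    db.execute("INSERT OR IGNORE INTO unihashes_v2 (method, taskhash, unihash) "
               "VALUES ('TestMethod', 'aaaa', '1111')")
    db.execute("INSERT OR IGNORE INTO unihashes_v2 (method, taskhash, unihash) "
               "VALUES ('TestMethod', 'aaaa', '2222')")
    print(db.execute("SELECT unihash FROM unihashes_v2").fetchall())  # [('1111',)]
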
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index 1a67c69..b2aa102 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -107,11 +107,11 @@
         super().__init__()
         self._add_methods(
             "connect_tcp",
-            "close",
             "get_unihash",
             "report_unihash",
             "report_unihash_equiv",
             "get_taskhash",
+            "get_outhash",
             "get_stats",
             "reset_stats",
             "backfill_wait",
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index a059e52..d40a2ab 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -5,11 +5,12 @@
 
 from contextlib import closing, contextmanager
 from datetime import datetime
+import enum
 import asyncio
 import logging
 import math
 import time
-from . import create_async_client, TABLE_COLUMNS
+from . import create_async_client, UNIHASH_TABLE_COLUMNS, OUTHASH_TABLE_COLUMNS
 import bb.asyncrpc
 
 
@@ -106,56 +107,64 @@
         return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
 
 
-def insert_task(cursor, data, ignore=False):
+@enum.unique
+class Resolve(enum.Enum):
+    FAIL = enum.auto()
+    IGNORE = enum.auto()
+    REPLACE = enum.auto()
+
+
+def insert_table(cursor, table, data, on_conflict):
+    resolve = {
+        Resolve.FAIL: "",
+        Resolve.IGNORE: " OR IGNORE",
+        Resolve.REPLACE: " OR REPLACE",
+    }[on_conflict]
+
     keys = sorted(data.keys())
-    query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
-        " OR IGNORE" if ignore else "",
-        ', '.join(keys),
-        ', '.join(':' + k for k in keys))
+    query = 'INSERT{resolve} INTO {table} ({fields}) VALUES({values})'.format(
+        resolve=resolve,
+        table=table,
+        fields=", ".join(keys),
+        values=", ".join(":" + k for k in keys),
+    )
+    prevrowid = cursor.lastrowid
     cursor.execute(query, data)
+    logging.debug(
+        "Inserting %r into %s, %s",
+        data,
+        table,
+        on_conflict
+    )
+    return (cursor.lastrowid, cursor.lastrowid != prevrowid)
 
-async def copy_from_upstream(client, db, method, taskhash):
-    d = await client.get_taskhash(method, taskhash, True)
+def insert_unihash(cursor, data, on_conflict):
+    return insert_table(cursor, "unihashes_v2", data, on_conflict)
+
+def insert_outhash(cursor, data, on_conflict):
+    return insert_table(cursor, "outhashes_v2", data, on_conflict)
+
+async def copy_unihash_from_upstream(client, db, method, taskhash):
+    d = await client.get_taskhash(method, taskhash)
     if d is not None:
-        # Filter out unknown columns
-        d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
-
         with closing(db.cursor()) as cursor:
-            insert_task(cursor, d)
+            insert_unihash(
+                cursor,
+                {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS},
+                Resolve.IGNORE,
+            )
             db.commit()
-
     return d
 
-async def copy_outhash_from_upstream(client, db, method, outhash, taskhash):
-    d = await client.get_outhash(method, outhash, taskhash)
-    if d is not None:
-        # Filter out unknown columns
-        d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
 
-        with closing(db.cursor()) as cursor:
-            insert_task(cursor, d)
-            db.commit()
+class ServerCursor(object):
+    def __init__(self, db, cursor, upstream):
+        self.db = db
+        self.cursor = cursor
+        self.upstream = upstream
 
-    return d
 
 class ServerClient(bb.asyncrpc.AsyncServerConnection):
-    FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
-    ALL_QUERY =  'SELECT *                         FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
-    OUTHASH_QUERY = '''
-        -- Find tasks with a matching outhash (that is, tasks that
-        -- are equivalent)
-        SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash
-
-        -- If there is an exact match on the taskhash, return it.
-        -- Otherwise return the oldest matching outhash of any
-        -- taskhash
-        ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
-            created ASC
-
-        -- Only return one row
-        LIMIT 1
-        '''
-
     def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
         super().__init__(reader, writer, 'OEHASHEQUIV', logger)
         self.db = db
@@ -210,37 +219,103 @@
     async def handle_get(self, request):
         method = request['method']
         taskhash = request['taskhash']
+        fetch_all = request.get('all', False)
 
-        if request.get('all', False):
-            row = self.query_equivalent(method, taskhash, self.ALL_QUERY)
-        else:
-            row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
-
-        if row is not None:
-            logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
-            d = {k: row[k] for k in row.keys()}
-        elif self.upstream_client is not None:
-            d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash)
-        else:
-            d = None
+        with closing(self.db.cursor()) as cursor:
+            d = await self.get_unihash(cursor, method, taskhash, fetch_all)
 
         self.write_message(d)
 
-    async def handle_get_outhash(self, request):
-        with closing(self.db.cursor()) as cursor:
-            cursor.execute(self.OUTHASH_QUERY,
-                           {k: request[k] for k in ('method', 'outhash', 'taskhash')})
+    async def get_unihash(self, cursor, method, taskhash, fetch_all=False):
+        d = None
 
+        if fetch_all:
+            cursor.execute(
+                '''
+                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
+                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+                WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
+                ORDER BY outhashes_v2.created ASC
+                LIMIT 1
+                ''',
+                {
+                    'method': method,
+                    'taskhash': taskhash,
+                }
+
+            )
             row = cursor.fetchone()
 
-        if row is not None:
-            logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash']))
-            d = {k: row[k] for k in row.keys()}
+            if row is not None:
+                d = {k: row[k] for k in row.keys()}
+            elif self.upstream_client is not None:
+                d = await self.upstream_client.get_taskhash(method, taskhash, True)
+                self.update_unified(cursor, d)
+                self.db.commit()
         else:
-            d = None
+            row = self.query_equivalent(cursor, method, taskhash)
+
+            if row is not None:
+                d = {k: row[k] for k in row.keys()}
+            elif self.upstream_client is not None:
+                d = await self.upstream_client.get_taskhash(method, taskhash)
+                d = {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS}
+                insert_unihash(cursor, d, Resolve.IGNORE)
+                self.db.commit()
+
+        return d
+
+    async def handle_get_outhash(self, request):
+        method = request['method']
+        outhash = request['outhash']
+        taskhash = request['taskhash']
+
+        with closing(self.db.cursor()) as cursor:
+            d = await self.get_outhash(cursor, method, outhash, taskhash)
 
         self.write_message(d)
 
+    async def get_outhash(self, cursor, method, outhash, taskhash):
+        d = None
+        cursor.execute(
+            '''
+            SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
+            INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+            WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+            ORDER BY outhashes_v2.created ASC
+            LIMIT 1
+            ''',
+            {
+                'method': method,
+                'outhash': outhash,
+            }
+        )
+        row = cursor.fetchone()
+
+        if row is not None:
+            d = {k: row[k] for k in row.keys()}
+        elif self.upstream_client is not None:
+            d = await self.upstream_client.get_outhash(method, outhash, taskhash)
+            self.update_unified(cursor, d)
+            self.db.commit()
+
+        return d
+
+    def update_unified(self, cursor, data):
+        if data is None:
+            return
+
+        insert_unihash(
+            cursor,
+            {k: v for k, v in data.items() if k in UNIHASH_TABLE_COLUMNS},
+            Resolve.IGNORE
+        )
+        insert_outhash(
+            cursor,
+            {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS},
+            Resolve.IGNORE
+        )
+
     async def handle_get_stream(self, request):
         self.write_message('ok')
 
@@ -267,7 +342,12 @@
 
                 (method, taskhash) = l.split()
                 #logger.debug('Looking up %s %s' % (method, taskhash))
-                row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
+                cursor = self.db.cursor()
+                try:
+                    row = self.query_equivalent(cursor, method, taskhash)
+                finally:
+                    cursor.close()
+
                 if row is not None:
                     msg = ('%s\n' % row['unihash']).encode('utf-8')
                     #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
@@ -294,55 +374,82 @@
 
     async def handle_report(self, data):
         with closing(self.db.cursor()) as cursor:
-            cursor.execute(self.OUTHASH_QUERY,
-                           {k: data[k] for k in ('method', 'outhash', 'taskhash')})
+            outhash_data = {
+                'method': data['method'],
+                'outhash': data['outhash'],
+                'taskhash': data['taskhash'],
+                'created': datetime.now()
+            }
 
-            row = cursor.fetchone()
+            for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
+                if k in data:
+                    outhash_data[k] = data[k]
 
-            if row is None and self.upstream_client:
-                # Try upstream
-                row = await copy_outhash_from_upstream(self.upstream_client,
-                                                       self.db,
-                                                       data['method'],
-                                                       data['outhash'],
-                                                       data['taskhash'])
+            # Insert the new entry, unless it already exists
+            (rowid, inserted) = insert_outhash(cursor, outhash_data, Resolve.IGNORE)
 
-            # If no matching outhash was found, or one *was* found but it
-            # wasn't an exact match on the taskhash, a new entry for this
-            # taskhash should be added
-            if row is None or row['taskhash'] != data['taskhash']:
-                # If a row matching the outhash was found, the unihash for
-                # the new taskhash should be the same as that one.
-                # Otherwise the caller provided unihash is used.
-                unihash = data['unihash']
+            if inserted:
+                # If this row is new, check if it is equivalent to another
+                # output hash
+                cursor.execute(
+                    '''
+                    SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
+                    INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+                    -- Select any matching output hash except the one we just inserted
+                    WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
+                    -- Pick the oldest hash
+                    ORDER BY outhashes_v2.created ASC
+                    LIMIT 1
+                    ''',
+                    {
+                        'method': data['method'],
+                        'outhash': data['outhash'],
+                        'taskhash': data['taskhash'],
+                    }
+                )
+                row = cursor.fetchone()
+
                 if row is not None:
+                    # A matching output hash was found. Set our taskhash to the
+                    # same unihash since they are equivalent
                     unihash = row['unihash']
+                    resolve = Resolve.IGNORE
+                else:
+                    # No matching output hash was found. This is probably the
+                    # first outhash to be added.
+                    unihash = data['unihash']
+                    resolve = Resolve.IGNORE
 
-                insert_data = {
-                    'method': data['method'],
-                    'outhash': data['outhash'],
-                    'taskhash': data['taskhash'],
-                    'unihash': unihash,
-                    'created': datetime.now()
-                }
+                    # Query upstream to see if it has a unihash we can use
+                    if self.upstream_client is not None:
+                        upstream_data = await self.upstream_client.get_outhash(data['method'], data['outhash'], data['taskhash'])
+                        if upstream_data is not None:
+                            unihash = upstream_data['unihash']
 
-                for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
-                    if k in data:
-                        insert_data[k] = data[k]
 
-                insert_task(cursor, insert_data)
-                self.db.commit()
+                insert_unihash(
+                    cursor,
+                    {
+                        'method': data['method'],
+                        'taskhash': data['taskhash'],
+                        'unihash': unihash,
+                    },
+                    resolve
+                )
 
-                logger.info('Adding taskhash %s with unihash %s',
-                            data['taskhash'], unihash)
-
-                d = {
-                    'taskhash': data['taskhash'],
-                    'method': data['method'],
-                    'unihash': unihash
-                }
+            unihash_data = await self.get_unihash(cursor, data['method'], data['taskhash'])
+            if unihash_data is not None:
+                unihash = unihash_data['unihash']
             else:
-                d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+                unihash = data['unihash']
+
+            self.db.commit()
+
+            d = {
+                'taskhash': data['taskhash'],
+                'method': data['method'],
+                'unihash': unihash,
+            }
 
         self.write_message(d)
 
@@ -350,23 +457,16 @@
         with closing(self.db.cursor()) as cursor:
             insert_data = {
                 'method': data['method'],
-                'outhash': "",
                 'taskhash': data['taskhash'],
                 'unihash': data['unihash'],
-                'created': datetime.now()
             }
-
-            for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
-                if k in data:
-                    insert_data[k] = data[k]
-
-            insert_task(cursor, insert_data, ignore=True)
+            insert_unihash(cursor, insert_data, Resolve.IGNORE)
             self.db.commit()
 
             # Fetch the unihash that will be reported for the taskhash. If the
             # unihash matches, it means this row was inserted (or the mapping
             # was already valid)
-            row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY)
+            row = self.query_equivalent(cursor, data['method'], data['taskhash'])
 
             if row['unihash'] == data['unihash']:
                 logger.info('Adding taskhash equivalence for %s with unihash %s',
@@ -399,14 +499,16 @@
         await self.backfill_queue.join()
         self.write_message(d)
 
-    def query_equivalent(self, method, taskhash, query):
+    def query_equivalent(self, cursor, method, taskhash):
         # This is part of the inner loop and must be as fast as possible
-        try:
-            cursor = self.db.cursor()
-            cursor.execute(query, {'method': method, 'taskhash': taskhash})
-            return cursor.fetchone()
-        except:
-            cursor.close()
+        cursor.execute(
+            'SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash',
+            {
+                'method': method,
+                'taskhash': taskhash,
+            }
+        )
+        return cursor.fetchone()
 
 
 class Server(bb.asyncrpc.AsyncServer):
@@ -435,7 +537,7 @@
                         self.backfill_queue.task_done()
                         break
                     method, taskhash = item
-                    await copy_from_upstream(client, self.db, method, taskhash)
+                    await copy_unihash_from_upstream(client, self.db, method, taskhash)
                     self.backfill_queue.task_done()
             finally:
                 await client.close()
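
Aside: in the reworked server above, writes funnel through insert_table(), which
maps the Resolve enum onto SQLite's OR IGNORE / OR REPLACE conflict clauses and
infers whether a row was actually added by comparing cursor.lastrowid before and
after the execute. A trimmed standalone version of that helper (table name and
sample data are arbitrary examples):

    import enum
    import sqlite3

    @enum.unique
    class Resolve(enum.Enum):
        FAIL = enum.auto()
        IGNORE = enum.auto()
        REPLACE = enum.auto()

    def insert_table(cursor, table, data, on_conflict):
        resolve = {Resolve.FAIL: "", Resolve.IGNORE: " OR IGNORE",
                   Resolve.REPLACE: " OR REPLACE"}[on_conflict]
        keys = sorted(data.keys())
        query = "INSERT%s INTO %s (%s) VALUES (%s)" % (
            resolve, table, ", ".join(keys), ", ".join(":" + k for k in keys))
        prevrowid = cursor.lastrowid
        cursor.execute(query, data)
        # lastrowid only advances when a row was really inserted, so comparing it
        # with its previous value tells the caller whether the insert took effect.
        return cursor.lastrowid, cursor.lastrowid != prevrowid

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE unihashes_v2 (id INTEGER PRIMARY KEY AUTOINCREMENT, "
               "method TEXT, taskhash TEXT, unihash TEXT, UNIQUE(method, taskhash))")
    cur = db.cursor()
    row = {"method": "TestMethod", "taskhash": "aaaa", "unihash": "1111"}
    print(insert_table(cur, "unihashes_v2", row, Resolve.IGNORE))  # new row inserted
    print(insert_table(cur, "unihashes_v2", dict(row, unihash="2222"),
                       Resolve.IGNORE))                            # ignored, first mapping kept
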
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index e851535..f6b85ae 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -19,10 +19,10 @@
 import signal
 
 def server_prefunc(server, idx):
-    logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
+    logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w',
                         format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
     server.logger.debug("Running server %d" % idx)
-    sys.stdout = open('bbhashserv-%d.log' % idx, 'w')
+    sys.stdout = open('bbhashserv-stdout-%d.log' % idx, 'w')
     sys.stderr = sys.stdout
 
 class HashEquivalenceTestSetup(object):
@@ -140,12 +140,17 @@
         })
         self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
 
-        result = self.client.get_taskhash(self.METHOD, taskhash, True)
-        self.assertEqual(result['taskhash'], taskhash)
-        self.assertEqual(result['unihash'], unihash)
-        self.assertEqual(result['method'], self.METHOD)
-        self.assertEqual(result['outhash'], outhash)
-        self.assertEqual(result['outhash_siginfo'], siginfo)
+        result_unihash = self.client.get_taskhash(self.METHOD, taskhash, True)
+        self.assertEqual(result_unihash['taskhash'], taskhash)
+        self.assertEqual(result_unihash['unihash'], unihash)
+        self.assertEqual(result_unihash['method'], self.METHOD)
+
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+        self.assertEqual(result_outhash['taskhash'], taskhash)
+        self.assertEqual(result_outhash['method'], self.METHOD)
+        self.assertEqual(result_outhash['unihash'], unihash)
+        self.assertEqual(result_outhash['outhash'], outhash)
+        self.assertEqual(result_outhash['outhash_siginfo'], siginfo)
 
     def test_stress(self):
         def query_server(failures):
@@ -260,6 +265,39 @@
         result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6)
         self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream')
 
+        # Tests read through from the upstream server
+        taskhash7 = '9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74'
+        outhash7 = '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69'
+        unihash7 = '05d2a63c81e32f0a36542ca677e8ad852365c538'
+        self.client.report_unihash(taskhash7, self.METHOD, outhash7, unihash7)
+
+        result = down_client.get_taskhash(self.METHOD, taskhash7, True)
+        self.assertEqual(result['unihash'], unihash7, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['outhash'], outhash7, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['taskhash'], taskhash7, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['method'], self.METHOD)
+
+        taskhash8 = '86978a4c8c71b9b487330b0152aade10c1ee58aa'
+        outhash8 = 'ca8c128e9d9e4a28ef24d0508aa20b5cf880604eacd8f65c0e366f7e0cc5fbcf'
+        unihash8 = 'd8bcf25369d40590ad7d08c84d538982f2023e01'
+        self.client.report_unihash(taskhash8, self.METHOD, outhash8, unihash8)
+
+        result = down_client.get_outhash(self.METHOD, outhash8, taskhash8)
+        self.assertEqual(result['unihash'], unihash8, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['outhash'], outhash8, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['taskhash'], taskhash8, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['method'], self.METHOD)
+
+        taskhash9 = 'ae6339531895ddf5b67e663e6a374ad8ec71d81c'
+        outhash9 = 'afc78172c81880ae10a1fec994b5b4ee33d196a001a1b66212a15ebe573e00b5'
+        unihash9 = '6662e699d6e3d894b24408ff9a4031ef9b038ee8'
+        self.client.report_unihash(taskhash9, self.METHOD, outhash9, unihash9)
+
+        result = down_client.get_taskhash(self.METHOD, taskhash9, False)
+        self.assertEqual(result['unihash'], unihash9, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy unihash from upstream')
+        self.assertEqual(result['method'], self.METHOD)
+
     def test_ro_server(self):
         (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)
 
@@ -287,10 +325,8 @@
 
 
     def test_slow_server_start(self):
-        """
-        Ensures that the server will exit correctly even if it gets a SIGTERM
-        before entering the main loop
-        """
+        # Ensures that the server will exit correctly even if it gets a SIGTERM
+        # before entering the main loop
 
         event = multiprocessing.Event()
 
@@ -312,6 +348,58 @@
         server.process.join(300)
         self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!")
 
+    def test_diverging_report_race(self):
+        # Tests that a reported task will correctly pick up an updated unihash
+
+        # This is a baseline report added to the database to ensure that there
+        # is something to match against as equivalent
+        outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
+        taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+        unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+        result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
+
+        # Add a report that is equivalent to Task 1. It should ignore the
+        # provided unihash and report the unihash from task 1
+        taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
+        unihash2 = taskhash2
+        result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
+        self.assertEqual(result['unihash'], unihash1)
+
+        # Add another report for Task 2, but with a different outhash (e.g. the
+        # task is non-deterministic). It should still be marked with the Task 1
+        # unihash because it has the Task 2 taskhash, which is equivalent to
+        # Task 1
+        outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
+        result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
+        self.assertEqual(result['unihash'], unihash1)
+
+
+    def test_diverging_report_reverse_race(self):
+        # Same idea as the previous test, but Tasks 2 and 3 are reported in
+        # the opposite order
+
+        outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
+        taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+        unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+        result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
+
+        taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
+        unihash2 = taskhash2
+
+        # Report Task 3 first. Since there is nothing else in the database it
+        # will use the client provided unihash
+        outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
+        result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
+        self.assertEqual(result['unihash'], unihash2)
+
+        # Report Task 2. This is equivalent to Task 1 but there is already a mapping for
+        # taskhash2 so it will report unihash2
+        result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
+        self.assertEqual(result['unihash'], unihash2)
+
+        # The originally reported unihash for Task 3 should be unchanged even if it
+        # shares a taskhash with Task 2
+        self.assertClientGetHash(self.client, taskhash2, unihash2)
 
 class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
     def get_server_addr(self, server_idx):
diff --git a/poky/bitbake/lib/layerindexlib/__init__.py b/poky/bitbake/lib/layerindexlib/__init__.py
index 3159bf2..08063c5 100644
--- a/poky/bitbake/lib/layerindexlib/__init__.py
+++ b/poky/bitbake/lib/layerindexlib/__init__.py
@@ -198,7 +198,7 @@
 
   For example:
 
-  http://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index
+  https://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index
   cooker://
 '''
         if reload:
@@ -576,7 +576,7 @@
 #   index['config'] - configuration data for this index
 #   index['branches'] - dictionary of Branch objects, by id number
 #   index['layerItems'] - dictionary of layerItem objects, by id number
-#   ...etc...  (See: http://layers.openembedded.org/layerindex/api/)
+#   ...etc...  (See: https://layers.openembedded.org/layerindex/api/)
 #
 # The class needs to manage the 'index' entries and allow easily adding
 # of new items, as well as simply loading of the items.
diff --git a/poky/bitbake/lib/layerindexlib/restapi.py b/poky/bitbake/lib/layerindexlib/restapi.py
index 26a1c96..81d99b0 100644
--- a/poky/bitbake/lib/layerindexlib/restapi.py
+++ b/poky/bitbake/lib/layerindexlib/restapi.py
@@ -31,7 +31,7 @@
             The return value is a LayerIndexObj.
 
             url is the url to the rest api of the layer index, such as:
-            http://layers.openembedded.org/layerindex/api/
+            https://layers.openembedded.org/layerindex/api/
 
             Or a local file...
         """
@@ -138,7 +138,7 @@
             The return value is a LayerIndexObj.
 
             ud is the parsed url to the rest api of the layer index, such as:
-            http://layers.openembedded.org/layerindex/api/
+            https://layers.openembedded.org/layerindex/api/
         """
 
         def _get_json_response(apiurl=None, username=None, password=None, retry=True):
diff --git a/poky/bitbake/lib/layerindexlib/tests/restapi.py b/poky/bitbake/lib/layerindexlib/tests/restapi.py
index 33b5c1c..71f0ae8 100644
--- a/poky/bitbake/lib/layerindexlib/tests/restapi.py
+++ b/poky/bitbake/lib/layerindexlib/tests/restapi.py
@@ -22,7 +22,7 @@
         self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway")
         LayersTest.setUp(self)
         self.layerindex = layerindexlib.LayerIndex(self.d)
-        self.layerindex.load_layerindex('http://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies'])
+        self.layerindex.load_layerindex('https://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies'])
 
     @skipIfNoNetwork()
     def test_layerindex_is_empty(self):
diff --git a/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml
index 026d948..b01a337 100644
--- a/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml
+++ b/poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml
@@ -34,7 +34,7 @@
     <field type="CharField" name="description">Openembedded Dunfell</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
     <field type="CharField" name="branch_name">dunfell</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field>
   </object>
   <object model="orm.release" pk="2">
     <field type="CharField" name="name">local</field>
@@ -48,14 +48,14 @@
     <field type="CharField" name="description">OpenEmbedded core master</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
     <field type="CharField" name="branch_name">master</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
   </object>
   <object model="orm.release" pk="4">
     <field type="CharField" name="name">gatesgarth</field>
     <field type="CharField" name="description">Openembedded Gatesgarth</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
     <field type="CharField" name="branch_name">gatesgarth</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\"&gt;OpenEmbedded Gatesgarth&lt;/a&gt; branch.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\"&gt;OpenEmbedded Gatesgarth&lt;/a&gt; branch.</field>
   </object>
 
   <!-- Default layers for each release -->
@@ -81,9 +81,9 @@
   <object model="orm.layer" pk="1">
     <field type="CharField" name="name">openembedded-core</field>
     <field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field>
-    <field type="CharField" name="vcs_web_url">http://cgit.openembedded.org/openembedded-core</field>
-    <field type="CharField" name="vcs_web_tree_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
-    <field type="CharField" name="vcs_web_file_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_url">https://cgit.openembedded.org/openembedded-core</field>
+    <field type="CharField" name="vcs_web_tree_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_file_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
   </object>
   <object model="orm.layer_version" pk="1">
     <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
diff --git a/poky/bitbake/lib/toaster/orm/fixtures/poky.xml b/poky/bitbake/lib/toaster/orm/fixtures/poky.xml
index a468a54..363789d 100644
--- a/poky/bitbake/lib/toaster/orm/fixtures/poky.xml
+++ b/poky/bitbake/lib/toaster/orm/fixtures/poky.xml
@@ -39,7 +39,7 @@
     <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
     <field type="CharField" name="branch_name">dunfell</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field>
   </object>
   <object model="orm.release" pk="2">
     <field type="CharField" name="name">local</field>
@@ -53,14 +53,14 @@
     <field type="CharField" name="description">Yocto Project master</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
     <field type="CharField" name="branch_name">master</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
   </object>
   <object model="orm.release" pk="4">
     <field type="CharField" name="name">gatesgarth</field>
     <field type="CharField" name="description">Yocto Project 3.2 "Gatesgarth"</field>
     <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
     <field type="CharField" name="branch_name">gatesgarth</field>
-    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth"&gt;Yocto Project Gatesgarth branch&lt;/a&gt;.</field>
+    <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth"&gt;Yocto Project Gatesgarth branch&lt;/a&gt;.</field>
   </object>
 
   <!-- Default project layers for each release -->
@@ -122,9 +122,9 @@
     <field type="CharField" name="name">openembedded-core</field>
     <field type="CharField" name="layer_index_url"></field>
     <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
-    <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
-    <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
-    <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+    <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
   </object>
   <object model="orm.layer_version" pk="1">
     <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
@@ -160,9 +160,9 @@
     <field type="CharField" name="name">meta-poky</field>
     <field type="CharField" name="layer_index_url"></field>
     <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
-    <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
-    <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
-    <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+    <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
   </object>
   <object model="orm.layer_version" pk="5">
     <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
@@ -198,9 +198,9 @@
     <field type="CharField" name="name">meta-yocto-bsp</field>
     <field type="CharField" name="layer_index_url"></field>
     <field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
-    <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
-    <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
-    <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+    <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+    <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
   </object>
   <object model="orm.layer_version" pk="9">
     <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
diff --git a/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py
index 2fbd7be..3b950e6 100644
--- a/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py
+++ b/poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py
@@ -21,7 +21,7 @@
 import time
 logger = logging.getLogger("toaster")
 
-DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/"
+DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/"
 
 # Add path to bitbake modules for layerindexlib
 # lib/toaster/orm/management/commands/lsupdates.py (abspath)
diff --git a/poky/bitbake/lib/toaster/tests/builds/buildtest.py b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
index 872bbd3..13b51fb 100644
--- a/poky/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -119,7 +119,7 @@
         if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
             ProjectVariable.objects.get_or_create(
                 name="SSTATE_MIRRORS",
-                value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
+                value="file://.* http://sstate.yoctoproject.org/PATH;downloadfilename=PATH",
                 project=project)
 
         ProjectTarget.objects.create(project=project,
diff --git a/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
index 1e26e31..923ca3b 100644
--- a/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
+++ b/poky/bitbake/lib/toaster/toastergui/templates/layerdetails.html
@@ -355,7 +355,7 @@
             {% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX  %}
             <dt>Layer index</dt>
             <dd>
-            <a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
+            <a href="https://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
             </dd>
             {% endif %}
           </dl>
diff --git a/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
index 66f8e7f..a4fcd2a 100644
--- a/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/poky/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
@@ -127,7 +127,7 @@
                     {% comment %}
                     # Removed per team meeting of 1/29/2014 until
                     # decision on index search algorithm
-                    <a href="http://layers.openembedded.org"  target="_blank">
+                    <a href="https://layers.openembedded.org"  target="_blank">
                     <i class="glyphicon glyphicon-share get-info"></i>
                     </a>
                     {% endcomment %}