subtree updates

poky: 8d0ba08aa6..2696bf8cf3:
  Adam Johnston (1):
        useradd_base: Fix sed command line for passwd-expire

  Adrian Freihofer (1):
        vscode: add minimal configuration

  Alassane Yattara (44):
        bitbake: Update toaster-requirements to add django-log-viewer==1.1.7
        bitbake: toaster: bug-fix on tests.browser.test_most_recent_builds_states
        bitbake: Toaster: Bug-fix failure on tests.browser.test_layerdetails_page
        bitbake: Toaster: Fixed javascript issue on tests.browser.test_js_unit_tests
        bitbake: Toaster: bug-fix on /toastermain/logs.py
        bitbake: Toaster: bug-fix on custom image test cases
        bitbake: Toaster: bug-fix on tests/views/test_views.py
        bitbake: Toaster: bug-fix on tests.views.test_views.py
        bitbake: toaster: Write logs to BUILDDIR/toaster_logs
        bitbake: toaster: Add toaster-tests-requirements.txt to add pytest and some plugins
        bitbake: toaster: Update orm.models to catch error ProcessLookupError
        bitbake: toaster: Bug-fix pytest and Failed: Database access not allowed
        bitbake: toaster: fixed pytest error: Database access not allowed, use the "django_db"
        bitbake: toaster: Bug-fix django.db.utils.IntegrityError: Problem installing fixture
        bitbake: toaster: fixed: Tests fail when executed one after the other out of sequence
        bitbake: toaster: Added pytest.ini file
        bitbake: toaster: Check info_sign is visible and clickable in landing page
        bitbake: toaster: Test documentation link in landing header is displayed
        bitbake: toaster: Test jumbotron links visible and clickable
        bitbake: toaster: Bug-fix webdriver No parameter named options
        bitbake: Toaster: Write UI TestCase create new project
        bitbake: Toaster: Test create new project without project name
        bitbake: Toaster: Write UI TestCase import new project using
        bitbake: toaster/tests: Add UI TestCase to test if 'no build' message is shown
        bitbake: toaster/tests: Add UI TestCase to test search box on all build page
        bitbake: toaster/tests: Add UI TestCase to test the filtering feature on 'failure tasks' column
        bitbake: toaster/tests: Add UI TestCase to test filtering feature on 'completed_on' column
        bitbake: toaster/tests: Add UI TestCase to test "edit column" feature show/hide column
        bitbake: toaster/tests: Add UI TestCase to test "show rows" feature, change displaying rows in table
        bitbake: toaster/tests: Add UI TestCase for deleting project
        bitbake: toaster/tests: Add UI TestCase for Visualize all projects
        bitbake: toaster/tests: Add UI TestCase for visualize all projects edit column
        bitbake: toaster/tests: Add UI TestCase for visualize all projects show rows
        bitbake: toaster/tests/create_new_project: Code cleanup
        bitbake: toaster/tests: Add UI TestCase - Check project header contains right items
        bitbake: toaster/tests: Add UI TestCase - Test edit project name on project page
        bitbake: toaster/tests: Add UI TestCase - Test project page has right tabs displayed
        bitbake: toaster/tests: Add UI TestCase - Test project config tab navigation:
        bitbake: toaster/tests: Add UI TestCase - Test project config tab
        bitbake: toaster/tests: Add UI TestCase - Test project page tab import layer
        bitbake: toaster/tests: Add UI TestCase - Test project page tab "New custom image"
        bitbake: toaster/tests: Add UI TestCase - Test project page section images
        bitbake: toaster/tests: Add UI TestCase for the edit column feature in image recipe
        bitbake: toaster/tests: Add UI TestCase - Test the show rows feature in image recipe

  Alberto Pianon (1):
        bitbake: fetch2: Add API for upstream source tracing

  Alejandro Hernandez Samaniego (2):
        qemuarmv5: Drop QB_DTB conditional for older kernels
        baremetal-helloworld: Pull in fix for race condition on x86-64

  Alex Stewart (1):
        libsndfile1: fix CVE-2022-33065

  Alexander Kanavin (10):
        scripts/bitbake-whatchanged: remove
        selftest/buildoptions: tag the download mirror test with 'yocto-mirrors'
        bitbake: runqueue.py: clarify that 'closest' signature means 'most recent' (and not closest in its content)
        selftest/sstatetests: add tests for 'bitbake -S printdiff'
        lib/oe/sstatesig.py: dump locked.sigs.inc only when explicitly asked via -S lockedsigs
        selftest/sstatetests: add a test for CDN sstate cache
        populate_sdk_ext.bbclass: do not symlink unfsd from sdk image sysroot into eSDK tools path
        meta/lib/oe/copy_buildsystem.py: do not dereference symlinks
        scripts/esdk-tools: use a dedicated, static directory for esdk tools
        populate_sdk_ext: split copy_buildsystem() into logical steps defined as functions

  Alexander Lussier-Cullen (2):
        bitbake: toaster/tests: add passthroughs for relevant build environment variables
        bitbake: toaster: make django temp directory configurable

  Alexandre Belloni (1):
        strace: further clean up of ptest folders

  Alexis Lothoré (5):
        scripts/resulttool: limit the number of changes displayed per test
        scripts/resulttool: rearrange regressions report order
        scripts/resulttool: make additional info more compact
        scripts/yocto_testresults_query: add option to change display limit
        scripts/resulttool: group all regressions in regression report

  Anuj Mittal (9):
        gstreamer1.0: upgrade 1.22.6 -> 1.22.7
        gsettings-desktop-schemas: upgrade 44.0 -> 45.0
        harfbuzz: upgrade 8.2.2 -> 8.3.0
        libnotify: upgrade 0.8.2 -> 0.8.3
        libtirpc: upgrade 1.3.3 -> 1.3.4
        mmc-utils: upgrade to latest revision
        puzzles: upgrade to latest revision
        sqlite3: upgrade 3.43.2 -> 3.44.0
        vulkan: upgrade 1.3.261.1 -> 1.3.268.0

  Archana Polampalli (1):
        vim: Upgrade 9.0.2048 -> 9.0.2068

  Arne Schwerdt (1):
        ref-manual: Warn about COMPATIBLE_MACHINE skipping native recipes

  BELHADJ SALEM Talel (8):
        bitbake.conf: Drop DEPLOY_DIR_TAR
        ref-manual: Fix PACKAGECONFIG term and add an example
        dev-manual: layers: Add notes about layer.conf
        ref-manual: variables: add RECIPE_SYSROOT and RECIPE_SYSROOT_NATIVE
        ref-manual: variables: add TOOLCHAIN_OPTIONS variable
        ref-manual: variables: add example for SYSROOT_DIRS variable
        bitbake: Fix find_bbfiles string endswith call
        overview-manual: concepts: Add Bitbake Tasks Map

  Bastian Krause (1):
        linux-firmware: add new fw file to ${PN}-rtl8821

  Bruce Ashfield (22):
        linux-yocto/6.1: update to v6.1.56
        linux-yocto/6.5: update to v6.5.6
        linux-yocto/6.1: tiny: fix arm 32 boot
        linux-yocto/6.5: tiny: fix arm 32 boot
        linux-yocto/6.5: update to v6.5.7
        linux-yocto/6.1: update to v6.1.57
        linux-yocto/6.4: drop recipes
        linux-yocto/6.5: avoid serial port suspend issues
        linux-yocto/6.5: config: remove VIDEO_STK1160_COMMON
        linux-yocto/6.5: serial: core: integrate upstream fixes
        linux-yocto/6.5: update to v6.5.8
        linux-yocto/6.1: update to v6.1.59
        linux-yocto/6.5: update to v6.5.9
        linux-yocto/6.1: update to v6.1.60
        kern-tools: make lower context patches reproducible
        kern-tools: bump SRCREV for queue processing changes
        kern-tools: update SRCREV to include SECURITY.md file
        kernel-yocto: improve metadata patching
        linux-yocto/6.1: cfg: restore CONFIG_DEVMEM
        linux-yocto/6.1: update to v6.1.61
        linux-yocto/6.5: cfg: restore CONFIG_DEVMEM
        linux-yocto/6.5: update to v6.5.10

  Chen Qi (2):
        kernel.bbclass: add preceding space in appendVar setting
        systemd: fix DynamicUser issue

  Chris Laplante (4):
        bitbake: codeparser: replace deprecated ast.Str and 's'
        bitbake: runqueue: set has 'add', not 'append' method
        bitbake: codeparser: add missing 'import os'
        bitbake: codegen: cleanup deprecated AST usages

  Deepthi Hemraj (1):
        binutils: Fix CVE-2022-47007

  Desone Burns (1):
        bitbake: bitbake: fetch2: git: Update Git-LFS download and tests

  Dmitry Baryshkov (11):
        kernel-arch: drop CCACHE from KERNEL_STRIP definition
        meson: use correct targets for rust binaries
        linux-firmware: upgrade 20230804 -> 20231030
        linux-firmware: add missing dependencies on license packages
        linux-firmware: add notice file to sdm845 modem firmware
        linux-firmware: add audio topology symlink to the X13's audio package
        linux-firmware: package firmware for Qualcomm Adreno a702
        linux-firmware: package firmware for Qualcomm QCM2290 / QRB4210
        linux-firmware: package Qualcomm Venus 6.0 firmware
        linux-firmware: package Robotics RB5 sensors DSP firmware
        libdrm: upgrade 2.4.116 -> 2.4.117

  Eero Aaltonen (3):
        base-files, systemd: add nss-resolve plugin
        systemd: add option to use stub-resolv.conf
        ref-manual: add systemd-resolved to distro features

  Etienne Cordonnier (1):
        bitbake: bitbake-worker: add header with length of message

  Fabio Estevam (1):
        packagegroup-core-tools-profile: Remove PROFILE_TOOLS_X

  Fahad Arslan (1):
        linux-firmware: create separate packages

  Felix Moessbauer (1):
        bitbake: fetch2/aws: forward env-vars used in gitlab-ci K8s

  Florian Wickert (1):
        systemd: fix libnss-mymachines packaging

  Glenn Strauss (3):
        lighttpd: upgrade 1.4.71 -> 1.4.72
        lighttpd: update init script
        lighttpd: modernize lighttpd.conf

  Javier Tia (1):
        kernel-arch: use ccache only for compiler

  Jermain Horsman (3):
        lib/oe/buildcfg.py: Include missing import
        lib/oe/buildcfg.py: Remove unused parameter
        lib/bblayers/setupwriters/oe-setup-layers.py: Fix indentation

  Joakim Tjernlund (1):
        sed -i destroys symlinks

  Johannes Schneider (1):
        base-files: profile: allow profile.d to set EDITOR

  Jon Mason (2):
        qemu: drop unreferenced patch
        linux-yocto: Update dtb path for qemuarmv5

  Jose Quaresma (5):
        sstatesig: be more precise and show the full path in exceptions
        systemd: sort packages before pn
        systemd: add systemd-crypt package
        systemd: add cryptsetup-plugins package config
        systemd: add p11kit package config

  Joshua Watt (24):
        goarch: Move Go architecture mapping to a library
        bitbake: asyncrpc: Abstract sockets
        bitbake: hashserv: Add websocket connection implementation
        bitbake: asyncrpc: Add context manager API
        bitbake: hashserv: tests: Add external database tests
        bitbake: asyncrpc: Prefix log messages with client info
        bitbake: bitbake-hashserv: Allow arguments from environment
        bitbake: hashserv: Abstract database
        bitbake: hashserv: Add SQLalchemy backend
        bitbake: hashserv: Implement read-only version of "report" RPC
        bitbake: asyncrpc: Add InvokeError
        bitbake: asyncrpc: client: Prevent double closing of loop
        bitbake: asyncrpc: client: Add disconnect API
        bitbake: hashserv: Add user permissions
        bitbake: hashserv: Add become-user API
        bitbake: hashserv: Add db-usage API
        bitbake: hashserv: Add database column query API
        bitbake: hashserv: test: Add bitbake-hashclient tests
        bitbake: bitbake-hashclient: Output stats in JSON format
        bitbake: bitbake-hashserver: Allow anonymous permissions to be space separated
        bitbake: hashserv: tests: Allow authentication for external server tests
        bitbake: hashserv: Allow self-service deletion
        bitbake: hashserv: server: Add owner if user is logged in
        bitbake: asyncrpc: Add option to set log level when running as a process

  Julien Stephan (10):
        oeqa/selftest/devtool: abort if a local workspace already exists
        oeqa/selftest/devtool: remove spaces on empty line
        recipetool/create_buildsys_python: fix license note
        recipetool/create_buildsys_python: prefix created recipes with python3-
        recipetool/create_buildsys_python: refactor code for future PEP517 addition
        recipetool/create_buildsys_python: add PEP517 support
        oeqa/selftest/recipetool: add selftest for PEP-517 recipe creation
        oeqa/selftest/devtool: fix test_devtool_modify_overrides test
        bitbake: bitbake: utils: remove spaces on empty lines
        bitbake: fetch2: git: add missing destsuffix and subpath parameters in docstrings

  Jérémy Rosen (5):
        insane: Add unimplemented-ptest infrastructure
        insane: Detect python and perl based tests
        insane: Detect build-system test harnesses
        insane: Add a naive heuristic to detect test subdirectories
        ref-manual: Add documentation for the unimplemented-ptest QA warning

  Jörg Sommer (3):
        libtirpc: Support ipv6 in DISTRO_FEATURES
        base-files: Remove localhost ::1 from hosts if ipv6 missing
        package_qa_check_rdepends: Allow /usr/bin/sh if usrmerge

  Khem Raj (23):
        gcompat: Add fcntl64 wrapper
        gcompat: Upgrade to 1.1.0 release
        python3-urllib3: Update to 2.0.6
        llvm: Upgrade to 17.0.3
        shared-mime-info: Fix missing sentinel warning
        openssl: Match target name for riscv64/riscv32
        openssl: Inherit riscv32 config from latomic config on linux
        kernel.bbclass: Use strip utility used for kernel build in do_package
        python3-urllib3: Upgrade to 2.0.7
        qemuriscv: Add to common MACHINE_FEATURES instead of overriding them
        meson: Add check for riscv64 in link template
        machine-sdk: Add SDK_ARCH for riscv64
        uninative.bbclass: Add ldso information for riscv64
        rust-cross-canadian: Add riscv64 to cross-canadian hosts
        cdrtools: Fix build on riscv64
        llvm: Upgrade to 17.0.4 release
        systemd: Make libnss-mymachines conditional upon packageconfig
        ptest-packagelists: Remove strace/valgrind/lttng-tools on riscv32
        libarchive: Add packageconfig knob for libb2
        librsvg: Fix build for riscv32
        librsvg: Enable 64bit atomics in crossbeam again for riscv32
        libsoup: Upgrade to 3.4.2 -> 3.4.4
        llvm: Upgrade to 17.0.5

  Lee Chee Yang (6):
        qemu: ignore RHEL specific CVE-2023-2680
        machine: drop obsolete SERIAL_CONSOLES_CHECK
        documentation.conf: drop SERIAL_CONSOLES_CHECK
        release-notes-4.3: add Repositories / Downloads section
        migration-guide: add release notes for 4.0.14
        migration-guide: add release notes for 4.2.4

  Logan Gunthorpe (1):
        runqemu: Add squashfs filesystem types

  Lukas Funke (5):
        classes: go-vendor: Add go-vendor class
        selftest: recipetool: Add test for go recipe handler
        recipetool: Ignore *.go files while scanning for licenses
        recipetool: Add handler to create go recipes
        udev-extraconf: mount.sh: check if filesystem is supported before mounting

  Malte Schmidt (3):
        systemd: use nonarch libdir for tmpfiles.d
        pam: use nonarch libdir for tmpfiles.d
        sysstat: use nonarch libdir for tmpfiles.d

  Marcus Folkesson (1):
        qemuboot.bbclass: fix typos in documentation

  Markus Fuchs (1):
        systemd: Add 'no-ntp-fallback' PACKAGECONFIG option

  Markus Volk (6):
        libcroco: drop recipe
        gnomebase.bbclass: Use meson as default buildsystem
        ghostscript: Build and install shared lib
        cups: Upgrade 2.4.6 -> 2.4.7
        gtk: Add rdepend on printbackend for cups
        ffmpeg: Upgrade 6.0 -> 6.1

  Marlon Rodriguez Garcia (6):
        bitbake: toaster: updated bootstrap version 3.3.6 -> 3.3.7
        bitbake: toaster: Update bootstrap version to 3.4.1
        bitbake: toaster: update jquery version 2.0.3 -> 3.7.1
        bitbake: toaster: fixed functional test
        bitbake: toaster: add tox.ini file to execute test suite
        bitbake: toaster: replace deprecated tags ifequal and ifnotequal

  Marta Rybczynska (6):
        SECURITY.md: add file
        bitbake: SECURITY.md: add file
        dev-manual: add security team processes
        python3-beartype: upgrade 0.16.2 -> 0.16.4
        python3-spdx-tools: upgrade 0.8.1 -> 0.8.2
        dev-manual: extend the description of CVE patch preparation

  Martin Jansa (13):
        staging.bbclass: process installed dependencies in deterministic order as well
        bitbake.conf: drop ${PE} and ${PR} from -f{file,macro,debug}-prefix-map
        ovmf: drop PE, PR from /usr/src/debug paths
        go-cross-canadian.inc: drop PE, PR from /usr/src/debug paths
        acpica: drop PE, PR from /usr/src/debug paths
        libjpeg-turbo: drop PE, PR from /usr/src/debug paths
        ffmpeg: drop PE, PR from /usr/src/debug paths
        perf: drop PE, PR from /usr/src/debug paths
        rust: drop PE, PR from /usr/src/debug paths
        vulkan-samples: drop PE, PR from /usr/src/debug paths
        valgrind: drop PE, PR from /usr/src/debug paths
        python3-cython: drop PE, PR from /usr/src/debug paths
        igt-gpu-tools: drop PR from /usr/src/debug paths

  Massimiliano Minella (1):
        systemd: update LICENSE statement

  Max Krummenacher (2):
        Revert "bin_package.bbclass: Inhibit the default dependencies"
        perf: fix build with latest kernel

  Meenali Gupta (5):
        avahi: fix CVE-2023-38469
        avahi: fix CVE-2023-38470
        avahi: fix CVE-2023-38471
        avahi: fix CVE-2023-38472
        avahi: fix CVE-2023-38473

  Michael Halstead (1):
        docs: add support for nanbield (4.3) release

  Michael Opdenacker (29):
        manuals: update linux-yocto append examples
        dev-manual: wic: update "wic list images" output
        sdk-manual: appendix-obtain: improve and update descriptions
        manuals: update list of supported machines
        bsp-guide: bsp: skip Intel machines no longer supported in Poky
        brief-yoctoprojectqs: use new CDN mirror for sstate
        dev-manual: start.rst: remove obsolete reference
        local.conf.sample: remove mips edgerouter machine
        oeqa/runtime/cases/parselogs: remove "edgerouter" case
        manuals: correct "yocto-linux" by "linux-yocto"
        test-manual: reproducible-builds: stop mentioning LTO bug
        ref-manual: document KERNEL_LOCALVERSION
        ref-manual: variables: document OEQA_REPRODUCIBLE_TEST_PACKAGE
        migration-guides: updates for 4.3
        migration-guides: mention runqemu change in serial port management
        ref-manual: document KERNEL_STRIP
        migration-guides: further updates for 4.3
        manuals: improve description of CVE_STATUS and CVE_STATUS_GROUPS
        ref-manual: document MESON_TARGET
        ref-manual: document cargo_c class
        ref-manual: variables: mention new CDN for SSTATE_MIRRORS
        ref-manual: variables: add RECIPE_MAINTAINER
        ref-manual: variables: remove SERIAL_CONSOLES_CHECK
        migration-guides: further updates for release 4.3
        bsp-guide: bsp.rst: update beaglebone example
        ref-manual: classes: explain cml1 class name
        migration-guides: fix empty sections
        manuals: fix URL
        ref-manual: releases.svg: update nanbield release status

  Mickael RAMILISON (1):
        scripts/patchreview: Add a custom pattern for finding recipe patches

  Mingli Yu (2):
        openssh: Add sshd.service
        openssh: Don't hardcode the dir in sshd.service

  Niko Mauno (6):
        package_rpm: Fix some pycodestyle issues
        package_rpm: Minor cosmetic and style fixes
        package_rpm: Remove unused definitions
        package_rpm: Allow compression mode override
        image_types.bbclass: Use xz default compression preset level
        ccache.conf: Remove obsolete configuration option

  Paul Barker (1):
        ref-manual: Fix reference to MIRRORS/PREMIRRORS defaults

  Paul Eggleton (12):
        Remove references to apm in MACHINE_FEATURES
        ref-manual: update SDK_NAME variable documentation
        ref-manual: remove semicolons from *PROCESS_COMMAND variables
        release-notes-4.3: fix some typos
        release-notes-4.3: tweaks to existing text
        release-notes-4.3: add CVEs, recipe upgrades, license changes, contributors
        release-notes-4.3: remove the Distribution section
        release-notes-4.3: move new classes to Rust section
        release-notes-4.3: feature additions
        migration-4.3: remove some unnecessary items
        migration-4.3: adjustments to existing text
        migration-4.3: additional migration items

  Pavel Zhukov (1):
        bitbake: tests/fetch.py: Add tests to cover multiple branch/name parameters

  Peter Kjellerstedt (5):
        bb-matrix-plot.sh: Show underscores correctly in labels
        bitbake: command: Make parseRecipeFile() handle virtual recipes correctly
        bitbake: cookerdata: Be consistent with what type bb_data represents
        bitbake: cache: Simplify virtualfn2realfn()
        oeqa/selftest/tinfoil: Add tests that parse virtual recipes

  Peter Marko (1):
        openssl: Upgrade 3.1.3 -> 3.1.4

  Quentin Schulz (2):
        recipes-rt: update README to match newer override syntax
        ref-manual: variables: provide no-match example for COMPATIBLE_MACHINE

  Ragesh Nair (1):
        bitbake: fetch2/git: fix lfs fetch with destsuffix param

  Randy MacLeod (2):
        strace: backport fix for so_peerpidfd-test
        strace: upgrade 6.5 -> 6.6

  Rasmus Villemoes (3):
        perf: lift TARGET_CC_ARCH modification out of security_flags.inc
        valgrind: split helper scripts to separate packages, update dependencies
        perf: add jevents PACKAGECONFIG item

  Richard Purdie (34):
        reproducible: Exclude rust for now again
        linux/cve-exclusion6.1/6.5: Update to latest kernel point releases
        oeqa/qemurunner: Drop newlines serial workaround
        local.conf.sample: Document new CDN mirror for sstate
        poky.conf: Bump version for 4.3 nanbield release
        build-appliance-image: Update to master head revision
        poky.conf: Update to post release versioning
        base: Ensure recipes using mercurial-native have certificates
        qemu: Upgrade 8.1.0 -> 8.1.2
        oeqa/selftest: Drop machines support
        sstate: Ensure sstate searches update file mtime
        insane: Move unpack tests to do_recipe_qa
        go-vendor: Minor style tweaks
        package/package_write: Improve packagedata code location
        debianutils: Fix warnings
        bitbake: runqueue: Fix runall option for setscene tasks
        bitbake: runqueue: Fix errors when using -S printdiff
        oeqa/selftest/sstatetests: Fix intermittent errors and improve performance
        layer.conf: Switch layer to nanbield series only
        libdnf: Fix arm arch mapping issues for qemuarmv5
        linux/cve-exclusion6.1/6.5: Update to latest kernel point releases
        bitbake: Revert "toaster: Bug-fix webdriver No parameter named options"
        vim: Improve locale handling
        selftest/reproducible: Allow packages exclusion via config
        bitbake: runqueue: Move 'cantskip' into sqdata
        bitbake: runqueue: Refactor StaleSetSceneTasks event out of build_scenequeue_data
        bitbake: toaster/tox.ini: Add py 3.11 and 3.12
        bitbake.conf: Drop oldincludedir
        bitbake: cooker: Add support for BB_DEFAULT_EVENTLOG
        bitbake: cooker: Avoid sideeffects for autorev from getAllKeysWithFlags
        oeqa/selftest/sstatetests: Re-enable CDN tests
        bitbake.conf: Log events by default using BB_DEFAULT_EVENTLOG
        package_ipk: Fix Source: field variable dependency
        Revert "binutils: Fix CVE-2022-47007"

  Robert P. J. Day (2):
        dev-manual: new-recipe.rst: add missing parenthesis to "Patching Code" section
        profile-manual: aesthetic cleanups

  Ross Burton (36):
        man-db: add RRECOMMENDS on glibc-utils for iconv
        man-db: remove inexplicable man_db.conf patch
        patchtest: remove unused imports
        patchtest: sort when reading patches from a directory
        linux-yocto: update CVE exclusions
        libxml2: ignore disputed CVE-2023-45322
        zlib: ignore CVE-2023-45853
        cve-check: sort the package list in the JSON report
        cve-check: slightly more verbose warning when adding the same package twice
        pixman: ignore CVE-2023-37769
        scripts/patchreview: rework patch detection
        scripts/contrib/patchreview: add commit and recipe count fields to JSON
        scripts/contrib/patchreview: consolidate imports
        scripts/contrib/patchreview: fix commit identification
        cve-check: don't warn if a patch is remote
        migration-guides: add debian 12 to newly supported distros
        migration-guides: edgerouter machine removed
        migration-guides: QEMU_USE_SLIRP variable removed
        migration-guides: remove non-notable change
        migration-guides: mention LLVM 17
        migration-guides: mention CDN
        migration-guides: add kernel notes
        migration-guides: remove SERIAL_CONSOLES_CHECK
        migration-guides: enabling SPDX only for Poky, not a global default
        migration-guides: add testing notes
        migration-guides: add utility notes
        migration-guides: add BitBake changes
        migration-guides: packaging changes
        migration-guides: git recipes reword
        poky-tiny: fix PACKAGE_EXCLUDE
        Revert "xserver-xorg: Fix for CVE-2023-5574"
        xwayland: upgrade to 23.2.2
        lib/oe/patch: ensure os.chdir restoring always happens
        oeqa/selftest/debuginfod: improve selftest
        shared-mime-info: embed PV in the filename
        rust-llvm: remove python3native dependency

  Rouven Czerwinski (1):
        glib-2.0: Remove unnecessary assignment

  Sean Nyekjaer (3):
        rust-cross-canadian: set CARGO_TARGET_<triple>_RUSTFLAGS
        rust-cross-canadian: set CARGO_TARGET_<triple>_RUNNER for nativesdk
        oeqa/sdk/rust: Add build and run test of rust binary with SDK host

  Sergei Zhmylev (1):
        classes: Move package RDEPENDS processing out of debian.bbclass

  Siddharth Doshi (2):
        vim: Upgrade 9.0.1894 -> 9.0.2009
        vim: Upgrade 9.0.2009 -> 9.0.2048

  Stefan Herbrechtsmeier (2):
        glibc: use nonarch libdir for tmpfiles.d
        classes: go-mod: do not pack go mod cache

  Steve Sakoman (1):
        vim: use upstream generated .po files

  Stéphane Veyret (2):
        volatile-binds: Allow creation of subdirectories
        volatile-binds: Calculate the name of the /var/lib service

  Thomas Perrot (1):
        opensbi: Upgrade to 1.3.1 release

  Thomas Wolber (1):
        kea: drop unused directory

  Tim Orling (9):
        recipetool: add python_hatchling support
        lsb-release: use https for UPSTREAM_CHECK_URI
        bitbake: toaster: drop deprecated USE_L10N from settings
        bitbake: toaster: use docs for BitBake link on landing page
        bitbake: toaster: fix obsolete use of find_element_by_link_text
        bitbake: toaster: test_create_new_project typos, whitespace
        python3-hypothesis: upgrade 6.88.3 -> 6.89.0
        python3-setuptools-scm: upgrade 7.1.0 -> 8.0.4
        python3-poetry-core: upgrade 1.7.0 -> 1.8.1

  Trevor Gamblin (30):
        patchtest: improve test issue messages
        patchtest: clean up test suite
        patchtest/requirements.txt: update
        patchtest: add supporting modules
        patchtest: add scripts to oe-core
        patchtest: set default repo and testdir targets
        patchtest: update SPDX identifiers
        patchtest/selftest: fix command arguments
        patchtest: check for untracked changes
        patchtest: test regardless of mergeability
        patchtest: skip merge test if not targeting master
        contributor-guide: add patchtest section
        contributor-guide: clarify patchtest usage
        patchtest: fix lic_files_chksum test regex
        patchtest-send-results: improve subject line
        patchtest: disable merge test
        patchtest-send-results: check max line length, simplify responses
        patchtest/selftest: add XSKIP, update test files
        patchtest: simplify test directory structure
        patchtest: reduce checksum test output length
        patchtest: shorten test result outputs
        patchtest-send-results: send results to submitter
        patchtest-send-results: add In-Reply-To
        patchtest: make pylint tests compatible with 3.x
        patchtest: remove test for CVE tag in mbox
        patchtest-send-results: fix sender parsing
        patchtest: rework license checksum tests
        python3-mako: upgrade 1.2.4 -> 1.3.0
        python3-trove-classifiers: upgrade 2023.10.18 -> 2023.11.14
        python3-numpy: upgrade 1.26.0 -> 1.26.2

  Vijay Anusuri (1):
        xserver-xorg: Fix for CVE-2023-5574

  Vincent Davis Jr (1):
        acpica: add nativesdk to BBCLASSEXTEND

  Vyacheslav Yurkov (1):
        lib/oe/path: Deploy files can start only with a dot

  Wang Mingyu (79):
        openssh: upgrade 9.4p1 -> 9.5p1
        bluez5: upgrade 5.69 -> 5.70
        btrfs-tools: upgrade 6.5.1 -> 6.5.2
        createrepo-c: upgrade 1.0.0 -> 1.0.1
        dhcpcd: upgrade 10.0.2 -> 10.0.3
        ell: upgrade 0.58 -> 0.59
        kmod: upgrade 30 -> 31
        libcomps: upgrade 0.1.19 -> 0.1.20
        libsdl2: upgrade 2.28.3 -> 2.28.4
        libubootenv: upgrade 0.3.4 -> 0.3.5
        ltp: upgrade 20230516 -> 20230929
        libva: upgrade 2.19.0 -> 2.20.0
        python3-git: upgrade 3.1.36 -> 3.1.37
        python3-babel: upgrade 2.12.1 -> 2.13.0
        python3-beartype: upgrade 0.15.0 -> 0.16.2
        python3-cffi: upgrade 1.15.1 -> 1.16.0
        python3-hypothesis: upgrade 6.86.2 -> 6.87.4
        python3-iso8601: upgrade 2.0.0 -> 2.1.0
        python3-markdown: upgrade 3.4.4 -> 3.5
        python3-packaging: upgrade 23.1 -> 23.2
        python3-pycairo: upgrade 1.24.0 -> 1.25.0
        python3-ruamel-yaml: upgrade 0.17.32 -> 0.17.35
        xkeyboard-config: upgrade 2.39 -> 2.40
        python3-wcwidth: upgrade 0.2.6 -> 0.2.8
        repo: upgrade 2.36.1 -> 2.37
        shared-mime-info: upgrade 2.2 -> 2.3
        sqlite3: upgrade 3.43.1 -> 3.43.2
        stress-ng: upgrade 0.16.05 -> 0.17.00
        base-passwd: upgrade 3.6.1 -> 3.6.2
        createrepo-c: upgrade 1.0.1 -> 1.0.2
        cronie: upgrade 1.6.1 -> 1.7.0
        dhcpcd: upgrade 10.0.3 -> 10.0.4
        enchant2: upgrade 2.6.1 -> 2.6.2
        btrfs-tools: upgrade 6.5.2 -> 6.5.3
        debianutils: upgrade 5.13 -> 5.14
        gpgme: upgrade 1.22.0 -> 1.23.1
        harfbuzz: upgrade 8.2.1 -> 8.2.2
        libdnf: upgrade 0.71.0 -> 0.72.0
        libical: upgrade 3.0.16 -> 3.0.17
        libjpeg-turbo: upgrade 3.0.0 -> 3.0.1
        libnewt: upgrade 0.52.23 -> 0.52.24
        libnsl2: upgrade 2.0.0 -> 2.0.1
        lighttpd: upgrade 1.4.72 -> 1.4.73
        msmtp: upgrade 1.8.24 -> 1.8.25
        ghostscript: upgrade 10.02.0 -> 10.02.1
        glib-2.0: upgrade 2.78.0 -> 2.78.1
        python3-pyrsistent: upgrade 0.19.3 -> 0.20.0
        python3-babel: upgrade 2.13.0 -> 2.13.1
        python3-gitdb: upgrade 4.0.10 -> 4.0.11
        python3-git: upgrade 3.1.37 -> 3.1.40
        python3-hypothesis: upgrade 6.87.4 -> 6.88.1
        python3-pip: upgrade 23.2.1 -> 23.3.1
        python3-psutil: upgrade 5.9.5 -> 5.9.6
        python3-pycairo: upgrade 1.25.0 -> 1.25.1
        python3-pyopenssl: upgrade 23.2.0 -> 23.3.0
        python3-pytest: upgrade 7.4.2 -> 7.4.3
        python3-setuptools-rust: upgrade 1.7.0 -> 1.8.1
        python3-testtools: upgrade 2.6.0 -> 2.7.0
        python3-trove-classifiers: upgrade 2023.9.19 -> 2023.10.18
        python3-wcwidth: upgrade 0.2.8 -> 0.2.9
        python3-wheel: upgrade 0.41.2 -> 0.41.3
        shaderc: upgrade 2023.6 -> 2023.7
        xserver-xorg: upgrade 21.1.8 -> 21.1.9
        python3-cryptography(-vectors): upgrade 41.0.4 -> 41.0.5
        dhcpcd: upgrade 10.0.4 -> 10.0.5
        diffoscope: upgrade 249 -> 251
        git: upgrade 2.42.0 -> 2.42.1
        iproute2: upgrade 6.5.0 -> 6.6.0
        libsdl2: upgrade 2.28.4 -> 2.28.5
        libsolv: upgrade 0.7.25 -> 0.7.26
        libuv: upgrade 1.46.0 -> 1.47.0
        bash: upgrade 5.2.15 -> 5.2.21
        dnf: upgrade 4.17.0 -> 4.18.1
        python3-hatch-vcs: upgrade 0.3.0 -> 0.4.0
        python3-hypothesis: upgrade 6.88.1 -> 6.88.3
        python3-pbr: upgrade 5.11.1 -> 6.0.0
        python3-testtools: upgrade 2.7.0 -> 2.7.1
        shared-mime-info: upgrade 2.3 -> 2.4
        stress-ng: upgrade 0.17.00 -> 0.17.01

  William A. Kennington III (1):
        kernel: Commit without running hooks

  William Lyu (2):
        perl: fix intermittent test failure
        openssl: improve handshake test error reporting

  Xiangyu Chen (4):
        linux-yocto: make sure pahole-native is available before do_kernel_configme
        grub: Fix for CVE-2023-4692 and CVE-2023-4693
        sudo: upgrade 1.9.14p3 -> 1.9.15p2
        openssh: add systemd readiness notification support

  Yoann Congal (4):
        insane: skip unimplemented-ptest on S=WORKDIR recipes
        insane: unimplemented-ptest: ignore source file errors
        selftest/reproducible: Split a long line
        meta-selftest/files: add xuser to static-passwd/-group

  david d zuhn (1):
        bitbake.conf: remove ${CCACHE} from FORTRAN compiler

  luca fancellu (1):
        oeqa/ssh: Handle SSHCall timeout error code

meta-arm: e914891eee..1dff3300fb:
  Abdellatif El Khlifi (6):
        arm-bsp/linux-yocto: corstone1000: bump to v6.5%
        arm-bsp/documentation: corstone1000: enable debug-tweaks
        arm-bsp/documentation: corstone1000: update the release note
        arm-bsp/documentation: corstone1000: update the change log
        arm-bsp/documentation: corstone1000: update the user guide
        kas: corstone1000: pin the SHAs

  Ali Can Ozaslan (1):
        arm-bsp/documentation: corstone1000: Update the user guide

  Debbie Martin (10):
        arm-bsp/u-boot: Divide the U-boot configuration by machine
        arm-bsp/fvp-base: Merge fvp-common.inc into fvp-base.conf
        arm-bsp/trusted-firmware-a/fvp-base: Add stdout path and virtio net and rng
        arm-bsp/u-boot/fvp-base: Configure FVP base U-boot machine and enable U-boot sysreset, CRC-32 and virtio RNG
        arm-bsp/fvp-base: Configure grub as the EFI provider
        arm/fvp-base: Update the default testsuites
        arm-systemready: Introduce the Arm SystemReady layer
        arm-bsp/systemready: Bring up the Arm SystemReady IR ACS 2.0 suite on FVP base
        kas: Add kas configuration for Arm SystemReady and fvp-base
        ci: Add fvpboot to IMAGE_CLASSES

  Delane Brandy (1):
        arm-bsp/documentation: corstone1000: Update the user guide

  Drew Reed (2):
        arm-bsp: Enable TF-A test building for the N1SDP
        CI: Enable TF-A TFTF test builds

  Emekcan Aras (17):
        arm-bsp/u-boot: corstone1000: enable on-disk capsule update
        arm-bsp/u-boot: corstone1000: fix runtime capsule update flag checks
        arm-bsp/trusted-firmware-m: fix capsule update alignment
        arm-bsp/trusted-firmware-m: update the upstream status of the out-of-tree patches
        arm-bsp/u-boot: corstone1000: scatter gather list workaround for ondisk capsule update
        arm-bsp/trusted-services: enable signaled handling interrupts for SPs
        arm-bsp/corstone1000: fix synchronization issue on openamp notification
        arm/fvp-corstone1000: upgrade to 11.23_25
        arm-bsp/corstone1000-fvp: Add virtio-net configuration
        arm-bsp/corstone1000-fvp: add unpadded image support for MMC card config
        arm-bsp/corstone1000-fvp: Disable Time Annotation
        arm-bsp/u-boot: corstone1000: enable virtio-net support for FVP
        arm-bsp/documentation: corstone1000: update the architecture document
        arm-bsp/documentation: corstone1000: Add EFI system partition section
        arm-bsp/documentation: corstone1000: add a note and fix instructions
        arm-bsp/documentation: corstone1000: add readthedocs.yaml file
        arm-bsp/documentation: corstone1000: fix the requirements.txt and conf.py path

  Harsimran Singh Tungal (4):
        arm-bsp/u-boot: corstone1000: Remove External system patches
        arm-bsp/linux: corstone1000: update the defconfig
        arm-bsp/linux: corstone1000: Remove External system patches
        arm-bsp/images: corstone1000: Remove the external system test package

  Javier Tia (1):
        trusted-firmware-a: fix build error when using ccache

  Jon Mason (10):
        arm-bsp/linux-yocto: add recipe for v6.4 kernel
        arm/linux-yocto: remove defconfig patch
        CI: add sbsa-acs to recipe report
        arm/linux-yocto: remove PHYS_VIRT config frag
        arm-bsp/optee: remove 3.18 recipes and patches
        arm-bsp/edk2: remove 202211
        arm/hafnium: update to v2.9
        arm/optee: update to 4.0.0
        arm/optee: cleanups from code review
        arm/toolchains: update to 13.2.Rel1

  Mariam Elshakfy (3):
        arm-bsp/n1sdp: Move OP-TEE to DDR4
        arm-bsp/n1sdp: Enable OP-TEE cache in N1SDP
        arm-bsp/corstone1000: Remove inappropriate kernel delay patch

  Ross Burton (24):
        arm/oeqa/selftest: tag all tests with "meta-arm"
        CI: don't hardcode the selftest tests to run
        CI: also run the _qemutiny testcase for poky-tiny
        CI: track nanbield branches
        arm/fvp-corstone1000: upgrade to 11.22.35, add aarch64 binaries
        kas/corstone1000: don't limit the FVP use to x86-64
        CI: don't pin corstone1000-fvp to x86-64
        CI: build both aarch64 and x86-64 packages for as many FVPs as possible
        arm-bsp/u-boot: remove 2023.01
        arm/trusted-firmware-a: update mbedtls to recommended release
        CI: Add meta-secure-core to pending-upgrades for corstone1000
        arm-bsp: corstone1000 depends on meta-efi-secure-boot
        arm/generic-arm64: remove obsolete SERIAL_CONSOLES_CHECK
        arm/lib/fvp/runner: don't pass '' as cwd
        scripts/runfvp: exit code should be the FVP exit code
        arm/selftest: add test that DISPLAY is forwarded into the runfvp child
        CI: use nanbield branch for meta-virtualization
        CI: use nanbield branch of meta-clang
        arm/optee: handle CVE-2021-36133 as disputed
        arm-bsp/optee-os: backport fix for CVE-2023-41325
        arm/fvp-base-a-aem: upgrade to 11.23.9
        arm-bsp/fvp-base: upgrade tune to v8.4
        arm-bsp/trusted-firmware-a: use v8.4 instructions on fvp-base
        arm-bsp/optee-os: update Upstream-Status tags

  Vikas Katariya (1):
        arm-bsp/corstone1000: Fix RSA key generation issue

  Xueliang Zhong (2):
        Update Corstone-1000 doc with security issue reporting guideline
        arm-bsp/n1sdp: update to linux yocto kernel 6.5

meta-raspberrypi: 482d864b8f..8231f97534:
  Andrei Gherzan (1):
        docs: Fix ReadTheDocs builds.os requirement

  Carlos Alberto Lopez Perez (1):
        linux-raspberrypi: stop setting powersave as the default CPU governor

  Jose Quaresma (2):
        linux-raspberrypi/linux-raspberrypi-v7: drop 5.10 version
        rpi-base: Adds EXTRA_IMAGEDEPENDS to fix the image task do_populate_lic_deploy

  Khem Raj (1):
        linux-raspberrypi_6.1.bb: Update to 6.1.61 release

  Leon Anavi (2):
        rpi-config: Upgrade to tip of tree
        rpi-config: reintroduce start_x

  Matthew Draws (1):
        rpi-eeprom: Update to 2023.10.18-2712

  Vincent Davis Jr (1):
        rpidistro-vlc: add new patch po-Fix-typos-in-oc

meta-openembedded: 62039a2c33..991e6852a5:
  Akash Hadke (1):
        libeigen: Update GPL-3.0-only to GPL-2.0-only

  Alex Kiernan (2):
        reptyr: Add 0.10.0
        mdns: Upgrade 2200.0.8 -> 2200.40.37.0.1

  Alper Ak (1):
        unionfs-fuse: upgrade 2.2 -> 3.4

  Andrew Jeffery (1):
        mdio-tools: Add virtual/kernel dependency to avoid stale SPDX reference

  Armin Kuster (4):
        netkit: Drop old and no upstream
        MAINTAINERS: drop netkit
        README: drop netkit maintainer
        pkggrp: drop netkit

  Arthur Oliveira (5):
        python3-objectpath: Add ObjectPath Python Recipe
        python3-flask-restx: Add Flask-RestX Python Recipe
        python3-zopeevent: Add Zope.Event Python Recipe
        python3-aniso8601: Add ISO 8601 parsing library
        python3-flask-restx: Switch dependency from isodate to aniso8601

  Bartosz Golaszewski (5):
        shunit2: new recipe
        libgpiod: update to v2.1
        python3-gpiod: update to v2.1.3
        python3-gpiod: setup target config in ptest compile
        python3-gpiod: fix the required version of libgpiod

  Beniamin Sandu (2):
        mbedtls: upgrade 3.4.1 -> 3.5.0
        unbound: upgrade 1.18.0 -> 1.19.0

  Benjamin Bouvier (1):
        libsmi: enable native build

  Carlos Alberto Lopez Perez (1):
        libbacktrace: Update version and enable shared library.

  Charles Perry (4):
        libosip2: add recipe
        libexosip2: add recipe
        libexosip2: add c-ares and openssl PACKAGECONFIG
        libexosip2: package binaries in a separate package

  Chi Xu (1):
        re2: Add ptest support

  Christian Eggers (1):
        python3-gcovr: switch to main branch

  Christophe Vu-Brugier (1):
        exfatprogs: upgrade 1.2.1 -> 1.2.2

  Clément Péron (2):
        proj: Upgrade to 9.3.0 release
        pcapplusplus: Add recipe for 23.09 release

  Daiane Angolini (1):
        wireguard-tools: Use PACKAGECONFIG to select wg-quick and bash-completion

  Daniel McGregor (1):
        python3-pylint: allow native build

  Daniel Semkowicz (2):
        cockpit: Fix cockpit-askpass path
        cockpit: Bump to version 304

  David Pierret (3):
        libtext: add ptest
        cjson: Add ptest
        python3-rapidjson: add missing ptest dependency

  Edi Feschiyan (1):
        libbytesize: update SRC_URI

  Etienne Cordonnier (1):
        uutils-coreutils: upgrade 0.0.21 -> 0.0.22

  Fabien Thomas (2):
        klibc/klibc.inc: Add DEBUG_PREFIX_MAP flag.
        samba.bb: Disable ad-dc by default

  Fabio Estevam (5):
        edid-decode: Upgrade to latest master
        openocd: Use https for github
        python3-piccata: Use https for github
        multipath-tools: Use https for github
        crucible: Upgrade to 2023.11.02

  Gianfranco Costamagna (3):
        vboxguestdrivers: upgrade 7.0.10 -> 7.0.12
        cpulimit: add DESCRIPTION field
        dlt-daemon: cherry-pick another upstream-proposed patch

  Hains van den Bosch (1):
        libebml: Enable shared libraries

  Jamin Lin (1):
        Brotli: fix build failure if the path includes "-static"

  Jan Claußen (1):
        btop: Add recipe

  Jan Vermaete (3):
        netdata: chown in systemd service with ':' instead of '.'
        netdata: version bump 1.43.0 -> 1.43.2
        README.md: was a Markdown paragraph and should be a list

  Jeffrey Pautler (1):
        apache2: add vendor to product name used for CVE checking

  Joe Slater (2):
        python3-pynacl: add RCONFLICTS with python3-nacl
        python3-django: move to version 4.2.5

  Johannes Kauffmann (1):
        open62541: update to v1.3.8

  Johnathan Mantey (1):
        ipmitool: Update and eliminate unneeded patch

  Jonas Gorski (1):
        frr: fix CVEs CVE-2023-4675{2,3} and CVE-2023-4723{4,5}

  Jose Quaresma (4):
        ostree: Upgrade 2023.5 -> 2023.6
        ostree: drop trivial-httpd-cmdline
        ostree: add ed25519-openssl
        ostree: Upgrade 2023.6 -> 2023.7

  Kai Kang (4):
        xfce4-panel-profiles: 1.0.13 -> 1.0.14
        python3-nacl: drop duplicate recipe
        python3-blivet: 3.4.3 -> 3.8.2
        python3-blivetgui: 2.3.0 -> 2.4.2

  Khem Raj (209):
        libnet-idn-encode: Fix build with perl 5.38 and gcc13
        poco: Fix data race when creating POSIX thread
        static-group: Match nogroup id to base-passwd from core.
        gutenprint: Upgrade to 5.3.4
        meta-perl: Add libtext-diff-perl to fast ptest list
        leveldb: Upgrade to 1.23 plus latest git
        meta-python: Add python3-rapidjson to PTESTS_FAST_META_PYTHON
        leveldb: Print uint64_t with PRI64
        network-manager-applet,networkmanager-openvpn, networkmanager: Apply linker versioning patch when using lld only
        emlog: Add PV
        ccid: upgrade 1.5.2 -> 1.5.4
        jack: upgrade 1.19.22 -> 2
        abseil-cpp: upgrade 20230802.0 -> 20230802.1
        xterm: upgrade 387 -> 388
        toybox: upgrade 0.8.8 -> 0.8.10
        pahole: upgrade 1.24 -> 1.25
        gcab: upgrade 1.4 -> 1.6
        feh: upgrade 3.10 -> 3.10.1
        xmlsec1: upgrade 1.2.37 -> 1.3.2
        xmlsec1: Fix the key name in verify2 test
        ctags: upgrade 6.0.20231001.0 -> 6.0.20231029.0
        googlebenchmark: upgrade 1.8.0 -> 1.8.3
        opencl-headers: upgrade 04.17 -> 2023.04.17
        thingsboard-gateway: upgrade 3.4.1 -> 3.4.2
        neatvnc: upgrade 0.6.0 -> 0.7.0
        lastlog2: upgrade 1.1.0 -> 1.2.0
        libmbim: upgrade 1.30.0 -> 1.31.1
        ser2net: upgrade 4.3.13 -> 4.5.0
        fio: upgrade 3.32 -> 2022
        libosinfo: upgrade 1.10 -> 1.11.0
        webkitgtk3: upgrade 2.42.0 -> 2.42.1
        mstpd: upgrade 0.1 -> 0.05
        smarty: upgrade 4.3.0 -> 4.3.4
        geos: upgrade 3.12.0 -> 3.12.0beta2
        wtmpdb: upgrade 0.7.1 -> 0.9.3
        lsscsi: upgrade 0.32 -> 030
        glibmm-2.68: upgrade 2.74.0 -> 2.78.0
        mcelog: upgrade 194 -> 196
        libfastjson: upgrade 0.99.9 -> 1.2304.0
        libraw: upgrade 0.20.2 -> 0.21.1
        cairomm-1.16: upgrade 1.16.2 -> 1.18.0
        libbpf: upgrade 1.2.0 -> 1.2.2
        libtorrent: upgrade 0.13.8 -> 1
        modemmanager: upgrade 1.22.0 -> 1.23.1
        c-ares: upgrade 1.20.1 -> 1.21.0
        pmdk: upgrade 1.12.1 -> 2.0.0
        hwdata: upgrade 0.370 -> 0.375
        mksh: upgrade 59 -> R59c
        sdbus-c++: upgrade 1.3.0 -> 1.4.0
        cjson: upgrade 1.7.15 -> 1.7.16
        uftrace: upgrade 0.13.1 -> 0.14
        python3-trustme: upgrade 0.9.0 -> 1.1.0
        python3-eth-utils: upgrade 2.2.2 -> 2.3.0
        python3-xstatic-font-awesome: upgrade 4.7.0.0 -> 6.2.1.1
        python3-process-tests: upgrade 2.1.2 -> 3.0.0
        python3-pyperf: upgrade 2.6.1 -> 2.6.2
        python3-sentry-sdk: upgrade 1.26.0 -> 1.34.0
        python3-websockets: upgrade 11.0.3 -> 12.0
        python3-alembic: upgrade 1.12.0 -> 1.12.1
        python3-pymisp: upgrade 2.4.176 -> 2.4.178
        python3-traitlets: upgrade 5.11.2 -> 5.13.0
        python3-pytest-mock: upgrade 3.11.1 -> 3.12.0
        python3-kivy: upgrade 2.1.0 -> 2.2.1
        python3-web3: upgrade 6.11.1 -> 6.11.2
        python3-m2crypto: upgrade 0.39.0 -> 0.40.1
        python3-rapidjson: upgrade 1.12 -> 1.13
        python3-eth-typing: upgrade 3.5.0 -> 3.5.1
        python3-email-validator: upgrade 2.0.0 -> 2.1.0
        python3-icu: upgrade 2.11 -> 2.12
        python3-virtualenv: upgrade 20.24.5 -> 20.24.6
        python3-tzlocal: upgrade 5.1 -> 5.2
        python3-cantools: upgrade 39.2.0 -> 39.3.0
        python3-flask-login: upgrade 0.6.2 -> 0.6.3
        python3-argcomplete: upgrade 3.1.2 -> 3.1.4
        python3-wxgtk4: upgrade 4.2.0 -> 4.2.1
        python3-meson-python: upgrade 0.14.0 -> 0.15.0
        python3-pymongo: upgrade 4.5.0 -> 4.6.0
        python3-imgtool: upgrade 1.10.0 -> 2.0.0
        python3-google-api-python-client: upgrade 2.104.0 -> 2.106.0
        python3-tornado: upgrade 6.3 -> 6.3.3
        python3-imageio: upgrade 2.31.5 -> 2.31.6
        python3-blinker: upgrade 1.6.3 -> 1.7.0
        python3-pyhamcrest: upgrade 2.0.4 -> 2.1.0
        python3-pytest-asyncio: upgrade 0.21.1 -> 0.22.0
        python3-pyjwt: upgrade 2.7.0 -> 2.8.0
        python3-bitstruct: upgrade 8.18.0 -> 8.19.0
        python3-filelock: upgrade 3.12.4 -> 3.13.1
        python3-sqlalchemy: upgrade 2.0.22 -> 2.0.23
        python3-greenlet: upgrade 2.0.2 -> 3.0.1
        python3-charset-normalizer: upgrade 3.3.0 -> 3.3.2
        python3-cbor2: upgrade 5.4.6 -> 5.5.1
        python3-cbor2: Add missing hypothesis rdep for ptests
        python3-asttokens: upgrade 2.4.0 -> 2.4.1
        python3-xlsxwriter: upgrade 3.1.8 -> 3.1.9
        python3-cachetools: upgrade 5.3.1 -> 5.3.2
        python3-paramiko: upgrade 3.2.0 -> 3.3.1
        python3-tomlkit: upgrade 0.12.1 -> 0.12.2
        python3-eth-account: upgrade 0.9.0 -> 0.10.0
        python3-reedsolo: upgrade 1.7.0 -> 2.0.13
        python3-shellingham: upgrade 1.5.3 -> 1.5.4
        python3-ipython: upgrade 8.16.1 -> 8.17.2
        python3-argh: upgrade 0.29.4 -> 0.30.3
        python3-executing: upgrade 2.0.0 -> 2.0.1
        python3-pylint: upgrade 3.0.1 -> 3.0.2
        python3-google-auth: upgrade 2.23.3 -> 2.23.4
        libtest-harness-perl: upgrade 3.47 -> 3.48
        libmodule-build-tiny-perl: upgrade 0.046 -> 0.047
        libdbd-sqlite-perl: upgrade 1.72 -> 1.74
        libconfig-tiny-perl: upgrade 2.29 -> 2.30
        libcgi-perl: upgrade 4.57 -> 4.60
        ipset: upgrade 7.15 -> 7.19
        openvpn: upgrade 2.6.3 -> 2.6.6
        nng: upgrade 1.5.2 -> 12
        usrsctp: upgrade to latest revision
        python3-scapy: upgrade to latest revision
        wolfssl: upgrade 5.5.4 -> 5.6.4
        tnftp: upgrade 20210827 -> 20230507
        fluidsynth: upgrade 2.3.2 -> 2.3.4
        libuvc: upgrade 0.0.6 -> 0.0.7
        libdc1394: upgrade 2.2.6 -> 2.2.7
        ncmpc: upgrade 0.47 -> 0.49
        gerbera: upgrade 1.11.0 -> 1.12.1
        gst-shark: upgrade 0.7.3.1 -> 0.8.1
        gupnp-av: upgrade 0.14.0 -> 0.14.1
        libmediaart-2.0: upgrade 1.9.5 -> 1.9.6
        libdvbpsi: upgrade 1.3.0 -> 1.3.3
        fdk-aac: upgrade 2.0.1 -> 2.0.2
        libavif: upgrade 0.11.1 -> 1.0.1
        libdvdcss: upgrade 1.4.2 -> 1.4.3
        aom: upgrade 3.6.1 -> 3.7.0
        aom: Disable neon when building on arm
        dav1d: upgrade 1.2.0 -> 1.3.0
        network-manager-applet: upgrade 1.32.0 -> 1.34.0
        gvfs: upgrade 1.52.0 -> 1.52.1
        gnome-text-editor: upgrade 45.0 -> 45.1
        libwacom: upgrade 2.6.0 -> 2.8.0
        evolution-data-server: upgrade 3.50.0 -> 3.50.1
        orage: upgrade 4.16.0 -> 4.18.0
        xfce4-systemload-plugin: upgrade 1.3.1 -> 1.3.2
        xfce4-screenshooter: upgrade 1.10.3 -> 1.10.4
        xfce4-appfinder: upgrade 4.18.0 -> 4.19.1
        xfce4-netload-plugin: upgrade 1.4.0 -> 1.4.1
        thunar-shares-plugin: upgrade 0.3.1 -> 0.3.2
        xfce4-battery-plugin: upgrade 1.1.4 -> 1.1.5
        xfce4-places-plugin: upgrade 1.8.1 -> 1.8.3
        libxfce4util: upgrade 4.18.1 -> 4.19.2
        xfce4-notes-plugin: upgrade 1.9.0 -> 1.10.0
        xfce4-weather-plugin: upgrade 0.11.0 -> 0.11.1
        thunar: upgrade 4.18.4 -> 4.19.0
        catfish: upgrade 4.16.3 -> 4.18.0
        xfce4-time-out-plugin: upgrade 1.1.2 -> 1.1.3
        thunar-archive-plugin: upgrade 0.5.1 -> 0.5.2
        xfce4-timer-plugin: upgrade 1.7.1 -> 1.7.2
        xfce4-calculator-plugin: upgrade 0.7.1 -> 0.7.2
        xfmpc: upgrade 0.3.0 -> 0.3.1
        garcon: upgrade 4.18.1 -> 4.19.0
        xfce4-genmon-plugin: upgrade 4.1.1 -> 4.2.0
        xfce4-fsguard-plugin: upgrade 1.1.2 -> 1.1.3
        xfce4-cpugraph-plugin: upgrade 1.2.7 -> 1.2.8
        parole: upgrade 4.16.0 -> 4.18.0
        xfce4-datetime-plugin: upgrade 0.8.1 -> 0.8.3
        menulibre: upgrade 2.2.3 -> 2.3.2
        xfce4-pulseaudio-plugin: upgrade 0.4.3 -> 0.4.8
        libxfce4ui: upgrade 4.18.3 -> 4.19.3
        xfce4-taskmanager: upgrade 1.5.5 -> 1.5.6
        xfce4-mpc-plugin: upgrade 0.5.2 -> 0.5.3
        mousepad: upgrade 0.5.9 -> 0.6.1
        gigolo: upgrade 0.5.2 -> 0.5.3
        xfce4-verve-plugin: upgrade 2.0.1 -> 2.0.3
        exo: upgrade 4.18.0 -> 4.19.0
        xfce4-mailwatch-plugin: upgrade 1.3.0 -> 1.3.1
        xarchiver: upgrade 0.5.4.17 -> 0.5.4.21
        xfsprogs: upgrade 6.1.1 -> 6.5.0
        xfstests: upgrade 2023.03.05 -> 2023.10.29
        xfstests: Fix build with clang17
        xfstests: Fix build on musl
        ufs-utils: upgrade to latest revision
        xfce4-systemload-plugin: Fix build on 32bit machines
        libsodium: upgrade 1.0.18 -> 1.0.19
        libsodium: Fix build with clang on aarch64
        Revert "modemmanager: upgrade 1.22.0 -> 1.23.1"
        modemmanager: inherit upstream-version-is-even
        Revert "geos: upgrade 3.12.0 -> 3.12.0beta2"
        emlog: Drop SRCPV
        makedumpfile: Change COMPATIBLE_HOST check to exclude unsupported arches
        packagegroup-meta-oe: Update makedumpfile architecture support list
        gupnp: Add missing rdep on python3-core
        vte9: Upgrade to 0.74.1
        rygel: Upgrade 0.40.4 -> 0.42.4
        vte9: Add knob for enabling systemd
        meta-networking: Use autotools make system
        meta-oe: Use autotools make system
        toscoterm: Skip recipe, slated for removal
        loudmouth: Upgrade to 1.5.4
        toscoterm: Delete recipe
        librest: Use autotools make system
        cannelloni: Fix build with clang and libc++ runtime
        gnome-console: Add missing dependency on gtk4-native
        gnome-terminal: Add missing dependency on libhandy
        dleyna-core: Update to tip of master
        dleyna: Skip all dleyna recipes, slated for removal
        packagegroup-meta-multimedia: Remove dleyna recipes
        beep: Upgrade to 1.4.12
        yelp: Use autotools for build system
        gstd: Upgrade to 0.15.0
        gimp: Update to 2.10.36
        projucer: Refresh patch to apply cleanly
        ledmon: Fix systemd unit install
        libxml++-5.0: Make use of gnomebase bbclass

  LI Qingwu (1):
        kmsxx: Add recipe

  Lei Maohui (1):
        gexiv2: Fix do_package QA issue when usrmerge enabled.

  Leon Anavi (32):
        sip: upgrade 6.7.11 -> 6.7.12
        python3-rarfile: add recipe
        python3-colorclass: add recipe
        python3-inflate64: add recipe
        python3-jsbeautifier: add recipe
        python3-pymemcache: add recipe
        python3-multivolumefile: add recipe
        python3-oletools: add recipe
        python3-olefile: add recipe
        python3-pcodedmp: add recipe
        python3-screeninfo: add recipe
        python3-unoconv: add recipe
        python3-pybcj: add recipe
        python3-pyppmd: add recipe
        python3-py7zr: add recipe
        python3-wand: add recipe
        python3-pdm-backend: add recipe
        python3-pdm: add recipe
        python3-jsonref: Upgrade 1.0.1 -> 1.1.0
        imlib2: Upgrade 1.7.1 -> 1.12.1
        libblockdev: Upgrade 3.0.3 -> 3.0.4
        exiftool: add recipe
        bindfs: add recipe
        qpdf: Update 10.6.3 -> 11.6.3
        python3-file-magic: add recipe
        python3-wrapt: Upgrade 1.15.0 -> 1.16.0
        python3-bitarray: Upgrade 2.8.2 -> 2.8.3
        python3-pillow: Upgrade 10.0.1 -> 10.1.0
        python3-polyline: upgrade 1.4.0 -> 2.0.1
        python3-py7zr: Upgrade 0.20.7 -> 0.20.8
        python3-zeroconf: upgrade 0.120.0 -> 0.126.0
        python3-pystemd: upgrade 0.10.0 -> 0.13.2

  Luca Fancellu (5):
        linuxptp: update linuxptp recipe to 4.1
        linuxptp: install default configuration file in sysconfdir
        linuxptp: add systemd services
        linuxptp: Drop unneeded downstream patches
        linuxptp: Use templates for the systemd services

  Marek Vasut (2):
        lvgl: lv-drivers: Allow empty package
        lvgl: Allow empty package

  Markus Fuchs (1):
        remove unused AUTHOR variable

  Markus Volk (52):
        libdecor: Upgrade 0.1.99 -> 0.2.0
        wireplumber: Upgrade 0.4.14 -> 0.4.15
        pipewire: Update 0.3.81 -> 0.3.83
        gnome-software: Update 45.0 -> 45.1
        gnome-calendar: Update 45.0 -> 45.1
        gnome-disk-utility: Update 44.0 -> 45.0
        gnome-control-center: Update 45.0 -> 45.1
        eog: Update 45.0 -> 45.1
        gnome-remote-desktop: Update 45.0 -> 45.1
        gnome-shell: Add missing dependency on pipewire
        gnome-shell: Remove deprecated libcroco dependency
        openbox: Drop deprecated libcroco dependency
        pipewire: Update 0.3.83 -> 0.3.84
        tracker-miners: Upgrade 3.6.0 -> 3.6.2
        libgweather4: Upgrade 4.2.0 -> 4.4.0
        gtksourceview5: Upgrade 5.7.1 -> 5.10.0
        openal-soft: Upgrade 1.20.1 -> 1.23.1
        gnome-shell: Upgrade 45.0 -> 45.1
        mutter: Upgrade 45.0 -> 45.1
        dconf-editor: Upgrade 43 -> 45.0.1
        libgsf: Upgrade 1.14.50 -> 1.14.51
        xdg-desktop-portal: Upgrade 1.18.0 -> 1.18.1
        xdg-desktop-portal-gtk: Upgrade 1.14.1 -> 1.15.1
        rest: Upgrade 0.9.0 -> 0.9.1
        nv-codec-headers: Upgrade 12.0.16.0 -> 12.1.14.0
        webp-pixbuf-loader: Upgrade 0.2.4 -> 0.2.5
        libchamplain: Upgrade 0.12.20 -> 0.12.21
        rest: Add packageconfigs for examples and tests
        gssdp: Fix build with api-documentation enabled
        gupnp: Upgrade 0.10.2 -> 0.12.1
        gupnp-tools: Upgrade 0.10.2 -> 0.12.1
        gupnp-igd: Upgrade 1.2.0 -> 1.6.0
        gssdp: Upgrade 1.4.0.1 -> 1.6.3
        ghex: Upgrade 3.18.4 -> 45.0
        Adjust vala build according to changes in vala.bbclass
        drop GNOMEBASEBUILDCLASS = "meson"
        gnome-shell-extensions: Upgrade 44.1 -> 45.1
        cups-filters: Fix for current gcc
        gnome-console: Add recipe
        vte9: Fix build with api-documentation enabled
        gnome-terminal: Upgrade 3.48.1 -> 3.50.1
        cups-filters: Upgrade 1.28.17 -> 2.0.0
        gnome-terminal: Remove recommendation on vte-prompt
        ghex: backport patch to fix build for clang
        qpdf: cleanup
        gtksourceview4: Upgrade 4.8.2 -> 4.8.4
        gnome-control-center: Add rdepends
        system-config-printer: Add cups to rdepends
        pipewire: Upgrade 0.3.84 -> 0.3.85
        flatpak: Upgrade 1.15.4 -> 1.15.6
        flatpak: Add packageconfigs for man and docbook docs
        musicpd: unbreak build with ffmpeg 6.1

  Martin Jansa (12):
        nodejs: update to latest v20 version 20.8.1
        nodejs: Revert io_uring support from bundled libuv-1.46.0
        opencv: refresh protobuf-v22 compatibility patch with backported version
        leveldb: prevent installing gtest
        android-tools: drop ${PE}, ${PR} from /usr/src/debug paths
        minifi-cpp: drop ${PE}, ${PR} from /usr/src/debug paths
        xmlrpc-c: drop ${PE}, ${PR} from /usr/src/debug paths
        fluentbit: drop ${PE}, ${PR} from /usr/src/debug paths
        ntpsec, net-snmp: drop ${PE}, ${PR} from /usr/src/debug paths
        aom, x265: drop ${PE}, ${PR} from /usr/src/debug paths
        python3-{h5py,pandas}: drop ${PE}, ${PR} from /usr/src/debug paths
        evince, gnome-calendar, tracker: drop ${PE}, ${PR} from /usr/src/debug paths

  Martin Maurer (1):
        libqmi: Upgrade 1.32.4 -> 1.34.0

  Matthias Klein (1):
        paho-mqtt-c: upgrade 1.3.12 -> 1.3.13

  Mingli Yu (3):
        vboxguestdrivers: Remove the buildpath
        nlohmann-json: Add ptest support
        ptest-packagelists-meta-oe.inc: Add nlohmann-json

  Peter Kjellerstedt (18):
        libwebsockets: Support building for native
        mosquitto: Support building for native again
        jack: Revert to 1.9.22
        pahole: Correct the version in the recipe file name
        neatvnc: Specify the version in the recipe file name
        mstpd: Update to 0.1.0+
        Revert "libtorrent: upgrade 0.13.8 -> 1"
        libtorrent: Add UPSTREAM_CHECK_GITTAGREGEX
        mksh: Update to 59c properly
        fluidsynth: Specify the version in the recipe file name
        libuvc: Specify the version in the recipe file name
        gst-shark: Update to 0.8.1 properly
        xarchiver: Specify the version in the recipe file name
        python3-kivy: Move a comment so it makes more sense
        python3-greenlet: Avoid duplicate URI in SRC_URI
        python3-pylint: Only set SRCREV once
        python3-pytest-mock: Only set SRCREV once
        zeromq: Update to 4.3.5

  Peter Marko (1):
        grpc: Upgrade 1.56.2 -> 1.59.2

  Petr Gotthard (2):
        libmbim: upgrade 1.28.4 -> 1.30.0
        modemmanager: upgrade 1.20.6 -> 1.22.0

  Poonam Jadhav (1):
        sdbus-c++: Update ptest path

  Potin Lai (2):
        libplist: Upgrade to latest master
        idevicerestore: Upgrade to latest master

  Richard Purdie (4):
        meta-python: Drop broken BBCLASSEXTEND variants
        meta-oe: Drop broken BBCLASSEXTEND variants
        meta-networking: Drop broken BBCLASSEXTEND variants
        meta-perl: Drop broken BBCLASSEXTEND variants

  Ross Burton (1):
        yajl: fix CVE-2017-16516, CVE-2022-24795, CVE-2023-33460

  Sam Van Den Berge (1):
        netdata: Upgrade 1.36.1 -> 1.43.0

  Samuli Piippo (2):
        abseil-cpp: fix mingw build
        protobuf: stage protoc binary to sysroot

  Thomas Gessler (1):
        influxdb: Add start script used by systemd service

  Tim Orling (2):
        po4a: remove old recipe
        debsums: remove old recipe

  Trevor Gamblin (5):
        python-git-pw: add from meta-patchtest
        python3-py-cpuinfo: disable broken ptests
        python3-arrow: add from meta-patchtest
        python3-pytest-mock: disable broken ptests
        meta-python: update ptests status for py-cpuinfo, pytest-mock

  Wang Mingyu (149):
        dnf-plugin-tui: create symlinks from /usr/ to /.
        c-ares: upgrade 1.19.1 -> 1.20.1
        adw-gtk3: upgrade 4.9 -> 5.1
        ctags: upgrade 6.0.20230917.0 -> 6.0.20231001.0
        dialog: upgrade 1.3-20230209 -> 1.3-20231002
        freerdp: upgrade 2.11.1 -> 2.11.2
        gnome-backgrounds: upgrade 44.0 -> 45.0
        gnome-calculator: upgrade 45.0 -> 45.0.2
        gnome-font-viewer: upgrade 44.0 -> 45.0
        ipc-run: upgrade 20220807.0 -> 20231003.0
        libbytesize: upgrade 2.9 -> 2.10
        libcoap: upgrade 4.3.3 -> 4.3.4
        libyang: upgrade 2.1.111 -> 2.1.128
        lvgl: upgrade 8.3.9 -> 8.3.10
        metacity: upgrade 3.46.1 -> 3.50.0
        nautilus: upgrade 45.0 -> 45.1
        ceres-solver: upgrade 2.1.0 -> 2.2.0
        python3-eth-abi: upgrade 3.0.1 -> 4.2.1
        python3-mypy: upgrade 1.5.1 -> 1.6.1
        python3-pylint: upgrade 3.0.0 -> 3.0.1
        python3-aiodns: upgrade 3.0.0 -> 3.1.1
        python3-aiohttp: upgrade 3.8.5 -> 3.8.6
        python3-astroid: upgrade 3.0.0 -> 3.0.1
        python3-bitarray: upgrade 2.8.1 -> 2.8.2
        python3-bitstruct: upgrade 8.17.0 -> 8.18.0
        python3-blinker: upgrade 1.6.2 -> 1.6.3
        python3-charset-normalizer: upgrade 3.2.0 -> 3.3.0
        python3-cmake: upgrade 3.27.5 -> 3.27.7
        python3-coverage: upgrade 7.3.1 -> 7.3.2
        python3-croniter: upgrade 1.4.1 -> 2.0.1
        python3-dbus-fast: upgrade 1.85.0 -> 2.12.0
        python3-email-validator: upgrade 1.3.1 -> 2.0.0
        python3-engineio: upgrade 4.7.1 -> 4.8.0
        python3-eth-typing: upgrade 3.4.0 -> 3.5.0
        python3-eth-utils: upgrade 2.2.1 -> 2.2.2
        python3-executing: upgrade 1.2.0 -> 2.0.0
        python3-flask-babel: upgrade 3.1.0 -> 4.0.0
        python3-flask-jwt-extended: upgrade 4.5.2 -> 4.5.3
        python3-google-api-python-client: upgrade 2.101.0 -> 2.104.0
        python3-googleapis-common-protos: upgrade 1.60.0 -> 1.61.0
        python3-google-auth: upgrade 2.23.1 -> 2.23.3
        python3-h5py: upgrade 3.9.0 -> 3.10.0
        python3-huey: upgrade 2.4.5 -> 2.5.0
        python3-imageio: upgrade 2.31.3 -> 2.31.5
        python3-ipython: upgrade 8.15.0 -> 8.16.1
        python3-jedi: upgrade 0.19.0 -> 0.19.1
        python3-meson-python: upgrade 0.13.1 -> 0.14.0
        python3-msgpack: upgrade 1.0.6 -> 1.0.7
        python3-platformdirs: upgrade 3.10.0 -> 3.11.0
        python3-prompt-toolkit: upgrade 3.0.36 -> 3.0.39
        python3-protobuf: upgrade 4.24.3 -> 4.24.4
        python3-pycares: upgrade 4.3.0 -> 4.4.0
        python3-pycodestyle: upgrade 2.11.0 -> 2.11.1
        python3-pydantic: upgrade 2.4.1 -> 2.4.2
        python3-pyephem: upgrade 4.1.4 -> 4.1.5
        python3-pytest-timeout: upgrade 2.1.0 -> 2.2.0
        python3-rapidjson: upgrade 1.11 -> 1.12
        python3-regex: upgrade 2023.8.8 -> 2023.10.3
        python3-rich: upgrade 13.5.3 -> 13.6.0
        python3-schedule: upgrade 1.2.0 -> 1.2.1
        python3-semver: upgrade 3.0.1 -> 3.0.2
        python3-simplejson: upgrade 3.19.1 -> 3.19.2
        python3-socketio: upgrade 5.9.0 -> 5.10.0
        python3-sqlalchemy: upgrade 2.0.21 -> 2.0.22
        python3-stack-data: upgrade 0.6.2 -> 0.6.3
        python3-texttable: upgrade 1.6.7 -> 1.7.0
        python3-traitlets: upgrade 5.10.1 -> 5.11.2
        python3-types-psutil: upgrade 5.9.5.16 -> 5.9.5.17
        python3-tzlocal: upgrade 5.0.1 -> 5.1
        python3-web3: upgrade 6.10.0 -> 6.11.1
        python3-websocket-client: upgrade 1.6.3 -> 1.6.4
        python3-xlsxwriter: upgrade 3.1.3 -> 3.1.8
        python3-xxhash: upgrade 3.3.0 -> 3.4.1
        python3-zeroconf: upgrade 0.112.0 -> 0.119.0
        python3-zopeinterface: upgrade 6.0 -> 6.1
        rdma-core: upgrade 47.0 -> 48.0
        redis: upgrade 7.2.1 -> 7.2.2
        remmina: upgrade 1.4.32 -> 1.4.33
        tesseract: upgrade 5.3.2 -> 5.3.3
        thingsboard-gateway: upgrade 3.3 -> 3.4.1
        tio: upgrade 2.6 -> 2.7
        wireshark: upgrade 4.0.8 -> 4.0.10
        xterm: upgrade 384 -> 387
        zchunk: upgrade 1.3.1 -> 1.3.2
        hdf5: Fix install conflict when enable multilib.
        dnf-plugin-tui: Recover BBCLASSEXTEND variants
        gensio: upgrade 2.7.6 -> 2.7.7
        hwdata: upgrade 0.375 -> 0.376
        libio-socket-ssl-perl: upgrade 2.083 -> 2.084
        makedumpfile: upgrade 1.7.3 -> 1.7.4
        gnome-remote-desktop: move from meta-virtualization to meta-security
        ctags: upgrade 6.0.20231029.0 -> 6.0.20231105.0
        function2: upgrade 4.2.3 -> 4.2.4
        neatvnc: upgrade 0.7.0 -> 0.7.1
        python3-argh: upgrade 0.30.3 -> 0.30.4
        python3-geojson: upgrade 3.0.1 -> 3.1.0
        python3-imageio: upgrade 2.31.6 -> 2.32.0
        python3-inflate64: upgrade 0.3.1 -> 1.0.0
        python3-jsbeautifier: upgrade 1.14.9 -> 1.14.11
        python3-lru-dict: upgrade 1.2.0 -> 1.3.0
        python3-python-vlc: upgrade 3.0.18122 -> 3.0.20123
        python3-zeroconf: upgrade 0.119.0 -> 0.120.0
        c-ares: upgrade 1.21.0 -> 1.22.0
        ctags: upgrade 6.0.20231105.0 -> 6.0.20231112.0
        libencode-perl: upgrade 3.19 -> 3.20
        bindfs: upgrade 1.17.5 -> 1.17.6
        python3-hexbytes: upgrade 0.3.1 -> 1.0.0
        python3-linux-procfs: upgrade 0.7.1 -> 0.7.3
        openvpn: upgrade 2.6.6 -> 2.6.7
        python3-argcomplete: upgrade 3.1.4 -> 3.1.6
        python3-awesomeversion: upgrade 23.8.0 -> 23.11.0
        python3-dbus-fast: upgrade 2.12.0 -> 2.14.0
        python3-eth-typing: upgrade 3.5.1 -> 3.5.2
        python3-eth-utils: upgrade 2.3.0 -> 2.3.1
        python3-geomet: upgrade 1.0.0 -> 1.1.0
        python3-google-api-core: upgrade 2.12.0 -> 2.14.0
        python3-google-api-python-client: upgrade 2.106.0 -> 2.108.0
        python3-mypy: upgrade 1.6.1 -> 1.7.0
        python3-platformdirs: upgrade 3.11.0 -> 4.0.0
        python3-prompt-toolkit: upgrade 3.0.39 -> 3.0.41
        python3-pyaudio: upgrade 0.2.13 -> 0.2.14
        python3-pydantic: upgrade 2.4.2 -> 2.5.0
        python3-pymetno: upgrade 0.11.0 -> 0.12.0
        python3-pytest-xdist: upgrade 3.3.1 -> 3.4.0
        python3-sentry-sdk: upgrade 1.34.0 -> 1.35.0
        python3-tomlkit: upgrade 0.12.2 -> 0.12.3
        python3-types-setuptools: upgrade 68.2.0.0 -> 68.2.0.1
        python3-web3: upgrade 6.11.2 -> 6.11.3
        python3-zeroconf: upgrade 0.126.0 -> 0.127.0
        ser2net: upgrade 4.5.0 -> 4.5.1
        uftp: upgrade 5.0.1 -> 5.0.2
        webkitgtk3: upgrade 2.42.1 -> 2.42.2
        imlib2: delete non-existent file
        c-ares: upgrade 1.22.0 -> 1.22.1
        ctags: upgrade 6.0.20231112.0 -> 6.0.20231119.0
        exiftool: upgrade 12.69 -> 12.70
        gnome-bluetooth: upgrade 42.6 -> 42.7
        libextutils-cppguess-perl: upgrade 0.26 -> 0.27
        libwebsockets: upgrade 4.3.2 -> 4.3.3
        python3-aiohttp: upgrade 3.8.6 -> 3.9.0
        python3-dateparser: upgrade 1.1.8 -> 1.2.0
        python3-django: upgrade 4.2.5 -> 4.2.7
        python3-imageio: upgrade 2.32.0 -> 2.33.0
        python3-ldap: upgrade 3.4.3 -> 3.4.4
        python3-pastedeploy: upgrade 3.0.1 -> 3.1.0
        python3-pdm: upgrade 2.10.1 -> 2.10.3
        python3-pydantic: upgrade 2.5.0 -> 2.5.1
        python3-rich: upgrade 13.6.0 -> 13.7.0
        strongswan: upgrade 5.9.11 -> 5.9.12

  Yi Zhao (6):
        samba: upgrade 4.18.6 -> 4.18.8
        samba: use external cmocka instead of bundled cmocka
        libtevent: fix ptest
        libldb: add ptest
        conntrack-tools: upgrade 1.4.7 -> 1.4.8
        nftables: upgrade 1.0.8 -> 1.0.9

  Yoann Congal (5):
        emlog: ignore CVE-2022-3968 & CVE-2023-43291
        juce/projucer: Backport a fix for the compilation under recent GCC
        meta-oe/static-ids: Change postgres to 28 to match forced id in recipe
        static-id: add missing netdata group
        python3-soupsieve: Break circular dependency with beautifulsoup4

  Zoltán Böszörményi (3):
        python3-ninja-syntax: Set BBCLASSEXTEND = "native nativesdk"
        python3-ninja: Set BBCLASSEXTEND = "native nativesdk"
        geos: Fix packaging

  alperak (39):
        xdebug: upgrade 3.2.0 -> 3.2.2
        catch2: upgrade 2.13.7 -> 2.13.10
        tuna: upgrade 0.18 -> 0.19
        libsrtp: upgrade 2.4.2 -> 2.5.0
        libupnp: upgrade 1.14.6 -> 1.14.18
        libisofs: upgrade 1.5.4 -> 1.5.6
        libisoburn: upgrade 1.5.4 -> 1.5.6
        fuse-exfat: upgrade 1.3.0 -> 1.4.0
        fuse3: upgrade 3.15.1 -> 3.16.2
        ufs-utils: upgrade 3.12.3 -> 4.13.5
        libebml: upgrade 1.3.0 -> 1.4.4
        libmatroska: upgrade 1.4.1 -> 1.7.1
        libde265: upgrade 1.0.5 -> 1.0.12
        libopenmpt: upgrade 0.6.2 -> 0.7.3
        mpd: upgrade 0.23.12 -> 0.23.14
        opencore-amr: upgrade 0.1.3 -> 0.1.6
        tinyalsa: upgrade 1.1.1 -> 2.0.0
        cannelloni: upgrade 1.0.0 -> 1.1.0
        civetweb: upgrade 1.12 -> 1.16
        libdnet: upgrade 1.16.3 -> 1.17.0
        openfortivpn: upgrade 1.20.5 -> 1.21.0
        fuse-exfat: Dropped md5sum
        libopenmpt: Added license change reason and dropped md5sum
        bolt: upgrade 0.9.5 -> 0.9.6
        irssi: upgrade 1.4.4 -> 1.4.5
        libmtp: upgrade 1.1.20 -> 1.1.21
        libsigc++-2.0: upgrade 2.10.7 -> 2.12.1
        libsigc++-3: upgrade 3.2.0 -> 3.6.0
        ocl-icd: upgrade 2.3.1 -> 2.3.2
        opencl-icd-loader: upgrade v2022.01.04 -> v2023.04.17
        uutils-coreutils: upgrade 0.0.22 -> 0.0.23
        botan: upgrade 2.19.3 -> 3.2.0
        capnproto: upgrade 0.10.4 -> 1.0.1
        cloc: upgrade 1.94 -> 1.98
        cpuid: upgrade 20211129 -> 20230614
        gst-editing-services: upgrade 1.20.5 -> 1.22.7
        luaposix: upgrade 35.1 -> 36.2.1
        mercurial: upgrade 6.1 -> 6.5
        ledmon: upgrade 0.93 -> 0.97

  skandigraun (1):
        libvpx: don't specify armv5 and armv6 toolchains explicitly

meta-security: 3f7d40b0fc..070a1e82cc:
  Gowtham Suresh Kumar (1):
        Update parsec recipes

  Mingli Yu (1):
        samhain: remove the buildpath

  Stefan Berger (1):
        ima,evm: Add two variables to write filenames and signatures into

Change-Id: Ib809aa0df4162c50a06c542a94a0b06cdc149a2d
Signed-off-by: Patrick Williams <patrick@stwcx.xyz>
diff --git a/poky/bitbake/lib/bb/asyncrpc/__init__.py b/poky/bitbake/lib/bb/asyncrpc/__init__.py
index 9a85e99..a437164 100644
--- a/poky/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/poky/bitbake/lib/bb/asyncrpc/__init__.py
@@ -4,30 +4,13 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import itertools
-import json
-
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-
-def chunkify(msg, max_chunk):
-    if len(msg) < max_chunk - 1:
-        yield ''.join((msg, "\n"))
-    else:
-        yield ''.join((json.dumps({
-                'chunk-stream': None
-            }), "\n"))
-
-        args = [iter(msg)] * (max_chunk - 1)
-        for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
-            yield ''.join(itertools.chain(m, "\n"))
-        yield "\n"
-
 
 from .client import AsyncClient, Client
-from .serv import AsyncServer, AsyncServerConnection, ClientError, ServerError
+from .serv import AsyncServer, AsyncServerConnection
+from .connection import DEFAULT_MAX_CHUNK
+from .exceptions import (
+    ClientError,
+    ServerError,
+    ConnectionClosedError,
+    InvokeError,
+)
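
The package init above now only re-exports the public surface: the chunking
protocol moves to connection.py and the exception types to exceptions.py, so
existing callers keep importing from bb.asyncrpc unchanged. A minimal sketch
of the resulting import surface (illustrative, not part of the patch):

    from bb.asyncrpc import (
        AsyncClient, Client,
        AsyncServer, AsyncServerConnection,
        ClientError, ServerError, ConnectionClosedError, InvokeError,
        DEFAULT_MAX_CHUNK,
    )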
diff --git a/poky/bitbake/lib/bb/asyncrpc/client.py b/poky/bitbake/lib/bb/asyncrpc/client.py
index fa042bb..0d7cd85 100644
--- a/poky/bitbake/lib/bb/asyncrpc/client.py
+++ b/poky/bitbake/lib/bb/asyncrpc/client.py
@@ -10,13 +10,13 @@
 import os
 import socket
 import sys
-from . import chunkify, DEFAULT_MAX_CHUNK
+from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
+from .exceptions import ConnectionClosedError, InvokeError
 
 
 class AsyncClient(object):
     def __init__(self, proto_name, proto_version, logger, timeout=30):
-        self.reader = None
-        self.writer = None
+        self.socket = None
         self.max_chunk = DEFAULT_MAX_CHUNK
         self.proto_name = proto_name
         self.proto_version = proto_version
@@ -25,7 +25,8 @@
 
     async def connect_tcp(self, address, port):
         async def connect_sock():
-            return await asyncio.open_connection(address, port)
+            reader, writer = await asyncio.open_connection(address, port)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
 
         self._connect_sock = connect_sock
 
@@ -40,27 +41,39 @@
                 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
                 sock.connect(os.path.basename(path))
             finally:
-               os.chdir(cwd)
-            return await asyncio.open_unix_connection(sock=sock)
+                os.chdir(cwd)
+            reader, writer = await asyncio.open_unix_connection(sock=sock)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+        self._connect_sock = connect_sock
+
+    async def connect_websocket(self, uri):
+        import websockets
+
+        async def connect_sock():
+            websocket = await websockets.connect(uri, ping_interval=None)
+            return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
 
     async def setup_connection(self):
-        s = '%s %s\n\n' % (self.proto_name, self.proto_version)
-        self.writer.write(s.encode("utf-8"))
-        await self.writer.drain()
+        # Send headers
+        await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
+        # End of headers
+        await self.socket.send("")
 
     async def connect(self):
-        if self.reader is None or self.writer is None:
-            (self.reader, self.writer) = await self._connect_sock()
+        if self.socket is None:
+            self.socket = await self._connect_sock()
             await self.setup_connection()
 
-    async def close(self):
-        self.reader = None
+    async def disconnect(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
 
-        if self.writer is not None:
-            self.writer.close()
-            self.writer = None
+    async def close(self):
+        await self.disconnect()
 
     async def _send_wrapper(self, proc):
         count = 0
@@ -71,6 +84,7 @@
             except (
                 OSError,
                 ConnectionError,
+                ConnectionClosedError,
                 json.JSONDecodeError,
                 UnicodeDecodeError,
             ) as e:
@@ -82,49 +96,27 @@
                 await self.close()
                 count += 1
 
-    async def send_message(self, msg):
-        async def get_line():
-            try:
-                line = await asyncio.wait_for(self.reader.readline(), self.timeout)
-            except asyncio.TimeoutError:
-                raise ConnectionError("Timed out waiting for server")
+    def check_invoke_error(self, msg):
+        if isinstance(msg, dict) and "invoke-error" in msg:
+            raise InvokeError(msg["invoke-error"]["message"])
 
-            if not line:
-                raise ConnectionError("Connection closed")
-
-            line = line.decode("utf-8")
-
-            if not line.endswith("\n"):
-                raise ConnectionError("Bad message %r" % (line))
-
-            return line
-
+    async def invoke(self, msg):
         async def proc():
-            for c in chunkify(json.dumps(msg), self.max_chunk):
-                self.writer.write(c.encode("utf-8"))
-            await self.writer.drain()
+            await self.socket.send_message(msg)
+            return await self.socket.recv_message()
 
-            l = await get_line()
-
-            m = json.loads(l)
-            if m and "chunk-stream" in m:
-                lines = []
-                while True:
-                    l = (await get_line()).rstrip("\n")
-                    if not l:
-                        break
-                    lines.append(l)
-
-                m = json.loads("".join(lines))
-
-            return m
-
-        return await self._send_wrapper(proc)
+        result = await self._send_wrapper(proc)
+        self.check_invoke_error(result)
+        return result
 
     async def ping(self):
-        return await self.send_message(
-            {'ping': {}}
-        )
+        return await self.invoke({"ping": {}})
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.close()
 
 
 class Client(object):
@@ -142,7 +134,7 @@
         # required (but harmless) with it.
         asyncio.set_event_loop(self.loop)
 
-        self._add_methods('connect_tcp', 'ping')
+        self._add_methods("connect_tcp", "ping")
 
     @abc.abstractmethod
     def _get_async_client(self):
@@ -171,8 +163,20 @@
     def max_chunk(self, value):
         self.client.max_chunk = value
 
-    def close(self):
+    def disconnect(self):
         self.loop.run_until_complete(self.client.close())
-        if sys.version_info >= (3, 6):
-            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-        self.loop.close()
+
+    def close(self):
+        if self.loop:
+            self.loop.run_until_complete(self.client.close())
+            if sys.version_info >= (3, 6):
+                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+        self.loop = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+        return False
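
With the hunks above, AsyncClient gains async context-manager support and
Client gains the synchronous equivalent, with close() now idempotent via
disconnect(). A minimal usage sketch, assuming a hypothetical MyAsyncClient
subclass that supplies proto_name, proto_version and a logger:

    async def ping_once(host, port):
        client = MyAsyncClient()
        await client.connect_tcp(host, port)  # only records a connect callback; no I/O yet
        async with client:                    # __aexit__ awaits close()
            return await client.ping()       # thin wrapper over invoke({"ping": {}})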
diff --git a/poky/bitbake/lib/bb/asyncrpc/connection.py b/poky/bitbake/lib/bb/asyncrpc/connection.py
new file mode 100644
index 0000000..7f0cf6b
--- /dev/null
+++ b/poky/bitbake/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import asyncio
+import itertools
+import json
+from datetime import datetime
+from .exceptions import ClientError, ConnectionClosedError
+
+
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported to
+# each other what the maximum chunk sizes were, but that will slow down the
+# connection setup with a round trip delay so I'd rather not do that unless it
+# is necessary
+DEFAULT_MAX_CHUNK = 32 * 1024
+
+
+def chunkify(msg, max_chunk):
+    if len(msg) < max_chunk - 1:
+        yield "".join((msg, "\n"))
+    else:
+        yield "".join((json.dumps({"chunk-stream": None}), "\n"))
+
+        args = [iter(msg)] * (max_chunk - 1)
+        for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
+            yield "".join(itertools.chain(m, "\n"))
+        yield "\n"
+
+
+def json_serialize(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()
+    raise TypeError("Type %s not serializeable" % type(obj))
+
+
+class StreamConnection(object):
+    def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
+        self.reader = reader
+        self.writer = writer
+        self.timeout = timeout
+        self.max_chunk = max_chunk
+
+    @property
+    def address(self):
+        return self.writer.get_extra_info("peername")
+
+    async def send_message(self, msg):
+        for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
+            self.writer.write(c.encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv_message(self):
+        l = await self.recv()
+
+        m = json.loads(l)
+        if not m:
+            return m
+
+        if "chunk-stream" in m:
+            lines = []
+            while True:
+                l = await self.recv()
+                if not l:
+                    break
+                lines.append(l)
+
+            m = json.loads("".join(lines))
+
+        return m
+
+    async def send(self, msg):
+        self.writer.write(("%s\n" % msg).encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv(self):
+        if self.timeout < 0:
+            line = await self.reader.readline()
+        else:
+            try:
+                line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+
+        if not line:
+            raise ConnectionClosedError("Connection closed")
+
+        line = line.decode("utf-8")
+
+        if not line.endswith("\n"):
+            raise ConnectionError("Bad message %r" % (line))
+
+        return line.rstrip()
+
+    async def close(self):
+        self.reader = None
+        if self.writer is not None:
+            self.writer.close()
+            self.writer = None
+
+
+class WebsocketConnection(object):
+    def __init__(self, socket, timeout):
+        self.socket = socket
+        self.timeout = timeout
+
+    @property
+    def address(self):
+        return ":".join(str(s) for s in self.socket.remote_address)
+
+    async def send_message(self, msg):
+        await self.send(json.dumps(msg, default=json_serialize))
+
+    async def recv_message(self):
+        m = await self.recv()
+        return json.loads(m)
+
+    async def send(self, msg):
+        import websockets.exceptions
+
+        try:
+            await self.socket.send(msg)
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def recv(self):
+        import websockets.exceptions
+
+        try:
+            if self.timeout < 0:
+                return await self.socket.recv()
+
+            try:
+                return await asyncio.wait_for(self.socket.recv(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def close(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
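
The framing implemented by chunkify() above is worth spelling out: a message
shorter than max_chunk - 1 is sent as a single newline-terminated line, while
a longer one is announced with a {"chunk-stream": null} header line, followed
by slices of at most max_chunk - 1 characters and a terminating blank line.
A standalone illustration (not part of the patch):

    from bb.asyncrpc.connection import chunkify

    msg = '{"key": "' + "x" * 100 + '"}'
    frames = list(chunkify(msg, max_chunk=64))
    # frames[0]  == '{"chunk-stream": null}\n'  -> announces chunked mode
    # frames[1:-1] are newline-terminated slices of at most 63 characters
    # frames[-1] == '\n'                        -> blank line ends the stream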
diff --git a/poky/bitbake/lib/bb/asyncrpc/exceptions.py b/poky/bitbake/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 0000000..ae1043a
--- /dev/null
+++ b/poky/bitbake/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+class ClientError(Exception):
+    pass
+
+
+class InvokeError(Exception):
+    pass
+
+
+class ServerError(Exception):
+    pass
+
+
+class ConnectionClosedError(Exception):
+    pass
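
Splitting the exception types into their own module lets client.py and
serv.py share them. InvokeError is the interesting addition: a server-side
handler that raises it is reported to the client as an "invoke-error" reply
before the server closes the connection, and check_invoke_error() re-raises
it client-side. An illustrative round trip (handler name hypothetical):

    async def handle_get(self, request):              # server-side handler
        raise InvokeError("no such key: %s" % request["key"])

    # client side:
    #   await client.invoke({"get": {"key": "foo"}})
    #   -> server replies {"invoke-error": {"message": "no such key: foo"}}
    #   -> check_invoke_error() raises InvokeError("no such key: foo")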
diff --git a/poky/bitbake/lib/bb/asyncrpc/serv.py b/poky/bitbake/lib/bb/asyncrpc/serv.py
index d2de489..f0be9a6 100644
--- a/poky/bitbake/lib/bb/asyncrpc/serv.py
+++ b/poky/bitbake/lib/bb/asyncrpc/serv.py
@@ -12,241 +12,321 @@
 import socket
 import sys
 import multiprocessing
-from . import chunkify, DEFAULT_MAX_CHUNK
+import logging
+from .connection import StreamConnection, WebsocketConnection
+from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
 
 
-class ClientError(Exception):
-    pass
-
-
-class ServerError(Exception):
-    pass
+class ClientLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs):
+        return f"[Client {self.extra['address']}] {msg}", kwargs
 
 
 class AsyncServerConnection(object):
-    def __init__(self, reader, writer, proto_name, logger):
-        self.reader = reader
-        self.writer = writer
+    # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
+    # return message will automatically be sent back to the client
+    NO_RESPONSE = object()
+
+    def __init__(self, socket, proto_name, logger):
+        self.socket = socket
         self.proto_name = proto_name
-        self.max_chunk = DEFAULT_MAX_CHUNK
         self.handlers = {
-            'chunk-stream': self.handle_chunk,
-            'ping': self.handle_ping,
+            "ping": self.handle_ping,
         }
-        self.logger = logger
+        self.logger = ClientLoggerAdapter(
+            logger,
+            {
+                "address": socket.address,
+            },
+        )
+
+    async def close(self):
+        await self.socket.close()
 
     async def process_requests(self):
         try:
-            self.addr = self.writer.get_extra_info('peername')
-            self.logger.debug('Client %r connected' % (self.addr,))
+            self.logger.info("Client %r connected" % (self.socket.address,))
 
             # Read protocol and version
-            client_protocol = await self.reader.readline()
+            client_protocol = await self.socket.recv()
             if not client_protocol:
                 return
 
-            (client_proto_name, client_proto_version) = client_protocol.decode('utf-8').rstrip().split()
+            (client_proto_name, client_proto_version) = client_protocol.split()
             if client_proto_name != self.proto_name:
-                self.logger.debug('Rejecting invalid protocol %s' % (self.proto_name))
+                self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name))
                 return
 
-            self.proto_version = tuple(int(v) for v in client_proto_version.split('.'))
+            self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
             if not self.validate_proto_version():
-                self.logger.debug('Rejecting invalid protocol version %s' % (client_proto_version))
+                self.logger.debug(
+                    "Rejecting invalid protocol version %s" % (client_proto_version)
+                )
                 return
 
             # Read headers. Currently, no headers are implemented, so look for
             # an empty line to signal the end of the headers
             while True:
-                line = await self.reader.readline()
-                if not line:
-                    return
-
-                line = line.decode('utf-8').rstrip()
-                if not line:
+                header = await self.socket.recv()
+                if not header:
                     break
 
             # Handle messages
             while True:
-                d = await self.read_message()
+                d = await self.socket.recv_message()
                 if d is None:
                     break
-                await self.dispatch_message(d)
-                await self.writer.drain()
-        except ClientError as e:
+                try:
+                    response = await self.dispatch_message(d)
+                except InvokeError as e:
+                    await self.socket.send_message(
+                        {"invoke-error": {"message": str(e)}}
+                    )
+                    break
+
+                if response is not self.NO_RESPONSE:
+                    await self.socket.send_message(response)
+
+        except ConnectionClosedError as e:
+            self.logger.info(str(e))
+        except (ClientError, ConnectionError) as e:
             self.logger.error(str(e))
         finally:
-            self.writer.close()
+            await self.close()
 
     async def dispatch_message(self, msg):
         for k in self.handlers.keys():
             if k in msg:
-                self.logger.debug('Handling %s' % k)
-                await self.handlers[k](msg[k])
-                return
+                self.logger.debug("Handling %s" % k)
+                return await self.handlers[k](msg[k])
 
         raise ClientError("Unrecognized command %r" % msg)
 
-    def write_message(self, msg):
-        for c in chunkify(json.dumps(msg), self.max_chunk):
-            self.writer.write(c.encode('utf-8'))
-
-    async def read_message(self):
-        l = await self.reader.readline()
-        if not l:
-            return None
-
-        try:
-            message = l.decode('utf-8')
-
-            if not message.endswith('\n'):
-                return None
-
-            return json.loads(message)
-        except (json.JSONDecodeError, UnicodeDecodeError) as e:
-            self.logger.error('Bad message from client: %r' % message)
-            raise e
-
-    async def handle_chunk(self, request):
-        lines = []
-        try:
-            while True:
-                l = await self.reader.readline()
-                l = l.rstrip(b"\n").decode("utf-8")
-                if not l:
-                    break
-                lines.append(l)
-
-            msg = json.loads(''.join(lines))
-        except (json.JSONDecodeError, UnicodeDecodeError) as e:
-            self.logger.error('Bad message from client: %r' % lines)
-            raise e
-
-        if 'chunk-stream' in msg:
-            raise ClientError("Nested chunks are not allowed")
-
-        await self.dispatch_message(msg)
-
     async def handle_ping(self, request):
-        response = {'alive': True}
-        self.write_message(response)
+        return {"alive": True}
+
+
+class StreamServer(object):
+    def __init__(self, handler, logger):
+        self.handler = handler
+        self.logger = logger
+        self.closed = False
+
+    async def handle_stream_client(self, reader, writer):
+        # writer.transport.set_write_buffer_limits(0)
+        socket = StreamConnection(reader, writer, -1)
+        if self.closed:
+            await socket.close()
+            return
+
+        await self.handler(socket)
+
+    async def stop(self):
+        self.closed = True
+
+
+class TCPStreamServer(StreamServer):
+    def __init__(self, host, port, handler, logger):
+        super().__init__(handler, logger)
+        self.host = host
+        self.port = port
+
+    def start(self, loop):
+        self.server = loop.run_until_complete(
+            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+            # Newer python does this automatically. Do it manually here for
+            # maximum compatibility
+            s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+            s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+
+class UnixStreamServer(StreamServer):
+    def __init__(self, path, handler, logger):
+        super().__init__(handler, logger)
+        self.path = path
+
+    def start(self, loop):
+        cwd = os.getcwd()
+        try:
+            # Work around path length limits in AF_UNIX
+            os.chdir(os.path.dirname(self.path))
+            self.server = loop.run_until_complete(
+                asyncio.start_unix_server(
+                    self.handle_stream_client, os.path.basename(self.path)
+                )
+            )
+        finally:
+            os.chdir(cwd)
+
+        self.logger.debug("Listening on %r" % self.path)
+        self.address = "unix://%s" % os.path.abspath(self.path)
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        os.unlink(self.path)
+
+
+class WebsocketsServer(object):
+    def __init__(self, host, port, handler, logger):
+        self.host = host
+        self.port = port
+        self.handler = handler
+        self.logger = logger
+
+    def start(self, loop):
+        import websockets.server
+
+        self.server = loop.run_until_complete(
+            websockets.server.serve(
+                self.client_handler,
+                self.host,
+                self.port,
+                ping_interval=None,
+            )
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "ws://[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "ws://%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+    async def client_handler(self, websocket):
+        socket = WebsocketConnection(websocket, -1)
+        await self.handler(socket)
 
 
 class AsyncServer(object):
     def __init__(self, logger):
-        self._cleanup_socket = None
         self.logger = logger
-        self.start = None
-        self.address = None
         self.loop = None
+        self.run_tasks = []
 
     def start_tcp_server(self, host, port):
-        def start_tcp():
-            self.server = self.loop.run_until_complete(
-                asyncio.start_server(self.handle_client, host, port)
-            )
-
-            for s in self.server.sockets:
-                self.logger.debug('Listening on %r' % (s.getsockname(),))
-                # Newer python does this automatically. Do it manually here for
-                # maximum compatibility
-                s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
-                s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
-
-                # Enable keep alives. This prevents broken client connections
-                # from persisting on the server for long periods of time.
-                s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
-                s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
-                s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
-                s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
-
-            name = self.server.sockets[0].getsockname()
-            if self.server.sockets[0].family == socket.AF_INET6:
-                self.address = "[%s]:%d" % (name[0], name[1])
-            else:
-                self.address = "%s:%d" % (name[0], name[1])
-
-        self.start = start_tcp
+        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
 
     def start_unix_server(self, path):
-        def cleanup():
-            os.unlink(path)
+        self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-        def start_unix():
-            cwd = os.getcwd()
-            try:
-                # Work around path length limits in AF_UNIX
-                os.chdir(os.path.dirname(path))
-                self.server = self.loop.run_until_complete(
-                    asyncio.start_unix_server(self.handle_client, os.path.basename(path))
-                )
-            finally:
-                os.chdir(cwd)
+    def start_websocket_server(self, host, port):
+        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
 
-            self.logger.debug('Listening on %r' % path)
-
-            self._cleanup_socket = cleanup
-            self.address = "unix://%s" % os.path.abspath(path)
-
-        self.start = start_unix
-
-    @abc.abstractmethod
-    def accept_client(self, reader, writer):
-        pass
-
-    async def handle_client(self, reader, writer):
-        # writer.transport.set_write_buffer_limits(0)
+    async def _client_handler(self, socket):
+        address = socket.address
         try:
-            client = self.accept_client(reader, writer)
+            client = self.accept_client(socket)
             await client.process_requests()
         except Exception as e:
             import traceback
-            self.logger.error('Error from client: %s' % str(e), exc_info=True)
-            traceback.print_exc()
-            writer.close()
-        self.logger.debug('Client disconnected')
 
-    def run_loop_forever(self):
-        try:
-            self.loop.run_forever()
-        except KeyboardInterrupt:
-            pass
+            self.logger.error(
+                "Error from client %s: %s" % (address, str(e)), exc_info=True
+            )
+            traceback.print_exc()
+        finally:
+            self.logger.debug("Client %s disconnected", address)
+            await socket.close()
+
+    @abc.abstractmethod
+    def accept_client(self, socket):
+        pass
+
+    async def stop(self):
+        self.logger.debug("Stopping server")
+        await self.server.stop()
+
+    def start(self):
+        tasks = self.server.start(self.loop)
+        self.address = self.server.address
+        return tasks
 
     def signal_handler(self):
         self.logger.debug("Got exit signal")
-        self.loop.stop()
+        self.loop.create_task(self.stop())
 
-    def _serve_forever(self):
+    def _serve_forever(self, tasks):
         try:
             self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
             signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
 
-            self.run_loop_forever()
-            self.server.close()
+            self.loop.run_until_complete(asyncio.gather(*tasks))
 
-            self.loop.run_until_complete(self.server.wait_closed())
-            self.logger.debug('Server shutting down')
+            self.logger.debug("Server shutting down")
         finally:
-            if self._cleanup_socket is not None:
-                self._cleanup_socket()
+            self.server.cleanup()
 
     def serve_forever(self):
         """
         Serve requests in the current process
         """
+        self._create_loop()
+        tasks = self.start()
+        self._serve_forever(tasks)
+        self.loop.close()
+
+    def _create_loop(self):
         # Create loop and override any loop that may have existed in
         # a parent process.  It is possible that the usecases of
         # serve_forever might be constrained enough to allow using
         # get_event_loop here, but better safe than sorry for now.
         self.loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self.loop)
-        self.start()
-        self._serve_forever()
 
-    def serve_as_process(self, *, prefunc=None, args=()):
+    def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
         """
         Serve requests in a child process
         """
+
         def run(queue):
             # Create loop and override any loop that may have existed
             # in a parent process.  Without doing this and instead
@@ -259,18 +339,22 @@
             # more general, though, as any potential use of asyncio in
             # Cooker could create a loop that needs to replaced in this
             # new process.
-            self.loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(self.loop)
+            self._create_loop()
             try:
-                self.start()
+                self.address = None
+                tasks = self.start()
             finally:
+                # Always put the server address to wake up the parent task
                 queue.put(self.address)
                 queue.close()
 
             if prefunc is not None:
                 prefunc(self, *args)
 
-            self._serve_forever()
+            if log_level is not None:
+                self.logger.setLevel(log_level)
+
+            self._serve_forever(tasks)
 
             if sys.version_info >= (3, 6):
                 self.loop.run_until_complete(self.loop.shutdown_asyncgens())
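
The net effect of this rewrite is that AsyncServer no longer owns a
transport: TCPStreamServer, UnixStreamServer and WebsocketsServer each know
how to bind, report an address and stop, while AsyncServer just selects one
and drives the event loop. A minimal sketch of wiring a concrete server
(MyConnection is a hypothetical AsyncServerConnection subclass):

    import logging

    class MyServer(AsyncServer):
        def accept_client(self, socket):
            return MyConnection(socket, "myproto", self.logger)

    server = MyServer(logging.getLogger("myproto"))
    server.start_websocket_server("localhost", 0)  # or start_tcp_server()/start_unix_server()
    server.serve_as_process(log_level=logging.WARNING)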
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 5ea41c5..18d5574 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -344,9 +344,7 @@
     """
     mc = ""
     if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
-        elems = virtualfn.split(':')
-        mc = elems[1]
-        virtualfn = ":".join(elems[2:])
+        (_, mc, virtualfn) = virtualfn.split(':', 2)
 
     fn = virtualfn
     cls = ""
@@ -369,7 +367,7 @@
 
 def variant2virtual(realfn, variant):
     """
-    Convert a real filename + the associated subclass keyword to a virtual filename
+    Convert a real filename + a variant to a virtual filename
     """
     if variant == "":
         return realfn
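
The split(':', 2) form above is behavior-preserving: with maxsplit=2 the
remainder of the virtual filename is kept intact, exactly what the old
":".join(elems[2:]) produced. For example:

    "mc:mc1:virtual:native:/path/recipe.bb".split(':', 2)
    # -> ['mc', 'mc1', 'virtual:native:/path/recipe.bb']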
diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index eabeda5..cd39409 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -62,6 +62,7 @@
 modulecode_deps = {}
 
 def add_module_functions(fn, functions, namespace):
+    import os
     fstat = os.stat(fn)
     fixedhash = fn + ":" + str(fstat.st_size) +  ":" + str(fstat.st_mtime)
     for f in functions:
@@ -255,8 +256,8 @@
     def visit_Call(self, node):
         name = self.called_node_name(node.func)
         if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
-            if isinstance(node.args[0], ast.Str):
-                varname = node.args[0].s
+            if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
+                varname = node.args[0].value
                 if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
                     if varname not in self.contains:
                         self.contains[varname] = set()
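
The isinstance change above tracks the Python ast module: ast.Str has been
deprecated since Python 3.8 in favour of ast.Constant and is removed in
3.12, so string arguments to getVar-style calls are now detected like this
(standalone illustration):

    import ast

    call = ast.parse('d.getVar("FOO")').body[0].value
    isinstance(call.args[0], ast.Constant) and isinstance(call.args[0].value, str)
    # -> True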
diff --git a/poky/bitbake/lib/bb/command.py b/poky/bitbake/lib/bb/command.py
index f2ee587..79b6c07 100644
--- a/poky/bitbake/lib/bb/command.py
+++ b/poky/bitbake/lib/bb/command.py
@@ -550,8 +550,8 @@
         and return a datastore object representing the environment
         for the recipe.
         """
-        fn = params[0]
-        mc = bb.runqueue.mc_from_tid(fn)
+        virtualfn = params[0]
+        (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
         appends = params[1]
         appendlist = params[2]
         if len(params) > 3:
@@ -574,10 +574,10 @@
         if config_data:
             # We have to use a different function here if we're passing in a datastore
             # NOTE: we took a copy above, so we don't do it here again
-            envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)['']
+            envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
         else:
             # Use the standard path
-            envdata = command.cooker.databuilder.parseRecipe(fn, appendfiles, layername)
+            envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
         idx = command.remotedatastores.store(envdata)
         return DataStoreConnectionHandle(idx)
     parseRecipeFile.readonly = True
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 599c7dd..d658db9 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -303,6 +303,10 @@
         self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}
 
+        eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
+        if not self.configuration.writeeventlog and eventlog:
+            self.setupEventLog(eventlog)
+
         if consolelog:
             self.data.setVar("BB_CONSOLELOG", consolelog)
 
@@ -345,7 +349,7 @@
                     sync=False,
                     upstream=upstream,
                 )
-                self.hashserv.serve_as_process()
+                self.hashserv.serve_as_process(log_level=logging.WARNING)
             for mc in self.databuilder.mcdata:
                 self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
@@ -409,6 +413,18 @@
 
         self._parsecache_set(False)
 
+    def setupEventLog(self, eventlog):
+        if self.eventlog and self.eventlog[0] != eventlog:
+            bb.event.unregister_UIHhandler(self.eventlog[1])
+        if not self.eventlog or self.eventlog[0] != eventlog:
+            # we log all events to a file if so directed
+            # register the log file writer as UI Handler
+            if not os.path.exists(os.path.dirname(eventlog)):
+                bb.utils.mkdirhier(os.path.dirname(eventlog))
+            writer = EventWriter(self, eventlog)
+            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+            self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+
     def updateConfigOpts(self, options, environment, cmdline):
         self.ui_cmdline = cmdline
         clean = True
@@ -428,14 +444,7 @@
                 setattr(self.configuration, o, options[o])
 
         if self.configuration.writeeventlog:
-            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
-                bb.event.unregister_UIHhandler(self.eventlog[1])
-            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
-                # we log all events to a file if so directed
-                # register the log file writer as UI Handler
-                writer = EventWriter(self, self.configuration.writeeventlog)
-                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
-                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+            self.setupEventLog(self.configuration.writeeventlog)
 
         bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
         bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -1548,7 +1557,13 @@
 
 
     def getAllKeysWithFlags(self, flaglist):
+        def dummy_autorev(d):
+            return
+
         dump = {}
+        # Horrible but for now we need to avoid any sideeffects of autorev being called
+        saved = bb.fetch2.get_autorev
+        bb.fetch2.get_autorev = dummy_autorev
         for k in self.data.keys():
             try:
                 expand = True
@@ -1568,6 +1583,7 @@
                             dump[k][d] = None
             except Exception as e:
                 print(e)
+        bb.fetch2.get_autorev = saved
         return dump
 
 
@@ -1787,7 +1803,7 @@
             for ignored in ('SCCS', 'CVS', '.svn'):
                 if ignored in dirs:
                     dirs.remove(ignored)
-            found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
+            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
 
         return found
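
The endswith() change above fixes a latent TypeError: str.endswith() accepts
a tuple of suffixes but not a list. Standalone illustration:

    "recipe.bb".endswith(('.bb', '.bbappend'))   # -> True
    "recipe.bb".endswith(['.bb', '.bbappend'])   # -> TypeError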
 
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 42b8d64..0649e40 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -503,8 +503,8 @@
 
         if appends:
             bb_data.setVar('__BBAPPEND', " ".join(appends))
-        bb_data = bb.parse.handle(bbfile, bb_data)
-        return bb_data
+
+        return bb.parse.handle(bbfile, bb_data)
 
     def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
         """
@@ -516,8 +516,7 @@
             (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
             bb_data = self.mcdata[mc].createCopy()
             bb_data.setVar("__ONLYFINALISE", virtual or "default")
-            datastores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
-            return datastores
+            return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
 
         if mc is not None:
             bb_data = self.mcdata[mc].createCopy()
@@ -543,5 +542,5 @@
         """
         logger.debug("Parsing %s (full)" % virtualfn)
         (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
-        bb_data = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
-        return bb_data[virtual]
+        datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
+        return datastores[virtual]
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index ffb1a92..22a2f80 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -872,6 +872,8 @@
                      'AWS_PROFILE',
                      'AWS_ACCESS_KEY_ID',
                      'AWS_SECRET_ACCESS_KEY',
+                     'AWS_ROLE_ARN',
+                     'AWS_WEB_IDENTITY_TOKEN_FILE',
                      'AWS_DEFAULT_REGION',
                      'GIT_CACHE_PATH',
                      'REMOTE_CONTAINERS_IPC',
@@ -1579,6 +1581,7 @@
             unpackdir = rootdir
 
         if not unpack or not cmd:
+            urldata.unpack_tracer.unpack("file-copy", unpackdir)
             # If file == dest, then avoid any copies, as we already put the file into dest!
             dest = os.path.join(unpackdir, os.path.basename(file))
             if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1593,6 +1596,8 @@
                         destdir = urlpath.rsplit("/", 1)[0] + '/'
                         bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+        else:
+            urldata.unpack_tracer.unpack("archive-extract", unpackdir)
 
         if not cmd:
             return
@@ -1684,6 +1689,55 @@
         """
         return []
 
+
+class DummyUnpackTracer(object):
+    """
+    Abstract API definition for a class that traces unpacked source files back
+    to their respective upstream SRC_URI entries, for software composition
+    analysis, license compliance and detailed SBOM generation purposes.
+    User may load their own unpack tracer class (instead of the dummy
+    one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
+    """
+    def start(self, unpackdir, urldata_dict, d):
+        """
+        Start tracing the core Fetch.unpack process, using an index to map
+        unpacked files to each SRC_URI entry.
+        This method is called by Fetch.unpack and it may receive nested calls by
+        gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit
+        URLs and by recursively calling Fetch.unpack from new (nested) Fetch
+        instances.
+        """
+        return
+    def start_url(self, url):
+        """Start tracing url unpack process.
+        This method is called by Fetch.unpack before the fetcher-specific unpack
+        method starts, and it may receive nested calls from gitsm and npmsw
+        fetchers.
+        """
+        return
+    def unpack(self, unpack_type, destdir):
+        """
+        Set unpack_type and destdir for current url.
+        This method is called by the fetcher-specific unpack method after url
+        tracing started.
+        """
+        return
+    def finish_url(self, url):
+        """Finish tracing url unpack process and update the file index.
+        This method is called by Fetch.unpack after the fetcher-specific unpack
+        method finished its job, and it may receive nested calls from gitsm
+        and npmsw fetchers.
+        """
+        return
+    def complete(self):
+        """
+        Finish tracing the Fetch.unpack process, and check if all nested
+        Fetch.unpack calls (if any) have been completed; if so, save collected
+        metadata.
+        """
+        return
+
+
 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
         if localonly and cache:
@@ -1704,10 +1758,30 @@
         if key in urldata_cache:
             self.ud = urldata_cache[key]
 
+        # the unpack_tracer object needs to be made available to possible nested
+        # Fetch instances (when those are created by gitsm and npmsw fetchers)
+        # so we set it as a global variable
+        global unpack_tracer
+        try:
+            unpack_tracer
+        except NameError:
+            class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
+            if class_path:
+                # use user-defined unpack tracer class
+                import importlib
+                module_name, _, class_name = class_path.rpartition(".")
+                module = importlib.import_module(module_name)
+                class_ = getattr(module, class_name)
+                unpack_tracer = class_()
+            else:
+                # fall back to the dummy/abstract class
+                unpack_tracer = DummyUnpackTracer()
+
         for url in urls:
             if url not in self.ud:
                 try:
                     self.ud[url] = FetchData(url, d, localonly)
+                    self.ud[url].unpack_tracer = unpack_tracer
                 except NonLocalMethod:
                     if localonly:
                         self.ud[url] = None
@@ -1883,6 +1957,8 @@
         if not urls:
             urls = self.urls
 
+        unpack_tracer.start(root, self.ud, self.d)
+
         for u in urls:
             ud = self.ud[u]
             ud.setup_localpath(self.d)
@@ -1890,11 +1966,15 @@
             if ud.lockfile:
                 lf = bb.utils.lockfile(ud.lockfile)
 
+            unpack_tracer.start_url(u)
             ud.method.unpack(ud, root, self.d)
+            unpack_tracer.finish_url(u)
 
             if ud.lockfile:
                 bb.utils.unlockfile(lf)
 
+        unpack_tracer.complete()
+
     def clean(self, urls=None):
         """
         Clean files that the fetcher gets or places
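
The DummyUnpackTracer class above defines the tracer contract: start() and complete() bracket a whole Fetch.unpack() run, while start_url(), unpack() and finish_url() bracket each SRC_URI entry. As a minimal sketch, a custom tracer (the mylayer.tracer module name is a hypothetical placeholder) could be enabled with BB_UNPACK_TRACER_CLASS = "mylayer.tracer.LoggingUnpackTracer" and implemented as:

    import bb

    class LoggingUnpackTracer:
        def start(self, unpackdir, urldata_dict, d):
            # One call per Fetch.unpack() run; may nest for gitsm/npmsw.
            self.records = {}

        def start_url(self, url):
            self.current = url

        def unpack(self, unpack_type, destdir):
            # Fetcher-specific unpack methods report their type/destination.
            self.records[self.current] = (unpack_type, destdir)

        def finish_url(self, url):
            kind, dest = self.records.get(url, ("unknown", "?"))
            bb.note("unpacked %s via %s into %s" % (url, kind, dest))

        def complete(self):
            # All (possibly nested) Fetch.unpack() calls have finished; a
            # real tracer would persist its collected metadata here.
            pass
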
diff --git a/poky/bitbake/lib/bb/fetch2/crate.py b/poky/bitbake/lib/bb/fetch2/crate.py
index 3310ed0..01d4943 100644
--- a/poky/bitbake/lib/bb/fetch2/crate.py
+++ b/poky/bitbake/lib/bb/fetch2/crate.py
@@ -101,8 +101,10 @@
         bp = d.getVar('BP')
         if bp == ud.parm.get('name'):
             cmd = "tar -xz --no-same-owner -f %s" % thefile
+            ud.unpack_tracer.unpack("crate-extract", rootdir)
         else:
             cargo_bitbake = self._cargo_bitbake_path(rootdir)
+            ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
 
             cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
 
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 4385d0b..0deeb5c 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -48,10 +48,23 @@
    instead of branch.
    The default is "0", set nobranch=1 if needed.
 
+- subpath
+   Limit the checkout to a specific subpath of the tree.
+   By default, the whole tree is checked out; set subpath=<path> if needed.
+
+- destsuffix
+   The name of the path in which to place the checkout.
+   By default, the path is git/; set destsuffix=<suffix> if needed.
+
 - usehead
    For local git:// urls to use the current branch HEAD as the revision for use with
    AUTOREV. Implies nobranch.
 
+- lfs
+   Enable the checkout to use LFS for large files. This will download all LFS files
+   in the download step, as the unpack step does not have network access.
+   The default is "1", set lfs=0 to skip.
+
 """
 
 # Copyright (C) 2005 Richard Purdie
@@ -462,8 +475,8 @@
                 # Only do this if the unpack resulted in a .git/lfs directory being
                 # created; this only happens if at least one blob needed to be
                 # downloaded.
-                if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
-                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
+                if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
+                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
 
     def build_mirror_data(self, ud, d):
 
@@ -589,6 +602,8 @@
         destdir = ud.destdir = os.path.join(destdir, destsuffix)
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
+        if not ud.bareclone:
+            ud.unpack_tracer.unpack("git", destdir)
 
         need_lfs = self._need_lfs(ud)
 
@@ -627,6 +642,8 @@
                 raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
             elif not need_lfs:
                 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+            else:
+                runfetchcmd("%s lfs install" % ud.basecmd, d, workdir=destdir)
 
         if not ud.nocheckout:
             if subpath:
@@ -686,8 +703,11 @@
         Check if the repository has 'lfs' (large file) content
         """
 
-        # The bare clonedir doesn't use the remote names; it has the branch immediately.
-        if wd == ud.clonedir:
+        if ud.nobranch:
+            # If no branch is specified, use the current git commit
+            refname = self._build_revision(ud, d, ud.names[0])
+        elif wd == ud.clonedir:
+            # The bare clonedir doesn't use the remote names; it has the branch immediately.
             refname = ud.branches[ud.names[0]]
         else:
             refname = "origin/%s" % ud.branches[ud.names[0]]
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index a87361c..f7f3af7 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -218,6 +218,10 @@
 
             try:
                 newfetch = Fetch([url], d, cache=False)
+                # modpath is needed by unpack tracer to calculate submodule
+                # checkout dir
+                new_ud = newfetch.ud[url]
+                new_ud.modpath = modpath
                 newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
             except Exception as e:
                 logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 063e130..cbff8c4 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@
         revflag = "-r %s" % ud.revision
         subdir = ud.parm.get("destsuffix", ud.module)
         codir = "%s/%s" % (destdir, subdir)
+        ud.unpack_tracer.unpack("hg", codir)
 
         scmdata = ud.parm.get("scmdata", "")
         if scmdata != "nokeep":
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index f83485a..15f3f19 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -298,6 +298,7 @@
         destsuffix = ud.parm.get("destsuffix", "npm")
         destdir = os.path.join(rootdir, destsuffix)
         npm_unpack(ud.localpath, destdir, d)
+        ud.unpack_tracer.unpack("npm", destdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index 4ff2c8f..ff5f8dc 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -191,7 +191,9 @@
             else:
                 raise ParameterError("Unsupported dependency: %s" % name, ud.url)
 
+            # name is needed by unpack tracer for module mapping
             ud.deps.append({
+                "name": name,
                 "url": url,
                 "localpath": localpath,
                 "extrapaths": extrapaths,
@@ -270,6 +272,7 @@
         destsuffix = ud.parm.get("destsuffix")
         if destsuffix:
             destdir = os.path.join(rootdir, destsuffix)
+        ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
 
         bb.utils.mkdirhier(destdir)
         bb.utils.copyfile(ud.shrinkwrap_file,
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 56147c5..02d7ff9 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -157,7 +157,7 @@
             (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
             self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
             if tid in self.rq.runq_buildable:
-                self.buildable.append(tid)
+                self.buildable.add(tid)
 
         self.rev_prio_map = None
         self.is_pressure_usable()
@@ -1021,6 +1021,7 @@
 
                 for tid in list(runall_tids):
                     mark_active(tid, 1)
+                    self.target_tids.append(tid)
                     if self.cooker.configuration.force:
                         invalidate_task(tid, False)
 
@@ -1318,6 +1319,16 @@
         self.worker = {}
         self.fakeworker = {}
 
+    @staticmethod
+    def send_pickled_data(worker, data, name):
+        msg = bytearray()
+        msg.extend(b"<" + name.encode() + b">")
+        pickled_data = pickle.dumps(data)
+        msg.extend(len(pickled_data).to_bytes(4, 'big'))
+        msg.extend(pickled_data)
+        msg.extend(b"</" + name.encode() + b">")
+        worker.stdin.write(msg)
+
     def _start_worker(self, mc, fakeroot = False, rqexec = None):
         logger.debug("Starting bitbake-worker")
         magic = "decafbad"
@@ -1355,9 +1366,9 @@
             "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
         }
 
-        worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
-        worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>")
-        worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
+        RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
+        RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
+        RunQueue.send_pickled_data(worker, workerdata, "workerdata")
         worker.stdin.flush()
 
         return RunQueueWorker(worker, workerpipe)
@@ -1367,7 +1378,7 @@
             return
         logger.debug("Teardown for bitbake-worker")
         try:
-           worker.process.stdin.write(b"<quit></quit>")
+           RunQueue.send_pickled_data(worker.process, b"", "quit")
            worker.process.stdin.flush()
            worker.process.stdin.close()
         except IOError:
@@ -1756,20 +1767,20 @@
         for tid in invalidtasks:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
             pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-            h = self.rqdata.runtaskentries[tid].hash
+            h = self.rqdata.runtaskentries[tid].unihash
             matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
             match = None
             for m in matches:
                 if h in m:
                     match = m
             if match is None:
-                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
+                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
             matches = {k : v for k, v in iter(matches.items()) if h not in k}
             if matches:
                 latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
                 prevh = __find_sha256__.search(latestmatch).group(0)
                 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
-                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n  We need hash %s, closest matching task was %s\n  " % (pn, taskname, h, prevh) + '\n  '.join(output))
+                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n  We need hash %s, most recent matching task was %s\n  " % (pn, taskname, h, prevh) + '\n  '.join(output))
 
 
 class RunQueueExecute:
@@ -1851,11 +1862,6 @@
         self.tasks_notcovered = set()
         self.scenequeue_notneeded = set()
 
-        # We can't skip specified target tasks which aren't setscene tasks
-        self.cantskip = set(self.rqdata.target_tids)
-        self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
-        self.cantskip.intersection_update(self.rqdata.runtaskentries)
-
         schedulers = self.get_schedulers()
         for scheduler in schedulers:
             if self.scheduler == scheduler.name:
@@ -1868,7 +1874,23 @@
 
         #if self.rqdata.runq_setscene_tids:
         self.sqdata = SQData()
-        build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
+        build_scenequeue_data(self.sqdata, self.rqdata, self)
+
+        update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
+
+        # Compute a list of 'stale' sstate tasks where the current hash does not match the one
+        # in any stamp files. Pass the list out to metadata as an event.
+        found = {}
+        for tid in self.rqdata.runq_setscene_tids:
+            (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+            stamps = bb.build.find_stale_stamps(taskname, taskfn)
+            if stamps:
+                if mc not in found:
+                    found[mc] = {}
+                found[mc][tid] = stamps
+        for mc in found:
+            event = bb.event.StaleSetSceneTasks(found[mc])
+            bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
 
     def runqueue_process_waitpid(self, task, status, fakerootlog=None):
 
@@ -1894,14 +1916,14 @@
     def finish_now(self):
         for mc in self.rq.worker:
             try:
-                self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>")
+                RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
                 self.rq.worker[mc].process.stdin.flush()
             except IOError:
                 # worker must have died?
                 pass
         for mc in self.rq.fakeworker:
             try:
-                self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>")
+                RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
                 self.rq.fakeworker[mc].process.stdin.flush()
             except IOError:
                 # worker must have died?
@@ -2196,10 +2218,10 @@
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
                 if not mc in self.rq.fakeworker:
                     self.rq.start_fakeworker(self, mc)
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
+                RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
+                RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
@@ -2297,10 +2319,10 @@
                         self.rq.state = runQueueFailed
                         self.stats.taskFailed()
                         return True
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
+                RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
+                RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
@@ -2388,7 +2410,7 @@
             return
 
         notcovered = set(self.scenequeue_notcovered)
-        notcovered |= self.cantskip
+        notcovered |= self.sqdata.cantskip
         for tid in self.scenequeue_notcovered:
             notcovered |= self.sqdata.sq_covered_tasks[tid]
         notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2502,9 +2524,9 @@
 
         if changed:
             for mc in self.rq.worker:
-                self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+                RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
             for mc in self.rq.fakeworker:
-                self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+                RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
 
             hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
 
@@ -2767,12 +2789,17 @@
         # A list of normal tasks a setscene task covers
         self.sq_covered_tasks = {}
 
-def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
+def build_scenequeue_data(sqdata, rqdata, sqrq):
 
     sq_revdeps = {}
     sq_revdeps_squash = {}
     sq_collated_deps = {}
 
+    # We can't skip specified target tasks which aren't setscene tasks
+    sqdata.cantskip = set(rqdata.target_tids)
+    sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
+    sqdata.cantskip.intersection_update(rqdata.runtaskentries)
+
     # We need to construct a dependency graph for the setscene functions. Intermediate
     # dependencies between the setscene tasks only complicate the code. This code
     # therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2841,7 +2868,7 @@
     for tid in rqdata.runtaskentries:
         if not rqdata.runtaskentries[tid].revdeps:
             sqdata.unskippable.add(tid)
-    sqdata.unskippable |= sqrq.cantskip
+    sqdata.unskippable |= sqdata.cantskip
     while new:
         new = False
         orig = sqdata.unskippable.copy()
@@ -2951,22 +2978,6 @@
                 sqrq.sq_deferred[tid] = sqdata.hashes[h]
                 bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
 
-    update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
-
-    # Compute a list of 'stale' sstate tasks where the current hash does not match the one
-    # in any stamp files. Pass the list out to metadata as an event.
-    found = {}
-    for tid in rqdata.runq_setscene_tids:
-        (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
-        stamps = bb.build.find_stale_stamps(taskname, taskfn)
-        if stamps:
-            if mc not in found:
-                found[mc] = {}
-            found[mc][tid] = stamps
-    for mc in found:
-        event = bb.event.StaleSetSceneTasks(found[mc])
-        bb.event.fire(event, cooker.databuilder.mcdata[mc])
-
 def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
 
     (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
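
send_pickled_data() replaces the earlier raw writes of "<tag>" + pickle + "</tag>" with a length-prefixed frame, so the receiving side can read exactly the payload size instead of scanning pickled bytes for a closing tag. A sketch of the matching reader side (the real bitbake-worker parser differs; this is illustrative only):

    import pickle

    def read_pickled_data(stream, name):
        # Frame layout written by send_pickled_data():
        #   b"<name>" + 4-byte big-endian payload length + payload + b"</name>"
        opening = b"<" + name.encode() + b">"
        if stream.read(len(opening)) != opening:
            raise ValueError("unexpected frame tag")
        length = int.from_bytes(stream.read(4), "big")
        payload = stream.read(length)
        if stream.read(len(name) + 3) != b"</" + name.encode() + b">":
            raise ValueError("unterminated frame")
        return pickle.loads(payload)
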
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index eeb7a31..c7a2340 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -2277,7 +2277,7 @@
 
     @skipIfNoGitLFS()
     @skipIfNoNetwork()
-    def test_real_git_lfs_repo_succeeds(self):
+    def test_real_git_lfs_repo_skips(self):
         self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
         f = self.get_real_git_lfs_file()
         # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
@@ -2290,24 +2290,41 @@
         with open(f) as fh:
             self.assertEqual(lfs_file, fh.read())
 
+    @skipIfNoGitLFS()
     def test_lfs_enabled(self):
         import shutil
 
         uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        # Careful: suppress initial attempt at downloading until
-        # we know whether git-lfs is installed.
+        # With git-lfs installed, test that we can fetch and unpack
+        fetcher, ud = self.fetch()
+        shutil.rmtree(self.gitdir, ignore_errors=True)
+        fetcher.unpack(self.d.getVar('WORKDIR'))
+
+    @skipIfNoGitLFS()
+    def test_lfs_disabled(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        # Verify that the fetcher can survive even if the source
+        # repository has Git LFS usage configured.
+        fetcher, ud = self.fetch()
+        fetcher.unpack(self.d.getVar('WORKDIR'))
+
+    def test_lfs_enabled_not_installed(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        # Careful: suppress initial attempt at downloading
         fetcher, ud = self.fetch(uri=None, download=False)
-        self.assertIsNotNone(ud.method._find_git_lfs)
 
-        # If git-lfs can be found, the unpack should be successful. Only
-        # attempt this with the real live copy of git-lfs installed.
-        if ud.method._find_git_lfs(self.d):
-            fetcher.download()
-            shutil.rmtree(self.gitdir, ignore_errors=True)
-            fetcher.unpack(self.d.getVar('WORKDIR'))
-
+        # Artificially assert that git-lfs is not installed, so
+        # we can verify a failure to unpack in its absence.
         old_find_git_lfs = ud.method._find_git_lfs
         try:
             # If git-lfs cannot be found, the unpack should throw an error
@@ -2319,29 +2336,21 @@
         finally:
             ud.method._find_git_lfs = old_find_git_lfs
 
-    def test_lfs_disabled(self):
+    def test_lfs_disabled_not_installed(self):
         import shutil
 
         uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        # In contrast to test_lfs_enabled(), allow the implicit download
-        # done by self.fetch() to occur here. The point of this test case
-        # is to verify that the fetcher can survive even if the source
-        # repository has Git LFS usage configured.
-        fetcher, ud = self.fetch()
-        self.assertIsNotNone(ud.method._find_git_lfs)
+        # Careful: suppress initial attempt at downloading
+        fetcher, ud = self.fetch(uri=None, download=False)
 
+        # Artificially assert that git-lfs is not installed, so
+        # we can verify a failure to unpack in its absence.
         old_find_git_lfs = ud.method._find_git_lfs
         try:
-            # If git-lfs can be found, the unpack should be successful. A
-            # live copy of git-lfs is not required for this case, so
-            # unconditionally forge its presence.
-            ud.method._find_git_lfs = lambda d: True
-            shutil.rmtree(self.gitdir, ignore_errors=True)
-            fetcher.unpack(self.d.getVar('WORKDIR'))
-            # If git-lfs cannot be found, the unpack should be successful
-
+            # Even if git-lfs cannot be found, the unpack should be successful
+            fetcher.download()
             ud.method._find_git_lfs = lambda d: False
             shutil.rmtree(self.gitdir, ignore_errors=True)
             fetcher.unpack(self.d.getVar('WORKDIR'))
@@ -3042,9 +3051,11 @@
         self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
         self.d.setVar("BB_NO_NETWORK", "1")
         self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+        self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+        self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
+        self.testfilename = "bitbake-fetch.test"
 
     def make_git_repo(self):
-        self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
         recipeurl = "git:/git.fake.repo/bitbake"
         os.makedirs(self.gitdir)
         self.git_init(cwd=self.gitdir)
@@ -3054,15 +3065,23 @@
 
     def git_new_commit(self):
         import random
-        testfilename = "bibake-fetch.test"
         os.unlink(os.path.join(self.mirrordir, self.mirrorname))
-        with open(os.path.join(self.gitdir, testfilename), "w") as testfile:
-            testfile.write("Useless random data {}".format(random.random()))
-        self.git("add {}".format(testfilename), self.gitdir)
-        self.git("commit -a -m \"This random commit {}. I'm useless.\"".format(random.random()), self.gitdir)
+        branch = self.git("branch --show-current", self.gitdir).split()
+        with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
+            testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
+        self.git("add {}".format(self.testfilename), self.gitdir)
+        self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
         bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd =  self.gitdir)
         return self.git("rev-parse HEAD", self.gitdir).strip()
 
+    def git_new_branch(self, name):
+        self.git_new_commit()
+        head = self.git("rev-parse HEAD", self.gitdir).strip()
+        self.git("checkout -b {}".format(name), self.gitdir)
+        newrev = self.git_new_commit()
+        self.git("checkout {}".format(head), self.gitdir)
+        return newrev
+
     def test_mirror_commit_nonexistent(self):
         self.make_git_repo()
         self.d.setVar("SRCREV", "0"*40)
@@ -3083,6 +3102,59 @@
         with self.assertRaises(bb.fetch2.NetworkAccess):
             fetcher.download()
 
+    def test_mirror_tarball_multiple_branches(self):
+        """
+        test if PREMIRRORS can handle multiple name/branches correctly
+        both branches have required revisions
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", branch2rev)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+        fetcher.download()
+        fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
+        unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
+        self.assertTrue(os.path.exists(unpacked), "Repo has not been unpacked properly!")
+        with open(unpacked, 'r') as f:
+            content = f.read()
+            ## We expect to see testbranch1 in the file, not master, not testbranch2
+            self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!")
+
+    def test_mirror_tarball_multiple_branches_nobranch(self):
+        """
+        test if PREMIRRORS can handle multiple name/branches correctly
+        Unbalanced name/branches raises ParameterError
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", branch2rev)
+        with self.assertRaises(bb.fetch2.ParameterError):
+            fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+    def test_mirror_tarball_multiple_branches_norev(self):
+        """
+        test if PREMIRRORS can handle multiple name/branches correctly
+        one of the branches specifies a non-existent SRCREV
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", "0"*40)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+
 class FetchPremirroronlyNetworkTest(FetcherTest):
 
     def setUp(self):
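
The three new tests pin down the fetcher's contract for multi-name git URLs. As a recipe-side sketch (revisions illustrative), the pattern under test is:

    # Two named branches in one git URL; each name is pinned by its own
    # SRCREV_<name> variable.
    d.setVar("SRC_URI",
             "git://git.fake.repo/bitbake;protocol=https;"
             "branch=testbranch1,testbranch2;name=branch1,branch2")
    d.setVar("SRCREV_branch1", branch1rev)  # must exist on testbranch1
    d.setVar("SRCREV_branch2", branch2rev)  # must exist on testbranch2
    # The name= and branch= lists must be the same length; an unbalanced
    # pair raises bb.fetch2.ParameterError when the Fetch is constructed.
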
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index b401fa5..61ffad9 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -50,7 +50,7 @@
 
 def get_context():
     return _context
-    
+
 
 def set_context(ctx):
     _context = ctx
@@ -212,8 +212,8 @@
             inversion = True
             # This list is based on behavior and supported comparisons from deb, opkg and rpm.
             #
-            # Even though =<, <<, ==, !=, =>, and >> may not be supported, 
-            # we list each possibly valid item. 
+            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
+            # we list each possibly valid item.
             # The build system is responsible for validation of what it supports.
             if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                 lastcmp = i[0:2]
@@ -347,7 +347,7 @@
         exception = traceback.format_exception_only(t, value)
         error.append('Error executing a python function in %s:\n' % realfile)
 
-        # Strip 'us' from the stack (better_exec call) unless that was where the 
+        # Strip 'us' from the stack (better_exec call) unless that was where the
         # error came from
         if tb.tb_next is not None:
             tb = tb.tb_next
@@ -746,9 +746,9 @@
 # but thats possibly insane and suffixes is probably going to be small
 #
 def prune_suffix(var, suffixes, d):
-    """ 
+    """
     See if var ends with any of the suffixes listed and
-    remove it if found 
+    remove it if found
     """
     for suffix in suffixes:
         if suffix and var.endswith(suffix):
@@ -1001,9 +1001,9 @@
         os.umask(current_mask)
 
 def to_boolean(string, default=None):
-    """ 
+    """
     Check input string and return boolean value True/False/None
-    depending upon the checks 
+    depending upon the checks
     """
     if not string:
         return default
diff --git a/poky/bitbake/lib/codegen.py b/poky/bitbake/lib/codegen.py
index 6955a7a..018b283 100644
--- a/poky/bitbake/lib/codegen.py
+++ b/poky/bitbake/lib/codegen.py
@@ -392,19 +392,7 @@
     def visit_Name(self, node):
         self.write(node.id)
 
-    def visit_Str(self, node):
-        self.write(repr(node.s))
-
-    def visit_Bytes(self, node):
-        self.write(repr(node.s))
-
-    def visit_Num(self, node):
-        self.write(repr(node.n))
-
     def visit_Constant(self, node):
-        # Python 3.8 deprecated visit_Num(), visit_Str(), visit_Bytes(),
-        # visit_NameConstant() and visit_Ellipsis(). They can be removed once we
-        # require 3.8+.
         self.write(repr(node.value))
 
     def visit_Tuple(self, node):
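
The deleted visitors became redundant with Python 3.8's AST consolidation, where numeric, string and bytes literals all parse to ast.Constant, as this quick check shows:

    import ast

    # One visit_Constant() now covers what visit_Num()/visit_Str()/
    # visit_Bytes() used to handle separately.
    tree = ast.parse("x = 42; y = 'hi'; z = b'raw'")
    print([type(n.value).__name__ for n in tree.body])
    # -> ['Constant', 'Constant', 'Constant']
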
diff --git a/poky/bitbake/lib/hashserv/__init__.py b/poky/bitbake/lib/hashserv/__init__.py
index 9cb3fd5..552a332 100644
--- a/poky/bitbake/lib/hashserv/__init__.py
+++ b/poky/bitbake/lib/hashserv/__init__.py
@@ -6,150 +6,126 @@
 import asyncio
 from contextlib import closing
 import re
-import sqlite3
 import itertools
 import json
+from collections import namedtuple
+from urllib.parse import urlparse
 
 UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
 
 ADDR_TYPE_UNIX = 0
 ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
 
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-UNIHASH_TABLE_DEFINITION = (
-    ("method", "TEXT NOT NULL", "UNIQUE"),
-    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
-    ("unihash", "TEXT NOT NULL", ""),
-)
-
-UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
-
-OUTHASH_TABLE_DEFINITION = (
-    ("method", "TEXT NOT NULL", "UNIQUE"),
-    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
-    ("outhash", "TEXT NOT NULL", "UNIQUE"),
-    ("created", "DATETIME", ""),
-
-    # Optional fields
-    ("owner", "TEXT", ""),
-    ("PN", "TEXT", ""),
-    ("PV", "TEXT", ""),
-    ("PR", "TEXT", ""),
-    ("task", "TEXT", ""),
-    ("outhash_siginfo", "TEXT", ""),
-)
-
-OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
-
-def _make_table(cursor, name, definition):
-    cursor.execute('''
-        CREATE TABLE IF NOT EXISTS {name} (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            {fields}
-            UNIQUE({unique})
-            )
-        '''.format(
-            name=name,
-            fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
-            unique=", ".join(name for name, _, flags in definition if "UNIQUE" in flags)
-    ))
-
-
-def setup_database(database, sync=True):
-    db = sqlite3.connect(database)
-    db.row_factory = sqlite3.Row
-
-    with closing(db.cursor()) as cursor:
-        _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
-        _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
-
-        cursor.execute('PRAGMA journal_mode = WAL')
-        cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
-
-        # Drop old indexes
-        cursor.execute('DROP INDEX IF EXISTS taskhash_lookup')
-        cursor.execute('DROP INDEX IF EXISTS outhash_lookup')
-        cursor.execute('DROP INDEX IF EXISTS taskhash_lookup_v2')
-        cursor.execute('DROP INDEX IF EXISTS outhash_lookup_v2')
-
-        # TODO: Upgrade from tasks_v2?
-        cursor.execute('DROP TABLE IF EXISTS tasks_v2')
-
-        # Create new indexes
-        cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)')
-        cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)')
-
-    return db
+User = namedtuple("User", ("username", "permissions"))
 
 
 def parse_address(addr):
     if addr.startswith(UNIX_PREFIX):
-        return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX):],))
+        return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+    elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+        return (ADDR_TYPE_WS, (addr,))
     else:
-        m = re.match(r'\[(?P<host>[^\]]*)\]:(?P<port>\d+)$', addr)
+        m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
         if m is not None:
-            host = m.group('host')
-            port = m.group('port')
+            host = m.group("host")
+            port = m.group("port")
         else:
-            host, port = addr.split(':')
+            host, port = addr.split(":")
 
         return (ADDR_TYPE_TCP, (host, int(port)))
 
 
-def chunkify(msg, max_chunk):
-    if len(msg) < max_chunk - 1:
-        yield ''.join((msg, "\n"))
-    else:
-        yield ''.join((json.dumps({
-                'chunk-stream': None
-            }), "\n"))
+def create_server(
+    addr,
+    dbname,
+    *,
+    sync=True,
+    upstream=None,
+    read_only=False,
+    db_username=None,
+    db_password=None,
+    anon_perms=None,
+    admin_username=None,
+    admin_password=None,
+):
+    def sqlite_engine():
+        from .sqlite import DatabaseEngine
 
-        args = [iter(msg)] * (max_chunk - 1)
-        for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
-            yield ''.join(itertools.chain(m, "\n"))
-        yield "\n"
+        return DatabaseEngine(dbname, sync)
 
+    def sqlalchemy_engine():
+        from .sqlalchemy import DatabaseEngine
 
-def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
+        return DatabaseEngine(dbname, db_username, db_password)
+
     from . import server
-    db = setup_database(dbname, sync=sync)
-    s = server.Server(db, upstream=upstream, read_only=read_only)
+
+    if "://" in dbname:
+        db_engine = sqlalchemy_engine()
+    else:
+        db_engine = sqlite_engine()
+
+    if anon_perms is None:
+        anon_perms = server.DEFAULT_ANON_PERMS
+
+    s = server.Server(
+        db_engine,
+        upstream=upstream,
+        read_only=read_only,
+        anon_perms=anon_perms,
+        admin_username=admin_username,
+        admin_password=admin_password,
+    )
 
     (typ, a) = parse_address(addr)
     if typ == ADDR_TYPE_UNIX:
         s.start_unix_server(*a)
+    elif typ == ADDR_TYPE_WS:
+        url = urlparse(a[0])
+        s.start_websocket_server(url.hostname, url.port)
     else:
         s.start_tcp_server(*a)
 
     return s
 
 
-def create_client(addr):
+def create_client(addr, username=None, password=None):
     from . import client
-    c = client.Client()
 
-    (typ, a) = parse_address(addr)
-    if typ == ADDR_TYPE_UNIX:
-        c.connect_unix(*a)
-    else:
-        c.connect_tcp(*a)
+    c = client.Client(username, password)
 
-    return c
+    try:
+        (typ, a) = parse_address(addr)
+        if typ == ADDR_TYPE_UNIX:
+            c.connect_unix(*a)
+        elif typ == ADDR_TYPE_WS:
+            c.connect_websocket(*a)
+        else:
+            c.connect_tcp(*a)
+        return c
+    except Exception as e:
+        c.close()
+        raise e
 
-async def create_async_client(addr):
+
+async def create_async_client(addr, username=None, password=None):
     from . import client
-    c = client.AsyncClient()
 
-    (typ, a) = parse_address(addr)
-    if typ == ADDR_TYPE_UNIX:
-        await c.connect_unix(*a)
-    else:
-        await c.connect_tcp(*a)
+    c = client.AsyncClient(username, password)
 
-    return c
+    try:
+        (typ, a) = parse_address(addr)
+        if typ == ADDR_TYPE_UNIX:
+            await c.connect_unix(*a)
+        elif typ == ADDR_TYPE_WS:
+            await c.connect_websocket(*a)
+        else:
+            await c.connect_tcp(*a)
+
+        return c
+    except Exception as e:
+        await c.close()
+        raise e
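
With parse_address() now distinguishing three address families, both factory functions accept any of the following forms (hosts, ports and paths are illustrative):

    from hashserv import create_client

    c1 = create_client("unix:///run/hashserv.sock")        # ADDR_TYPE_UNIX
    c2 = create_client("localhost:8686")                   # ADDR_TYPE_TCP
    c3 = create_client("ws://hashserv.example.com:8686")   # ADDR_TYPE_WS

On the server side, create_server() picks its database engine from dbname: a value containing "://" selects the SQLAlchemy engine, while anything else is treated as a SQLite path.
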
diff --git a/poky/bitbake/lib/hashserv/client.py b/poky/bitbake/lib/hashserv/client.py
index f676d26..35a9768 100644
--- a/poky/bitbake/lib/hashserv/client.py
+++ b/poky/bitbake/lib/hashserv/client.py
@@ -6,6 +6,7 @@
 import logging
 import socket
 import bb.asyncrpc
+import json
 from . import create_async_client
 
 
@@ -16,36 +17,47 @@
     MODE_NORMAL = 0
     MODE_GET_STREAM = 1
 
-    def __init__(self):
-        super().__init__('OEHASHEQUIV', '1.1', logger)
+    def __init__(self, username=None, password=None):
+        super().__init__("OEHASHEQUIV", "1.1", logger)
         self.mode = self.MODE_NORMAL
+        self.username = username
+        self.password = password
+        self.saved_become_user = None
 
     async def setup_connection(self):
         await super().setup_connection()
         cur_mode = self.mode
         self.mode = self.MODE_NORMAL
         await self._set_mode(cur_mode)
+        if self.username:
+            # Save off become user temporarily because auth() resets it
+            become = self.saved_become_user
+            await self.auth(self.username, self.password)
+
+            if become:
+                await self.become_user(become)
 
     async def send_stream(self, msg):
         async def proc():
-            self.writer.write(("%s\n" % msg).encode("utf-8"))
-            await self.writer.drain()
-            l = await self.reader.readline()
-            if not l:
-                raise ConnectionError("Connection closed")
-            return l.decode("utf-8").rstrip()
+            await self.socket.send(msg)
+            return await self.socket.recv()
 
         return await self._send_wrapper(proc)
 
     async def _set_mode(self, new_mode):
+        async def stream_to_normal():
+            await self.socket.send("END")
+            return await self.socket.recv()
+
         if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM:
-            r = await self.send_stream("END")
+            r = await self._send_wrapper(stream_to_normal)
             if r != "ok":
-                raise ConnectionError("Bad response from server %r" % r)
+                self.check_invoke_error(r)
+                raise ConnectionError("Unable to transition to normal mode: Bad response from server %r" % r)
         elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL:
-            r = await self.send_message({"get-stream": None})
+            r = await self.invoke({"get-stream": None})
             if r != "ok":
-                raise ConnectionError("Bad response from server %r" % r)
+                raise ConnectionError("Unable to transition to stream mode: Bad response from server %r" % r)
         elif new_mode != self.mode:
             raise Exception(
                 "Undefined mode transition %r -> %r" % (self.mode, new_mode)
@@ -67,7 +79,7 @@
         m["method"] = method
         m["outhash"] = outhash
         m["unihash"] = unihash
-        return await self.send_message({"report": m})
+        return await self.invoke({"report": m})
 
     async def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
         await self._set_mode(self.MODE_NORMAL)
@@ -75,46 +87,123 @@
         m["taskhash"] = taskhash
         m["method"] = method
         m["unihash"] = unihash
-        return await self.send_message({"report-equiv": m})
+        return await self.invoke({"report-equiv": m})
 
     async def get_taskhash(self, method, taskhash, all_properties=False):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message(
+        return await self.invoke(
             {"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
         )
 
     async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message(
-            {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method, "with_unihash": with_unihash}}
+        return await self.invoke(
+            {
+                "get-outhash": {
+                    "outhash": outhash,
+                    "taskhash": taskhash,
+                    "method": method,
+                    "with_unihash": with_unihash,
+                }
+            }
         )
 
     async def get_stats(self):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message({"get-stats": None})
+        return await self.invoke({"get-stats": None})
 
     async def reset_stats(self):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message({"reset-stats": None})
+        return await self.invoke({"reset-stats": None})
 
     async def backfill_wait(self):
         await self._set_mode(self.MODE_NORMAL)
-        return (await self.send_message({"backfill-wait": None}))["tasks"]
+        return (await self.invoke({"backfill-wait": None}))["tasks"]
 
     async def remove(self, where):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message({"remove": {"where": where}})
+        return await self.invoke({"remove": {"where": where}})
 
     async def clean_unused(self, max_age):
         await self._set_mode(self.MODE_NORMAL)
-        return await self.send_message({"clean-unused": {"max_age_seconds": max_age}})
+        return await self.invoke({"clean-unused": {"max_age_seconds": max_age}})
+
+    async def auth(self, username, token):
+        await self._set_mode(self.MODE_NORMAL)
+        result = await self.invoke({"auth": {"username": username, "token": token}})
+        self.username = username
+        self.password = token
+        self.saved_become_user = None
+        return result
+
+    async def refresh_token(self, username=None):
+        await self._set_mode(self.MODE_NORMAL)
+        m = {}
+        if username:
+            m["username"] = username
+        result = await self.invoke({"refresh-token": m})
+        if (
+            self.username
+            and not self.saved_become_user
+            and result["username"] == self.username
+        ):
+            self.password = result["token"]
+        return result
+
+    async def set_user_perms(self, username, permissions):
+        await self._set_mode(self.MODE_NORMAL)
+        return await self.invoke(
+            {"set-user-perms": {"username": username, "permissions": permissions}}
+        )
+
+    async def get_user(self, username=None):
+        await self._set_mode(self.MODE_NORMAL)
+        m = {}
+        if username:
+            m["username"] = username
+        return await self.invoke({"get-user": m})
+
+    async def get_all_users(self):
+        await self._set_mode(self.MODE_NORMAL)
+        return (await self.invoke({"get-all-users": {}}))["users"]
+
+    async def new_user(self, username, permissions):
+        await self._set_mode(self.MODE_NORMAL)
+        return await self.invoke(
+            {"new-user": {"username": username, "permissions": permissions}}
+        )
+
+    async def delete_user(self, username):
+        await self._set_mode(self.MODE_NORMAL)
+        return await self.invoke({"delete-user": {"username": username}})
+
+    async def become_user(self, username):
+        await self._set_mode(self.MODE_NORMAL)
+        result = await self.invoke({"become-user": {"username": username}})
+        if username == self.username:
+            self.saved_become_user = None
+        else:
+            self.saved_become_user = username
+        return result
+
+    async def get_db_usage(self):
+        await self._set_mode(self.MODE_NORMAL)
+        return (await self.invoke({"get-db-usage": {}}))["usage"]
+
+    async def get_db_query_columns(self):
+        await self._set_mode(self.MODE_NORMAL)
+        return (await self.invoke({"get-db-query-columns": {}}))["columns"]
 
 
 class Client(bb.asyncrpc.Client):
-    def __init__(self):
+    def __init__(self, username=None, password=None):
+        self.username = username
+        self.password = password
+
         super().__init__()
         self._add_methods(
             "connect_tcp",
+            "connect_websocket",
             "get_unihash",
             "report_unihash",
             "report_unihash_equiv",
@@ -125,7 +214,17 @@
             "backfill_wait",
             "remove",
             "clean_unused",
+            "auth",
+            "refresh_token",
+            "set_user_perms",
+            "get_user",
+            "get_all_users",
+            "new_user",
+            "delete_user",
+            "become_user",
+            "get_db_usage",
+            "get_db_query_columns",
         )
 
     def _get_async_client(self):
-        return AsyncClient()
+        return AsyncClient(self.username, self.password)
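
A hedged sketch of the new client-side authentication surface; the address and names are illustrative, and the shape of the permissions payload for new_user() is an assumption (the permission strings mirror those defined in hashserv/server.py below):

    from hashserv import create_client

    c = create_client("localhost:8686", username="admin", password="admin-token")
    c.new_user("builder", ["@read", "@report"])  # assumed payload; needs @user-admin
    c.become_user("builder")                     # act on behalf of "builder"
    token = c.refresh_token()["token"]           # rotate the current user's token
    print(c.get_user())                          # current user and permissions
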
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index 45bf476..a865078 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -3,18 +3,51 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-from contextlib import closing, contextmanager
 from datetime import datetime, timedelta
-import enum
 import asyncio
 import logging
 import math
 import time
-from . import create_async_client, UNIHASH_TABLE_COLUMNS, OUTHASH_TABLE_COLUMNS
+import os
+import base64
+import hashlib
+from . import create_async_client
 import bb.asyncrpc
 
+logger = logging.getLogger("hashserv.server")
 
-logger = logging.getLogger('hashserv.server')
+
+# This permission only exists to match nothing
+NONE_PERM = "@none"
+
+READ_PERM = "@read"
+REPORT_PERM = "@report"
+DB_ADMIN_PERM = "@db-admin"
+USER_ADMIN_PERM = "@user-admin"
+ALL_PERM = "@all"
+
+ALL_PERMISSIONS = {
+    READ_PERM,
+    REPORT_PERM,
+    DB_ADMIN_PERM,
+    USER_ADMIN_PERM,
+    ALL_PERM,
+}
+
+DEFAULT_ANON_PERMS = (
+    READ_PERM,
+    REPORT_PERM,
+    DB_ADMIN_PERM,
+)
+
+TOKEN_ALGORITHM = "sha256"
+
+# 48 bytes of random data will result in 64 characters when base64
+# encoded. This number also ensures that the base64 encoding won't have any
+# trailing '=' characters.
+TOKEN_SIZE = 48
+
+SALT_SIZE = 8
 
 
 class Measurement(object):
@@ -104,244 +137,267 @@
         return math.sqrt(self.s / (self.num - 1))
 
     def todict(self):
-        return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
+        return {
+            k: getattr(self, k)
+            for k in ("num", "total_time", "max_time", "average", "stdev")
+        }
 
 
-@enum.unique
-class Resolve(enum.Enum):
-    FAIL = enum.auto()
-    IGNORE = enum.auto()
-    REPLACE = enum.auto()
+token_refresh_semaphore = asyncio.Lock()
 
 
-def insert_table(cursor, table, data, on_conflict):
-    resolve = {
-        Resolve.FAIL: "",
-        Resolve.IGNORE: " OR IGNORE",
-        Resolve.REPLACE: " OR REPLACE",
-    }[on_conflict]
+async def new_token():
+    # Prevent malicious users from using this API to deduce the entropy
+    # pool on the server and thus be able to guess a token. *All* token
+    # refresh requests lock the same global semaphore and then sleep for a
+    # short time. This effectively rate limits the total number of requests
+    # that can be made across all clients to 10/second, which should be enough
+    # since you have to be an authenticated user to make the request in the
+    # first place.
+    async with token_refresh_semaphore:
+        await asyncio.sleep(0.1)
+        raw = os.getrandom(TOKEN_SIZE, os.GRND_NONBLOCK)
 
-    keys = sorted(data.keys())
-    query = 'INSERT{resolve} INTO {table} ({fields}) VALUES({values})'.format(
-        resolve=resolve,
-        table=table,
-        fields=", ".join(keys),
-        values=", ".join(":" + k for k in keys),
-    )
-    prevrowid = cursor.lastrowid
-    cursor.execute(query, data)
-    logging.debug(
-        "Inserting %r into %s, %s",
-        data,
-        table,
-        on_conflict
-    )
-    return (cursor.lastrowid, cursor.lastrowid != prevrowid)
-
-def insert_unihash(cursor, data, on_conflict):
-    return insert_table(cursor, "unihashes_v2", data, on_conflict)
-
-def insert_outhash(cursor, data, on_conflict):
-    return insert_table(cursor, "outhashes_v2", data, on_conflict)
-
-async def copy_unihash_from_upstream(client, db, method, taskhash):
-    d = await client.get_taskhash(method, taskhash)
-    if d is not None:
-        with closing(db.cursor()) as cursor:
-            insert_unihash(
-                cursor,
-                {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS},
-                Resolve.IGNORE,
-            )
-            db.commit()
-    return d
+    return base64.b64encode(raw, b"._").decode("utf-8")
 
 
-class ServerCursor(object):
-    def __init__(self, db, cursor, upstream):
-        self.db = db
-        self.cursor = cursor
-        self.upstream = upstream
+def new_salt():
+    return os.getrandom(SALT_SIZE, os.GRND_NONBLOCK).hex()
+
+
+def hash_token(algo, salt, token):
+    h = hashlib.new(algo)
+    h.update(salt.encode("utf-8"))
+    h.update(token.encode("utf-8"))
+    return ":".join([algo, salt, h.hexdigest()])
+
+
+def permissions(*permissions, allow_anon=True, allow_self_service=False):
+    """
+    Function decorator that can be used to decorate an RPC function call and
+    check that the current user's permissions match the required permissions.
+
+    If allow_anon is True, the user will also be allowed to make the RPC call
+    if the anonymous user permissions match the permissions.
+
+    If allow_self_service is True, and the "username" property in the request
+    is the currently logged-in user, or not specified, the user will also be
+    allowed to make the request. This allows users to access normally
+    privileged API calls, as long as they are only modifying their own user
+    properties (e.g. users can be allowed to reset their own token without
+    @user-admin permissions, but not the token for any other user).
+    """
+
+    def wrapper(func):
+        async def wrap(self, request):
+            if allow_self_service and self.user is not None:
+                username = request.get("username", self.user.username)
+                if username == self.user.username:
+                    request["username"] = self.user.username
+                    return await func(self, request)
+
+            if not self.user_has_permissions(*permissions, allow_anon=allow_anon):
+                if not self.user:
+                    username = "Anonymous user"
+                    user_perms = self.anon_perms
+                else:
+                    username = self.user.username
+                    user_perms = self.user.permissions
+
+                self.logger.info(
+                    "User %s with permissions %r denied from calling %s. Missing permissions(s) %r",
+                    username,
+                    ", ".join(user_perms),
+                    func.__name__,
+                    ", ".join(permissions),
+                )
+                raise bb.asyncrpc.InvokeError(
+                    f"{username} is not allowed to access permissions(s) {', '.join(permissions)}"
+                )
+
+            return await func(self, request)
+
+        return wrap
+
+    return wrapper
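
Two hedged sketches of how the primitives above are meant to compose; neither helper appears in this hunk, so treat both as assumptions:

    # 1. Checking a presented token against a stored "algo:salt:digest"
    #    credential re-hashes with the stored parameters (a constant-time
    #    comparison such as hmac.compare_digest would be preferable to ==).
    def verify_token(stored, presented):
        algo, salt, _ = stored.split(":")
        return hash_token(algo, salt, presented) == stored

    # 2. Guarding an RPC handler: allow_self_service lets users rotate
    #    their own token without holding @user-admin.
    class ExampleConnection(ServerClient):
        @permissions(USER_ADMIN_PERM, allow_self_service=True)
        async def handle_refresh_token(self, request):
            ...
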
 
 
 class ServerClient(bb.asyncrpc.AsyncServerConnection):
-    def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
-        super().__init__(reader, writer, 'OEHASHEQUIV', logger)
-        self.db = db
+    def __init__(
+        self,
+        socket,
+        db_engine,
+        request_stats,
+        backfill_queue,
+        upstream,
+        read_only,
+        anon_perms,
+    ):
+        super().__init__(socket, "OEHASHEQUIV", logger)
+        self.db_engine = db_engine
         self.request_stats = request_stats
         self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
         self.backfill_queue = backfill_queue
         self.upstream = upstream
+        self.read_only = read_only
+        self.user = None
+        self.anon_perms = anon_perms
 
-        self.handlers.update({
-            'get': self.handle_get,
-            'get-outhash': self.handle_get_outhash,
-            'get-stream': self.handle_get_stream,
-            'get-stats': self.handle_get_stats,
-        })
+        self.handlers.update(
+            {
+                "get": self.handle_get,
+                "get-outhash": self.handle_get_outhash,
+                "get-stream": self.handle_get_stream,
+                "get-stats": self.handle_get_stats,
+                "get-db-usage": self.handle_get_db_usage,
+                "get-db-query-columns": self.handle_get_db_query_columns,
+                # "report" is not always read-only; the handler itself
+                # checks whether the server is read-only
+                "report": self.handle_report,
+                "auth": self.handle_auth,
+                "get-user": self.handle_get_user,
+                "get-all-users": self.handle_get_all_users,
+                "become-user": self.handle_become_user,
+            }
+        )
 
         if not read_only:
-            self.handlers.update({
-                'report': self.handle_report,
-                'report-equiv': self.handle_equivreport,
-                'reset-stats': self.handle_reset_stats,
-                'backfill-wait': self.handle_backfill_wait,
-                'remove': self.handle_remove,
-                'clean-unused': self.handle_clean_unused,
-            })
+            self.handlers.update(
+                {
+                    "report-equiv": self.handle_equivreport,
+                    "reset-stats": self.handle_reset_stats,
+                    "backfill-wait": self.handle_backfill_wait,
+                    "remove": self.handle_remove,
+                    "clean-unused": self.handle_clean_unused,
+                    "refresh-token": self.handle_refresh_token,
+                    "set-user-perms": self.handle_set_perms,
+                    "new-user": self.handle_new_user,
+                    "delete-user": self.handle_delete_user,
+                }
+            )
+
+    def raise_no_user_error(self, username):
+        raise bb.asyncrpc.InvokeError(f"No user named '{username}' exists")
+
+    def user_has_permissions(self, *permissions, allow_anon=True):
+        permissions = set(permissions)
+        if allow_anon:
+            if ALL_PERM in self.anon_perms:
+                return True
+
+            if not permissions - self.anon_perms:
+                return True
+
+        if self.user is None:
+            return False
+
+        if ALL_PERM in self.user.permissions:
+            return True
+
+        if not permissions - self.user.permissions:
+            return True
+
+        return False
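+
+    # Example of the set arithmetic above, assuming READ_PERM == "@read" and
+    # REPORT_PERM == "@report": checking (READ_PERM, REPORT_PERM) against a
+    # user holding only {"@read"} leaves {"@report"} after the difference
+    # (truthy), so access is denied; a user holding both leaves an empty set
+    # and is allowed.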
 
     def validate_proto_version(self):
-        return (self.proto_version > (1, 0) and self.proto_version <= (1, 1))
+        return self.proto_version > (1, 0) and self.proto_version <= (1, 1)
 
     async def process_requests(self):
-        if self.upstream is not None:
-            self.upstream_client = await create_async_client(self.upstream)
-        else:
-            self.upstream_client = None
+        async with self.db_engine.connect(self.logger) as db:
+            self.db = db
+            if self.upstream is not None:
+                self.upstream_client = await create_async_client(self.upstream)
+            else:
+                self.upstream_client = None
 
-        await super().process_requests()
-
-        if self.upstream_client is not None:
-            await self.upstream_client.close()
+            try:
+                await super().process_requests()
+            finally:
+                if self.upstream_client is not None:
+                    await self.upstream_client.close()
 
     async def dispatch_message(self, msg):
         for k in self.handlers.keys():
             if k in msg:
-                logger.debug('Handling %s' % k)
-                if 'stream' in k:
-                    await self.handlers[k](msg[k])
+                self.logger.debug("Handling %s" % k)
+                if "stream" in k:
+                    return await self.handlers[k](msg[k])
                 else:
-                    with self.request_stats.start_sample() as self.request_sample, \
-                            self.request_sample.measure():
-                        await self.handlers[k](msg[k])
-                return
+                    with self.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
+                        return await self.handlers[k](msg[k])
 
         raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg)
 
+    @permissions(READ_PERM)
     async def handle_get(self, request):
-        method = request['method']
-        taskhash = request['taskhash']
-        fetch_all = request.get('all', False)
+        method = request["method"]
+        taskhash = request["taskhash"]
+        fetch_all = request.get("all", False)
 
-        with closing(self.db.cursor()) as cursor:
-            d = await self.get_unihash(cursor, method, taskhash, fetch_all)
+        return await self.get_unihash(method, taskhash, fetch_all)
 
-        self.write_message(d)
-
-    async def get_unihash(self, cursor, method, taskhash, fetch_all=False):
+    async def get_unihash(self, method, taskhash, fetch_all=False):
         d = None
 
         if fetch_all:
-            cursor.execute(
-                '''
-                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
-                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
-                WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
-                ORDER BY outhashes_v2.created ASC
-                LIMIT 1
-                ''',
-                {
-                    'method': method,
-                    'taskhash': taskhash,
-                }
-
-            )
-            row = cursor.fetchone()
-
+            row = await self.db.get_unihash_by_taskhash_full(method, taskhash)
             if row is not None:
                 d = {k: row[k] for k in row.keys()}
             elif self.upstream_client is not None:
                 d = await self.upstream_client.get_taskhash(method, taskhash, True)
-                self.update_unified(cursor, d)
-                self.db.commit()
+                await self.update_unified(d)
         else:
-            row = self.query_equivalent(cursor, method, taskhash)
+            row = await self.db.get_equivalent(method, taskhash)
 
             if row is not None:
                 d = {k: row[k] for k in row.keys()}
             elif self.upstream_client is not None:
                 d = await self.upstream_client.get_taskhash(method, taskhash)
-                d = {k: v for k, v in d.items() if k in UNIHASH_TABLE_COLUMNS}
-                insert_unihash(cursor, d, Resolve.IGNORE)
-                self.db.commit()
+                await self.db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
 
         return d
 
+    @permissions(READ_PERM)
     async def handle_get_outhash(self, request):
-        method = request['method']
-        outhash = request['outhash']
-        taskhash = request['taskhash']
+        method = request["method"]
+        outhash = request["outhash"]
+        taskhash = request["taskhash"]
         with_unihash = request.get("with_unihash", True)
 
-        with closing(self.db.cursor()) as cursor:
-            d = await self.get_outhash(cursor, method, outhash, taskhash, with_unihash)
+        return await self.get_outhash(method, outhash, taskhash, with_unihash)
 
-        self.write_message(d)
-
-    async def get_outhash(self, cursor, method, outhash, taskhash, with_unihash=True):
+    async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
         d = None
         if with_unihash:
-            cursor.execute(
-                '''
-                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
-                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
-                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
-                ORDER BY outhashes_v2.created ASC
-                LIMIT 1
-                ''',
-                {
-                    'method': method,
-                    'outhash': outhash,
-                }
-            )
+            row = await self.db.get_unihash_by_outhash(method, outhash)
         else:
-            cursor.execute(
-                """
-                SELECT * FROM outhashes_v2
-                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
-                ORDER BY outhashes_v2.created ASC
-                LIMIT 1
-                """,
-                {
-                    'method': method,
-                    'outhash': outhash,
-                }
-            )
-        row = cursor.fetchone()
+            row = await self.db.get_outhash(method, outhash)
 
         if row is not None:
             d = {k: row[k] for k in row.keys()}
         elif self.upstream_client is not None:
             d = await self.upstream_client.get_outhash(method, outhash, taskhash)
-            self.update_unified(cursor, d)
-            self.db.commit()
+            await self.update_unified(d)
 
         return d
 
-    def update_unified(self, cursor, data):
+    async def update_unified(self, data):
         if data is None:
             return
 
-        insert_unihash(
-            cursor,
-            {k: v for k, v in data.items() if k in UNIHASH_TABLE_COLUMNS},
-            Resolve.IGNORE
-        )
-        insert_outhash(
-            cursor,
-            {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS},
-            Resolve.IGNORE
-        )
+        await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+        await self.db.insert_outhash(data)
 
+    @permissions(READ_PERM)
     async def handle_get_stream(self, request):
-        self.write_message('ok')
+        await self.socket.send_message("ok")
 
         while True:
             upstream = None
 
-            l = await self.reader.readline()
+            l = await self.socket.recv()
             if not l:
-                return
+                break
 
             try:
                 # This inner loop is very sensitive and must be as fast as
@@ -352,272 +408,438 @@
                 request_measure = self.request_sample.measure()
                 request_measure.start()
 
-                l = l.decode('utf-8').rstrip()
-                if l == 'END':
-                    self.writer.write('ok\n'.encode('utf-8'))
-                    return
+                if l == "END":
+                    break
 
                 (method, taskhash) = l.split()
-                #logger.debug('Looking up %s %s' % (method, taskhash))
-                cursor = self.db.cursor()
-                try:
-                    row = self.query_equivalent(cursor, method, taskhash)
-                finally:
-                    cursor.close()
+                # self.logger.debug('Looking up %s %s' % (method, taskhash))
+                row = await self.db.get_equivalent(method, taskhash)
 
                 if row is not None:
-                    msg = ('%s\n' % row['unihash']).encode('utf-8')
-                    #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
+                    msg = row["unihash"]
+                    # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
                 elif self.upstream_client is not None:
                     upstream = await self.upstream_client.get_unihash(method, taskhash)
                     if upstream:
-                        msg = ("%s\n" % upstream).encode("utf-8")
+                        msg = upstream
                     else:
-                        msg = "\n".encode("utf-8")
+                        msg = ""
                 else:
-                    msg = '\n'.encode('utf-8')
+                    msg = ""
 
-                self.writer.write(msg)
+                await self.socket.send(msg)
             finally:
                 request_measure.end()
                 self.request_sample.end()
 
-            await self.writer.drain()
-
             # Post to the backfill queue after writing the result to minimize
             # the turn around time on a request
             if upstream is not None:
                 await self.backfill_queue.put((method, taskhash))
 
-    async def handle_report(self, data):
-        with closing(self.db.cursor()) as cursor:
-            outhash_data = {
-                'method': data['method'],
-                'outhash': data['outhash'],
-                'taskhash': data['taskhash'],
-                'created': datetime.now()
-            }
+        await self.socket.send("ok")
+        return self.NO_RESPONSE
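+
+    # Wire-format sketch of the exchange above: after the initial "ok", each
+    # client line is "<method> <taskhash>" and each reply is the matching
+    # unihash, or an empty string if none is known. A bare "END" terminates
+    # the stream and is answered with a final "ok".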
 
-            for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
-                if k in data:
-                    outhash_data[k] = data[k]
+    async def report_readonly(self, data):
+        method = data["method"]
+        outhash = data["outhash"]
+        taskhash = data["taskhash"]
 
-            # Insert the new entry, unless it already exists
-            (rowid, inserted) = insert_outhash(cursor, outhash_data, Resolve.IGNORE)
+        info = await self.get_outhash(method, outhash, taskhash)
+        if info:
+            unihash = info["unihash"]
+        else:
+            unihash = data["unihash"]
 
-            if inserted:
-                # If this row is new, check if it is equivalent to another
-                # output hash
-                cursor.execute(
-                    '''
-                    SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
-                    INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
-                    -- Select any matching output hash except the one we just inserted
-                    WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
-                    -- Pick the oldest hash
-                    ORDER BY outhashes_v2.created ASC
-                    LIMIT 1
-                    ''',
-                    {
-                        'method': data['method'],
-                        'outhash': data['outhash'],
-                        'taskhash': data['taskhash'],
-                    }
-                )
-                row = cursor.fetchone()
-
-                if row is not None:
-                    # A matching output hash was found. Set our taskhash to the
-                    # same unihash since they are equivalent
-                    unihash = row['unihash']
-                    resolve = Resolve.IGNORE
-                else:
-                    # No matching output hash was found. This is probably the
-                    # first outhash to be added.
-                    unihash = data['unihash']
-                    resolve = Resolve.IGNORE
-
-                    # Query upstream to see if it has a unihash we can use
-                    if self.upstream_client is not None:
-                        upstream_data = await self.upstream_client.get_outhash(data['method'], data['outhash'], data['taskhash'])
-                        if upstream_data is not None:
-                            unihash = upstream_data['unihash']
-
-
-                insert_unihash(
-                    cursor,
-                    {
-                        'method': data['method'],
-                        'taskhash': data['taskhash'],
-                        'unihash': unihash,
-                    },
-                    resolve
-                )
-
-            unihash_data = await self.get_unihash(cursor, data['method'], data['taskhash'])
-            if unihash_data is not None:
-                unihash = unihash_data['unihash']
-            else:
-                unihash = data['unihash']
-
-            self.db.commit()
-
-            d = {
-                'taskhash': data['taskhash'],
-                'method': data['method'],
-                'unihash': unihash,
-            }
-
-        self.write_message(d)
-
-    async def handle_equivreport(self, data):
-        with closing(self.db.cursor()) as cursor:
-            insert_data = {
-                'method': data['method'],
-                'taskhash': data['taskhash'],
-                'unihash': data['unihash'],
-            }
-            insert_unihash(cursor, insert_data, Resolve.IGNORE)
-            self.db.commit()
-
-            # Fetch the unihash that will be reported for the taskhash. If the
-            # unihash matches, it means this row was inserted (or the mapping
-            # was already valid)
-            row = self.query_equivalent(cursor, data['method'], data['taskhash'])
-
-            if row['unihash'] == data['unihash']:
-                logger.info('Adding taskhash equivalence for %s with unihash %s',
-                                data['taskhash'], row['unihash'])
-
-            d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
-
-        self.write_message(d)
-
-
-    async def handle_get_stats(self, request):
-        d = {
-            'requests': self.request_stats.todict(),
+        return {
+            "taskhash": taskhash,
+            "method": method,
+            "unihash": unihash,
         }
 
-        self.write_message(d)
+    # Since this can be called either read-only or to report, the check for
+    # the report permission is made inside the function
+    @permissions(READ_PERM)
+    async def handle_report(self, data):
+        if self.read_only or not self.user_has_permissions(REPORT_PERM):
+            return await self.report_readonly(data)
 
+        outhash_data = {
+            "method": data["method"],
+            "outhash": data["outhash"],
+            "taskhash": data["taskhash"],
+            "created": datetime.now(),
+        }
+
+        for k in ("owner", "PN", "PV", "PR", "task", "outhash_siginfo"):
+            if k in data:
+                outhash_data[k] = data[k]
+
+        if self.user:
+            outhash_data["owner"] = self.user.username
+
+        # Insert the new entry, unless it already exists
+        if await self.db.insert_outhash(outhash_data):
+            # If this row is new, check if it is equivalent to another
+            # output hash
+            row = await self.db.get_equivalent_for_outhash(
+                data["method"], data["outhash"], data["taskhash"]
+            )
+
+            if row is not None:
+                # A matching output hash was found. Set our taskhash to the
+                # same unihash since they are equivalent
+                unihash = row["unihash"]
+            else:
+                # No matching output hash was found. This is probably the
+                # first outhash to be added.
+                unihash = data["unihash"]
+
+                # Query upstream to see if it has a unihash we can use
+                if self.upstream_client is not None:
+                    upstream_data = await self.upstream_client.get_outhash(
+                        data["method"], data["outhash"], data["taskhash"]
+                    )
+                    if upstream_data is not None:
+                        unihash = upstream_data["unihash"]
+
+            await self.db.insert_unihash(data["method"], data["taskhash"], unihash)
+
+        unihash_data = await self.get_unihash(data["method"], data["taskhash"])
+        if unihash_data is not None:
+            unihash = unihash_data["unihash"]
+        else:
+            unihash = data["unihash"]
+
+        return {
+            "taskhash": data["taskhash"],
+            "method": data["method"],
+            "unihash": unihash,
+        }
+
+    @permissions(READ_PERM, REPORT_PERM)
+    async def handle_equivreport(self, data):
+        await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+
+        # Fetch the unihash that will be reported for the taskhash. If the
+        # unihash matches, it means this row was inserted (or the mapping
+        # was already valid)
+        row = await self.db.get_equivalent(data["method"], data["taskhash"])
+
+        if row["unihash"] == data["unihash"]:
+            self.logger.info(
+                "Adding taskhash equivalence for %s with unihash %s",
+                data["taskhash"],
+                row["unihash"],
+            )
+
+        return {k: row[k] for k in ("taskhash", "method", "unihash")}
+
+    @permissions(READ_PERM)
+    async def handle_get_stats(self, request):
+        return {
+            "requests": self.request_stats.todict(),
+        }
+
+    @permissions(DB_ADMIN_PERM)
     async def handle_reset_stats(self, request):
         d = {
-            'requests': self.request_stats.todict(),
+            "requests": self.request_stats.todict(),
         }
 
         self.request_stats.reset()
-        self.write_message(d)
+        return d
 
+    @permissions(READ_PERM)
     async def handle_backfill_wait(self, request):
         d = {
-            'tasks': self.backfill_queue.qsize(),
+            "tasks": self.backfill_queue.qsize(),
         }
         await self.backfill_queue.join()
-        self.write_message(d)
+        return d
 
+    @permissions(DB_ADMIN_PERM)
     async def handle_remove(self, request):
         condition = request["where"]
         if not isinstance(condition, dict):
             raise TypeError("Bad condition type %s" % type(condition))
 
-        def do_remove(columns, table_name, cursor):
-            nonlocal condition
-            where = {}
-            for c in columns:
-                if c in condition and condition[c] is not None:
-                    where[c] = condition[c]
+        return {"count": await self.db.remove(condition)}
 
-            if where:
-                query = ('DELETE FROM %s WHERE ' % table_name) + ' AND '.join("%s=:%s" % (k, k) for k in where.keys())
-                cursor.execute(query, where)
-                return cursor.rowcount
-
-            return 0
-
-        count = 0
-        with closing(self.db.cursor()) as cursor:
-            count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
-            count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v2", cursor)
-            self.db.commit()
-
-        self.write_message({"count": count})
-
+    @permissions(DB_ADMIN_PERM)
     async def handle_clean_unused(self, request):
         max_age = request["max_age_seconds"]
-        with closing(self.db.cursor()) as cursor:
-            cursor.execute(
-                """
-                DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
-                    SELECT unihashes_v2.id FROM unihashes_v2 WHERE unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash LIMIT 1
-                )
-                """,
-                {
-                    "oldest": datetime.now() - timedelta(seconds=-max_age)
-                }
-            )
-            count = cursor.rowcount
+        oldest = datetime.now() - timedelta(seconds=max_age)
+        return {"count": await self.db.clean_unused(oldest)}
 
-        self.write_message({"count": count})
+    @permissions(DB_ADMIN_PERM)
+    async def handle_get_db_usage(self, request):
+        return {"usage": await self.db.get_usage()}
 
-    def query_equivalent(self, cursor, method, taskhash):
-        # This is part of the inner loop and must be as fast as possible
-        cursor.execute(
-            'SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash',
-            {
-                'method': method,
-                'taskhash': taskhash,
-            }
+    @permissions(DB_ADMIN_PERM)
+    async def handle_get_db_query_columns(self, request):
+        return {"columns": await self.db.get_query_columns()}
+
+    # The authentication API is always allowed
+    async def handle_auth(self, request):
+        username = str(request["username"])
+        token = str(request["token"])
+
+        async def fail_auth():
+            nonlocal username
+            # Rate limit bad login attempts
+            await asyncio.sleep(1)
+            raise bb.asyncrpc.InvokeError(f"Unable to authenticate as {username}")
+
+        user, db_token = await self.db.lookup_user_token(username)
+
+        if not user or not db_token:
+            await fail_auth()
+
+        try:
+            algo, salt, _ = db_token.split(":")
+        except ValueError:
+            await fail_auth()
+
+        if hash_token(algo, salt, token) != db_token:
+            await fail_auth()
+
+        self.user = user
+
+        self.logger.info("Authenticated as %s", username)
+
+        return {
+            "result": True,
+            "username": self.user.username,
+            "permissions": sorted(list(self.user.permissions)),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_refresh_token(self, request):
+        username = str(request["username"])
+
+        token = await new_token()
+
+        updated = await self.db.set_user_token(
+            username,
+            hash_token(TOKEN_ALGORITHM, new_salt(), token),
         )
-        return cursor.fetchone()
+        if not updated:
+            self.raise_no_user_error(username)
+
+        return {"username": username, "token": token}
+
+    def get_perm_arg(self, arg):
+        if not isinstance(arg, list):
+            raise bb.asyncrpc.InvokeError("Unexpected type for permissions")
+
+        arg = set(arg)
+        try:
+            arg.remove(NONE_PERM)
+        except KeyError:
+            pass
+
+        unknown_perms = arg - ALL_PERMISSIONS
+        if unknown_perms:
+            raise bb.asyncrpc.InvokeError(
+                "Unknown permissions %s" % ", ".join(sorted(list(unknown_perms)))
+            )
+
+        return sorted(list(arg))
+
+    def return_perms(self, permissions):
+        if ALL_PERM in permissions:
+            return sorted(list(ALL_PERMISSIONS))
+        return sorted(list(permissions))
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_set_perms(self, request):
+        username = str(request["username"])
+        permissions = self.get_perm_arg(request["permissions"])
+
+        if not await self.db.set_user_perms(username, permissions):
+            self.raise_no_user_error(username)
+
+        return {
+            "username": username,
+            "permissions": self.return_perms(permissions),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_get_user(self, request):
+        username = str(request["username"])
+
+        user = await self.db.lookup_user(username)
+        if user is None:
+            return None
+
+        return {
+            "username": user.username,
+            "permissions": self.return_perms(user.permissions),
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_get_all_users(self, request):
+        users = await self.db.get_all_users()
+        return {
+            "users": [
+                {
+                    "username": u.username,
+                    "permissions": self.return_perms(u.permissions),
+                }
+                for u in users
+            ]
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_new_user(self, request):
+        username = str(request["username"])
+        permissions = self.get_perm_arg(request["permissions"])
+
+        token = await new_token()
+
+        inserted = await self.db.new_user(
+            username,
+            permissions,
+            hash_token(TOKEN_ALGORITHM, new_salt(), token),
+        )
+        if not inserted:
+            raise bb.asyncrpc.InvokeError(f"Cannot create new user '{username}'")
+
+        return {
+            "username": username,
+            "permissions": self.return_perms(permissions),
+            "token": token,
+        }
+
+    @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+    async def handle_delete_user(self, request):
+        username = str(request["username"])
+
+        if not await self.db.delete_user(username):
+            self.raise_no_user_error(username)
+
+        return {"username": username}
+
+    @permissions(USER_ADMIN_PERM, allow_anon=False)
+    async def handle_become_user(self, request):
+        username = str(request["username"])
+
+        user = await self.db.lookup_user(username)
+        if user is None:
+            raise bb.asyncrpc.InvokeError(f"User {username} doesn't exist")
+
+        self.user = user
+
+        self.logger.info("Became user %s", username)
+
+        return {
+            "username": self.user.username,
+            "permissions": self.return_perms(self.user.permissions),
+        }
 
 
 class Server(bb.asyncrpc.AsyncServer):
-    def __init__(self, db, upstream=None, read_only=False):
+    def __init__(
+        self,
+        db_engine,
+        upstream=None,
+        read_only=False,
+        anon_perms=DEFAULT_ANON_PERMS,
+        admin_username=None,
+        admin_password=None,
+    ):
         if upstream and read_only:
-            raise bb.asyncrpc.ServerError("Read-only hashserv cannot pull from an upstream server")
+            raise bb.asyncrpc.ServerError(
+                "Read-only hashserv cannot pull from an upstream server"
+            )
+
+        disallowed_perms = set(anon_perms) - set(
+            [NONE_PERM, READ_PERM, REPORT_PERM, DB_ADMIN_PERM]
+        )
+
+        if disallowed_perms:
+            raise bb.asyncrpc.ServerError(
+                f"Permission(s) {' '.join(disallowed_perms)} are not allowed for anonymous users"
+            )
 
         super().__init__(logger)
 
         self.request_stats = Stats()
-        self.db = db
+        self.db_engine = db_engine
         self.upstream = upstream
         self.read_only = read_only
+        self.backfill_queue = None
+        self.anon_perms = set(anon_perms)
+        self.admin_username = admin_username
+        self.admin_password = admin_password
 
-    def accept_client(self, reader, writer):
-        return ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
+        self.logger.info(
+            "Anonymous user permissions are: %s", ", ".join(self.anon_perms)
+        )
 
-    @contextmanager
-    def _backfill_worker(self):
-        async def backfill_worker_task():
-            client = await create_async_client(self.upstream)
-            try:
-                while True:
-                    item = await self.backfill_queue.get()
-                    if item is None:
-                        self.backfill_queue.task_done()
-                        break
-                    method, taskhash = item
-                    await copy_unihash_from_upstream(client, self.db, method, taskhash)
+    def accept_client(self, socket):
+        return ServerClient(
+            socket,
+            self.db_engine,
+            self.request_stats,
+            self.backfill_queue,
+            self.upstream,
+            self.read_only,
+            self.anon_perms,
+        )
+
+    async def create_admin_user(self):
+        admin_permissions = (ALL_PERM,)
+        async with self.db_engine.connect(self.logger) as db:
+            added = await db.new_user(
+                self.admin_username,
+                admin_permissions,
+                hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+            )
+            if added:
+                self.logger.info("Created admin user '%s'", self.admin_username)
+            else:
+                await db.set_user_perms(
+                    self.admin_username,
+                    admin_permissions,
+                )
+                await db.set_user_token(
+                    self.admin_username,
+                    hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+                )
+                self.logger.info("Admin user '%s' updated", self.admin_username)
+
+    async def backfill_worker_task(self):
+        async with await create_async_client(
+            self.upstream
+        ) as client, self.db_engine.connect(self.logger) as db:
+            while True:
+                item = await self.backfill_queue.get()
+                if item is None:
                     self.backfill_queue.task_done()
-            finally:
-                await client.close()
+                    break
 
-        async def join_worker(worker):
+                method, taskhash = item
+                d = await client.get_taskhash(method, taskhash)
+                if d is not None:
+                    await db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
+                self.backfill_queue.task_done()
+
+    def start(self):
+        tasks = super().start()
+        if self.upstream:
+            self.backfill_queue = asyncio.Queue()
+            tasks += [self.backfill_worker_task()]
+
+        self.loop.run_until_complete(self.db_engine.create())
+
+        if self.admin_username:
+            self.loop.run_until_complete(self.create_admin_user())
+
+        return tasks
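+
+    # Shutdown note: stop() below enqueues a None sentinel so that
+    # backfill_worker_task() breaks out of its loop and exits cleanly before
+    # the event loop is torn down.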
+
+    async def stop(self):
+        if self.backfill_queue is not None:
             await self.backfill_queue.put(None)
-            await worker
-
-        if self.upstream is not None:
-            worker = asyncio.ensure_future(backfill_worker_task())
-            try:
-                yield
-            finally:
-                self.loop.run_until_complete(join_worker(worker))
-        else:
-            yield
-
-    def run_loop_forever(self):
-        self.backfill_queue = asyncio.Queue()
-
-        with self._backfill_worker():
-            super().run_loop_forever()
+        await super().stop()
diff --git a/poky/bitbake/lib/hashserv/sqlalchemy.py b/poky/bitbake/lib/hashserv/sqlalchemy.py
new file mode 100644
index 0000000..cee04bf
--- /dev/null
+++ b/poky/bitbake/lib/hashserv/sqlalchemy.py
@@ -0,0 +1,427 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+from datetime import datetime
+from . import User
+
+from sqlalchemy.ext.asyncio import create_async_engine
+from sqlalchemy.pool import NullPool
+from sqlalchemy import (
+    MetaData,
+    Column,
+    Table,
+    Text,
+    Integer,
+    UniqueConstraint,
+    DateTime,
+    Index,
+    select,
+    insert,
+    exists,
+    literal,
+    and_,
+    delete,
+    update,
+    func,
+)
+import sqlalchemy.engine
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.exc import IntegrityError
+
+Base = declarative_base()
+
+
+class UnihashesV2(Base):
+    __tablename__ = "unihashes_v2"
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    method = Column(Text, nullable=False)
+    taskhash = Column(Text, nullable=False)
+    unihash = Column(Text, nullable=False)
+
+    __table_args__ = (
+        UniqueConstraint("method", "taskhash"),
+        Index("taskhash_lookup_v3", "method", "taskhash"),
+    )
+
+
+class OuthashesV2(Base):
+    __tablename__ = "outhashes_v2"
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    method = Column(Text, nullable=False)
+    taskhash = Column(Text, nullable=False)
+    outhash = Column(Text, nullable=False)
+    created = Column(DateTime)
+    owner = Column(Text)
+    PN = Column(Text)
+    PV = Column(Text)
+    PR = Column(Text)
+    task = Column(Text)
+    outhash_siginfo = Column(Text)
+
+    __table_args__ = (
+        UniqueConstraint("method", "taskhash", "outhash"),
+        Index("outhash_lookup_v3", "method", "outhash"),
+    )
+
+
+class Users(Base):
+    __tablename__ = "users"
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    username = Column(Text, nullable=False)
+    token = Column(Text, nullable=False)
+    permissions = Column(Text)
+
+    __table_args__ = (UniqueConstraint("username"),)
+
+
+class DatabaseEngine(object):
+    def __init__(self, url, username=None, password=None):
+        self.logger = logging.getLogger("hashserv.sqlalchemy")
+        self.url = sqlalchemy.engine.make_url(url)
+
+        if username is not None:
+            self.url = self.url.set(username=username)
+
+        if password is not None:
+            self.url = self.url.set(password=password)
+
+    async def create(self):
+        self.logger.info("Using database %s", self.url)
+        self.engine = create_async_engine(self.url, poolclass=NullPool)
+
+        async with self.engine.begin() as conn:
+            # Create tables
+            self.logger.info("Creating tables...")
+            await conn.run_sync(Base.metadata.create_all)
+
+    def connect(self, logger):
+        return Database(self.engine, logger)
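+
+# Connection sketch (the URL is an assumption; any async SQLAlchemy dialect
+# should work):
+#
+#   engine = DatabaseEngine("postgresql+asyncpg://host/hashdb")
+#   await engine.create()
+#   async with engine.connect(logging.getLogger("hashserv")) as db:
+#       row = await db.get_equivalent(method, taskhash)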
+
+
+def map_row(row):
+    if row is None:
+        return None
+    return dict(**row._mapping)
+
+
+def map_user(row):
+    if row is None:
+        return None
+    return User(
+        username=row.username,
+        permissions=set(row.permissions.split()),
+    )
+
+
+class Database(object):
+    def __init__(self, engine, logger):
+        self.engine = engine
+        self.db = None
+        self.logger = logger
+
+    async def __aenter__(self):
+        self.db = await self.engine.connect()
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.close()
+
+    async def close(self):
+        await self.db.close()
+        self.db = None
+
+    async def get_unihash_by_taskhash_full(self, method, taskhash):
+        statement = (
+            select(
+                OuthashesV2,
+                UnihashesV2.unihash.label("unihash"),
+            )
+            .join(
+                UnihashesV2,
+                and_(
+                    UnihashesV2.method == OuthashesV2.method,
+                    UnihashesV2.taskhash == OuthashesV2.taskhash,
+                ),
+            )
+            .where(
+                OuthashesV2.method == method,
+                OuthashesV2.taskhash == taskhash,
+            )
+            .order_by(
+                OuthashesV2.created.asc(),
+            )
+            .limit(1)
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return map_row(result.first())
+
+    async def get_unihash_by_outhash(self, method, outhash):
+        statement = (
+            select(OuthashesV2, UnihashesV2.unihash.label("unihash"))
+            .join(
+                UnihashesV2,
+                and_(
+                    UnihashesV2.method == OuthashesV2.method,
+                    UnihashesV2.taskhash == OuthashesV2.taskhash,
+                ),
+            )
+            .where(
+                OuthashesV2.method == method,
+                OuthashesV2.outhash == outhash,
+            )
+            .order_by(
+                OuthashesV2.created.asc(),
+            )
+            .limit(1)
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return map_row(result.first())
+
+    async def get_outhash(self, method, outhash):
+        statement = (
+            select(OuthashesV2)
+            .where(
+                OuthashesV2.method == method,
+                OuthashesV2.outhash == outhash,
+            )
+            .order_by(
+                OuthashesV2.created.asc(),
+            )
+            .limit(1)
+        )
+
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return map_row(result.first())
+
+    async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+        statement = (
+            select(
+                OuthashesV2.taskhash.label("taskhash"),
+                UnihashesV2.unihash.label("unihash"),
+            )
+            .join(
+                UnihashesV2,
+                and_(
+                    UnihashesV2.method == OuthashesV2.method,
+                    UnihashesV2.taskhash == OuthashesV2.taskhash,
+                ),
+            )
+            .where(
+                OuthashesV2.method == method,
+                OuthashesV2.outhash == outhash,
+                OuthashesV2.taskhash != taskhash,
+            )
+            .order_by(
+                OuthashesV2.created.asc(),
+            )
+            .limit(1)
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return map_row(result.first())
+
+    async def get_equivalent(self, method, taskhash):
+        statement = select(
+            UnihashesV2.unihash,
+            UnihashesV2.method,
+            UnihashesV2.taskhash,
+        ).where(
+            UnihashesV2.method == method,
+            UnihashesV2.taskhash == taskhash,
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return map_row(result.first())
+
+    async def remove(self, condition):
+        async def do_remove(table):
+            where = {}
+            for c in table.__table__.columns:
+                if c.key in condition and condition[c.key] is not None:
+                    where[c] = condition[c.key]
+
+            if where:
+                statement = delete(table).where(*[(k == v) for k, v in where.items()])
+                self.logger.debug("%s", statement)
+                async with self.db.begin():
+                    result = await self.db.execute(statement)
+                return result.rowcount
+
+            return 0
+
+        count = 0
+        count += await do_remove(UnihashesV2)
+        count += await do_remove(OuthashesV2)
+
+        return count
+
+    async def clean_unused(self, oldest):
+        statement = delete(OuthashesV2).where(
+            OuthashesV2.created < oldest,
+            ~(
+                select(UnihashesV2.id)
+                .where(
+                    UnihashesV2.method == OuthashesV2.method,
+                    UnihashesV2.taskhash == OuthashesV2.taskhash,
+                )
+                .limit(1)
+                .exists()
+            ),
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return result.rowcount
+
+    async def insert_unihash(self, method, taskhash, unihash):
+        statement = insert(UnihashesV2).values(
+            method=method,
+            taskhash=taskhash,
+            unihash=unihash,
+        )
+        self.logger.debug("%s", statement)
+        try:
+            async with self.db.begin():
+                await self.db.execute(statement)
+            return True
+        except IntegrityError:
+            self.logger.debug(
+                "%s, %s, %s already in unihash database", method, taskhash, unihash
+            )
+            return False
+
+    async def insert_outhash(self, data):
+        outhash_columns = set(c.key for c in OuthashesV2.__table__.columns)
+
+        data = {k: v for k, v in data.items() if k in outhash_columns}
+
+        if "created" in data and not isinstance(data["created"], datetime):
+            data["created"] = datetime.fromisoformat(data["created"])
+
+        statement = insert(OuthashesV2).values(**data)
+        self.logger.debug("%s", statement)
+        try:
+            async with self.db.begin():
+                await self.db.execute(statement)
+            return True
+        except IntegrityError:
+            self.logger.debug(
+                "%s, %s already in outhash database", data["method"], data["outhash"]
+            )
+            return False
+
+    async def _get_user(self, username):
+        statement = select(
+            Users.username,
+            Users.permissions,
+            Users.token,
+        ).where(
+            Users.username == username,
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return result.first()
+
+    async def lookup_user_token(self, username):
+        row = await self._get_user(username)
+        if not row:
+            return None, None
+        return map_user(row), row.token
+
+    async def lookup_user(self, username):
+        return map_user(await self._get_user(username))
+
+    async def set_user_token(self, username, token):
+        statement = (
+            update(Users)
+            .where(
+                Users.username == username,
+            )
+            .values(
+                token=token,
+            )
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return result.rowcount != 0
+
+    async def set_user_perms(self, username, permissions):
+        statement = (
+            update(Users)
+            .where(Users.username == username)
+            .values(permissions=" ".join(permissions))
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return result.rowcount != 0
+
+    async def get_all_users(self):
+        statement = select(
+            Users.username,
+            Users.permissions,
+        )
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return [map_user(row) for row in result]
+
+    async def new_user(self, username, permissions, token):
+        statement = insert(Users).values(
+            username=username,
+            permissions=" ".join(permissions),
+            token=token,
+        )
+        self.logger.debug("%s", statement)
+        try:
+            async with self.db.begin():
+                await self.db.execute(statement)
+            return True
+        except IntegrityError as e:
+            self.logger.debug("Cannot create new user %s: %s", username, e)
+            return False
+
+    async def delete_user(self, username):
+        statement = delete(Users).where(Users.username == username)
+        self.logger.debug("%s", statement)
+        async with self.db.begin():
+            result = await self.db.execute(statement)
+            return result.rowcount != 0
+
+    async def get_usage(self):
+        usage = {}
+        async with self.db.begin() as session:
+            for name, table in Base.metadata.tables.items():
+                statement = select(func.count()).select_from(table)
+                self.logger.debug("%s", statement)
+                result = await self.db.execute(statement)
+                usage[name] = {
+                    "rows": result.scalar(),
+                }
+
+        return usage
+
+    async def get_query_columns(self):
+        columns = set()
+        for table in (UnihashesV2, OuthashesV2):
+            for c in table.__table__.columns:
+                if not isinstance(c.type, Text):
+                    continue
+                columns.add(c.key)
+
+        return list(columns)
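+
+# Note: only Text columns are reported; these are the keys that a client may
+# reasonably pass in the "where" condition consumed by remove() above.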
diff --git a/poky/bitbake/lib/hashserv/sqlite.py b/poky/bitbake/lib/hashserv/sqlite.py
new file mode 100644
index 0000000..f65036b
--- /dev/null
+++ b/poky/bitbake/lib/hashserv/sqlite.py
@@ -0,0 +1,408 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+import sqlite3
+import logging
+from contextlib import closing
+from . import User
+
+logger = logging.getLogger("hashserv.sqlite")
+
+UNIHASH_TABLE_DEFINITION = (
+    ("method", "TEXT NOT NULL", "UNIQUE"),
+    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+    ("unihash", "TEXT NOT NULL", ""),
+)
+
+UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
+
+OUTHASH_TABLE_DEFINITION = (
+    ("method", "TEXT NOT NULL", "UNIQUE"),
+    ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+    ("outhash", "TEXT NOT NULL", "UNIQUE"),
+    ("created", "DATETIME", ""),
+    # Optional fields
+    ("owner", "TEXT", ""),
+    ("PN", "TEXT", ""),
+    ("PV", "TEXT", ""),
+    ("PR", "TEXT", ""),
+    ("task", "TEXT", ""),
+    ("outhash_siginfo", "TEXT", ""),
+)
+
+OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
+
+USERS_TABLE_DEFINITION = (
+    ("username", "TEXT NOT NULL", "UNIQUE"),
+    ("token", "TEXT NOT NULL", ""),
+    ("permissions", "TEXT NOT NULL", ""),
+)
+
+USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)
+
+
+def _make_table(cursor, name, definition):
+    cursor.execute(
+        """
+        CREATE TABLE IF NOT EXISTS {name} (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            {fields}
+            UNIQUE({unique})
+            )
+        """.format(
+            name=name,
+            fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
+            unique=", ".join(
+                name for name, _, flags in definition if "UNIQUE" in flags
+            ),
+        )
+    )
+
+
+def map_user(row):
+    if row is None:
+        return None
+    return User(
+        username=row["username"],
+        permissions=set(row["permissions"].split()),
+    )
+
+
+class DatabaseEngine(object):
+    def __init__(self, dbname, sync):
+        self.dbname = dbname
+        self.logger = logger
+        self.sync = sync
+
+    async def create(self):
+        db = sqlite3.connect(self.dbname)
+        db.row_factory = sqlite3.Row
+
+        with closing(db.cursor()) as cursor:
+            _make_table(cursor, "unihashes_v2", UNIHASH_TABLE_DEFINITION)
+            _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
+            _make_table(cursor, "users", USERS_TABLE_DEFINITION)
+
+            cursor.execute("PRAGMA journal_mode = WAL")
+            cursor.execute(
+                "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
+            )
+
+            # Drop old indexes
+            cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
+            cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
+            cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
+            cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
+
+            # TODO: Upgrade from tasks_v2?
+            cursor.execute("DROP TABLE IF EXISTS tasks_v2")
+
+            # Create new indexes
+            cursor.execute(
+                "CREATE INDEX IF NOT EXISTS taskhash_lookup_v3 ON unihashes_v2 (method, taskhash)"
+            )
+            cursor.execute(
+                "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
+            )
+
+    def connect(self, logger):
+        return Database(logger, self.dbname)
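+
+# Note that connect() constructs a fresh Database (and thus a new sqlite3
+# connection, see Database.__init__ below) per client session rather than
+# sharing a single connection across the asyncio server.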
+
+
+class Database(object):
+    def __init__(self, logger, dbname, sync=True):
+        self.dbname = dbname
+        self.logger = logger
+
+        self.db = sqlite3.connect(self.dbname)
+        self.db.row_factory = sqlite3.Row
+
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute("SELECT sqlite_version()")
+
+            version = []
+            for v in cursor.fetchone()[0].split("."):
+                try:
+                    version.append(int(v))
+                except ValueError:
+                    version.append(v)
+
+            self.sqlite_version = tuple(version)
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.close()
+
+    async def close(self):
+        self.db.close()
+
+    async def get_unihash_by_taskhash_full(self, method, taskhash):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
+                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+                WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
+                ORDER BY outhashes_v2.created ASC
+                LIMIT 1
+                """,
+                {
+                    "method": method,
+                    "taskhash": taskhash,
+                },
+            )
+            return cursor.fetchone()
+
+    async def get_unihash_by_outhash(self, method, outhash):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                SELECT *, unihashes_v2.unihash AS unihash FROM outhashes_v2
+                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+                ORDER BY outhashes_v2.created ASC
+                LIMIT 1
+                """,
+                {
+                    "method": method,
+                    "outhash": outhash,
+                },
+            )
+            return cursor.fetchone()
+
+    async def get_outhash(self, method, outhash):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                SELECT * FROM outhashes_v2
+                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+                ORDER BY outhashes_v2.created ASC
+                LIMIT 1
+                """,
+                {
+                    "method": method,
+                    "outhash": outhash,
+                },
+            )
+            return cursor.fetchone()
+
+    async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                SELECT outhashes_v2.taskhash AS taskhash, unihashes_v2.unihash AS unihash FROM outhashes_v2
+                INNER JOIN unihashes_v2 ON unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash
+                -- Select any matching output hash except the one we just inserted
+                WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
+                -- Pick the oldest hash
+                ORDER BY outhashes_v2.created ASC
+                LIMIT 1
+                """,
+                {
+                    "method": method,
+                    "outhash": outhash,
+                    "taskhash": taskhash,
+                },
+            )
+            return cursor.fetchone()
+
+    async def get_equivalent(self, method, taskhash):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                "SELECT taskhash, method, unihash FROM unihashes_v2 WHERE method=:method AND taskhash=:taskhash",
+                {
+                    "method": method,
+                    "taskhash": taskhash,
+                },
+            )
+            return cursor.fetchone()
+
+    async def remove(self, condition):
+        def do_remove(columns, table_name, cursor):
+            where = {}
+            for c in columns:
+                if c in condition and condition[c] is not None:
+                    where[c] = condition[c]
+
+            if where:
+                query = ("DELETE FROM %s WHERE " % table_name) + " AND ".join(
+                    "%s=:%s" % (k, k) for k in where.keys()
+                )
+                cursor.execute(query, where)
+                return cursor.rowcount
+
+            return 0
+
+        count = 0
+        with closing(self.db.cursor()) as cursor:
+            count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
+            count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v2", cursor)
+            self.db.commit()
+
+        return count
+
+    async def clean_unused(self, oldest):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
+                    SELECT unihashes_v2.id FROM unihashes_v2 WHERE unihashes_v2.method=outhashes_v2.method AND unihashes_v2.taskhash=outhashes_v2.taskhash LIMIT 1
+                )
+                """,
+                {
+                    "oldest": oldest,
+                },
+            )
+            self.db.commit()
+            return cursor.rowcount
+
+    async def insert_unihash(self, method, taskhash, unihash):
+        with closing(self.db.cursor()) as cursor:
+            prevrowid = cursor.lastrowid
+            cursor.execute(
+                """
+                INSERT OR IGNORE INTO unihashes_v2 (method, taskhash, unihash) VALUES(:method, :taskhash, :unihash)
+                """,
+                {
+                    "method": method,
+                    "taskhash": taskhash,
+                    "unihash": unihash,
+                },
+            )
+            self.db.commit()
+            return cursor.lastrowid != prevrowid
+
+    async def insert_outhash(self, data):
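+        # Keep only known outhash columns, then build the INSERT dynamically;
+        # field names come from the fixed column list and the values are
+        # bound as named parameters.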
+        data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}
+        keys = sorted(data.keys())
+        query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format(
+            fields=", ".join(keys),
+            values=", ".join(":" + k for k in keys),
+        )
+        with closing(self.db.cursor()) as cursor:
+            prevrowid = cursor.lastrowid
+            cursor.execute(query, data)
+            self.db.commit()
+            return cursor.lastrowid != prevrowid
+
+    def _get_user(self, username):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                SELECT username, permissions, token FROM users WHERE username=:username
+                """,
+                {
+                    "username": username,
+                },
+            )
+            return cursor.fetchone()
+
+    async def lookup_user_token(self, username):
+        row = self._get_user(username)
+        if row is None:
+            return None, None
+        return map_user(row), row["token"]
+
+    async def lookup_user(self, username):
+        return map_user(self._get_user(username))
+
+    async def set_user_token(self, username, token):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                UPDATE users SET token=:token WHERE username=:username
+                """,
+                {
+                    "username": username,
+                    "token": token,
+                },
+            )
+            self.db.commit()
+            return cursor.rowcount != 0
+
+    async def set_user_perms(self, username, permissions):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                UPDATE users SET permissions=:permissions WHERE username=:username
+                """,
+                {
+                    "username": username,
+                    "permissions": " ".join(permissions),
+                },
+            )
+            self.db.commit()
+            return cursor.rowcount != 0
+
+    async def get_all_users(self):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute("SELECT username, permissions FROM users")
+            return [map_user(r) for r in cursor.fetchall()]
+
+    async def new_user(self, username, permissions, token):
+        with closing(self.db.cursor()) as cursor:
+            try:
+                cursor.execute(
+                    """
+                    INSERT INTO users (username, token, permissions) VALUES (:username, :token, :permissions)
+                    """,
+                    {
+                        "username": username,
+                        "token": token,
+                        "permissions": " ".join(permissions),
+                    },
+                )
+                self.db.commit()
+                return True
+            except sqlite3.IntegrityError:
+                return False
+
+    async def delete_user(self, username):
+        with closing(self.db.cursor()) as cursor:
+            cursor.execute(
+                """
+                DELETE FROM users WHERE username=:username
+                """,
+                {
+                    "username": username,
+                },
+            )
+            self.db.commit()
+            return cursor.rowcount != 0
+
+    async def get_usage(self):
+        usage = {}
+        with closing(self.db.cursor()) as cursor:
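+            # sqlite_master was renamed to sqlite_schema in SQLite 3.33.0;
+            # pick whichever name this SQLite version understands.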
+            if self.sqlite_version >= (3, 33):
+                table_name = "sqlite_schema"
+            else:
+                table_name = "sqlite_master"
+
+            cursor.execute(
+                f"""
+                SELECT name FROM {table_name} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
+                """
+            )
+            for row in cursor.fetchall():
+                cursor.execute(
+                    """
+                    SELECT COUNT() FROM %s
+                    """
+                    % row["name"],
+                )
+                usage[row["name"]] = {
+                    "rows": cursor.fetchone()[0],
+                }
+        return usage
+
+    async def get_query_columns(self):
+        columns = set()
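+        # Only TEXT columns from the two table definitions are exposed as
+        # queryable fields.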
+        for name, typ, _ in UNIHASH_TABLE_DEFINITION + OUTHASH_TABLE_DEFINITION:
+            if typ.startswith("TEXT"):
+                columns.add(name)
+        return list(columns)
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index f343c58..a9e6fdf 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -6,6 +6,8 @@
 #
 
 from . import create_server, create_client
+from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS
+from bb.asyncrpc import InvokeError
 import hashlib
 import logging
 import multiprocessing
@@ -17,6 +19,14 @@
 import socket
 import time
 import signal
+import subprocess
+import json
+import re
+from pathlib import Path
+
+
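+# Locate bitbake's bin/ directory relative to this file so the tests can
+# invoke the bitbake-hashclient CLI without relying on PATH.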
+THIS_DIR = Path(__file__).parent
+BIN_DIR = THIS_DIR.parent.parent / "bin"
 
 def server_prefunc(server, idx):
     logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w',
@@ -29,11 +39,12 @@
     METHOD = 'TestMethod'
 
     server_index = 0
+    client_index = 0
 
-    def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc):
+    def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc, anon_perms=DEFAULT_ANON_PERMS, admin_username=None, admin_password=None):
         self.server_index += 1
         if dbpath is None:
-            dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+            dbpath = self.make_dbpath()
 
         def cleanup_server(server):
             if server.process.exitcode is not None:
@@ -45,19 +56,41 @@
         server = create_server(self.get_server_addr(self.server_index),
                                dbpath,
                                upstream=upstream,
-                               read_only=read_only)
+                               read_only=read_only,
+                               anon_perms=anon_perms,
+                               admin_username=admin_username,
+                               admin_password=admin_password)
         server.dbpath = dbpath
 
         server.serve_as_process(prefunc=prefunc, args=(self.server_index,))
         self.addCleanup(cleanup_server, server)
 
+        return server
+
+    def make_dbpath(self):
+        return os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+    def start_client(self, server_address, username=None, password=None):
         def cleanup_client(client):
             client.close()
 
-        client = create_client(server.address)
+        client = create_client(server_address, username=username, password=password)
         self.addCleanup(cleanup_client, client)
 
-        return (client, server)
+        return client
+
+    def start_test_server(self):
+        self.server = self.start_server()
+        return self.server.address
+
+    def start_auth_server(self):
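+        # Start a second server on the same database with anonymous access
+        # disabled and an admin user, and keep an authenticated admin client.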
+        auth_server = self.start_server(self.server.dbpath, anon_perms=[], admin_username="admin", admin_password="password")
+        self.auth_server_address = auth_server.address
+        self.admin_client = self.start_client(auth_server.address, username="admin", password="password")
+        return self.admin_client
+
+    def auth_client(self, user):
+        return self.start_client(self.auth_server_address, user["username"], user["token"])
 
     def setUp(self):
         if sys.version_info < (3, 5, 0):
@@ -66,26 +99,83 @@
         self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv')
         self.addCleanup(self.temp_dir.cleanup)
 
-        (self.client, self.server) = self.start_server()
+        self.server_address = self.start_test_server()
+
+        self.client = self.start_client(self.server_address)
 
     def assertClientGetHash(self, client, taskhash, unihash):
         result = client.get_unihash(self.METHOD, taskhash)
         self.assertEqual(result, unihash)
 
+    def assertUserPerms(self, user, permissions):
+        with self.auth_client(user) as client:
+            info = client.get_user()
+            self.assertEqual(info, {
+                "username": user["username"],
+                "permissions": permissions,
+            })
 
-class HashEquivalenceCommonTests(object):
-    def test_create_hash(self):
+    def assertUserCanAuth(self, user):
+        with self.start_client(self.auth_server_address) as client:
+            client.auth(user["username"], user["token"])
+
+    def assertUserCannotAuth(self, user):
+        with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+            client.auth(user["username"], user["token"])
+
+    def create_test_hash(self, client):
         # Simple test that hashes can be created
         taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
         outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
         unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
 
-        self.assertClientGetHash(self.client, taskhash, None)
+        self.assertClientGetHash(client, taskhash, None)
 
-        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+        result = client.report_unihash(taskhash, self.METHOD, outhash, unihash)
         self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
         return taskhash, outhash, unihash
 
+    def run_hashclient(self, args, **kwargs):
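+        # Run the bitbake-hashclient CLI, echoing its output so failures are
+        # easy to diagnose in the test log; callers pass check=True to assert
+        # success.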
+        try:
+            p = subprocess.run(
+                [BIN_DIR / "bitbake-hashclient"] + args,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                encoding="utf-8",
+                **kwargs
+            )
+        except subprocess.CalledProcessError as e:
+            print(e.output)
+            raise e
+
+        print(p.stdout)
+        return p
+
+
+class HashEquivalenceCommonTests(object):
+    def auth_perms(self, *permissions):
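+        # Create a fresh throwaway user holding exactly the requested
+        # permissions and return a client authenticated as that user.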
+        self.client_index += 1
+        user = self.create_user(f"user-{self.client_index}", permissions)
+        return self.auth_client(user)
+
+    def create_user(self, username, permissions, *, client=None):
+        def remove_user(username):
+            try:
+                self.admin_client.delete_user(username)
+            except InvokeError:
+                pass
+
+        if client is None:
+            client = self.admin_client
+
+        user = client.new_user(username, permissions)
+        self.addCleanup(remove_user, username)
+
+        return user
+
+    def test_create_hash(self):
+        return self.create_test_hash(self.client)
+
     def test_create_equivalent(self):
         # Tests that a second reported task with the same outhash will be
         # assigned the same unihash
@@ -127,7 +217,7 @@
         self.assertClientGetHash(self.client, taskhash, unihash)
 
     def test_remove_taskhash(self):
-        taskhash, outhash, unihash = self.test_create_hash()
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
         result = self.client.remove({"taskhash": taskhash})
         self.assertGreater(result["count"], 0)
         self.assertClientGetHash(self.client, taskhash, None)
@@ -136,13 +226,13 @@
         self.assertIsNone(result_outhash)
 
     def test_remove_unihash(self):
-        taskhash, outhash, unihash = self.test_create_hash()
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
         result = self.client.remove({"unihash": unihash})
         self.assertGreater(result["count"], 0)
         self.assertClientGetHash(self.client, taskhash, None)
 
     def test_remove_outhash(self):
-        taskhash, outhash, unihash = self.test_create_hash()
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
         result = self.client.remove({"outhash": outhash})
         self.assertGreater(result["count"], 0)
 
@@ -150,7 +240,7 @@
         self.assertIsNone(result_outhash)
 
     def test_remove_method(self):
-        taskhash, outhash, unihash = self.test_create_hash()
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
         result = self.client.remove({"method": self.METHOD})
         self.assertGreater(result["count"], 0)
         self.assertClientGetHash(self.client, taskhash, None)
@@ -159,7 +249,7 @@
         self.assertIsNone(result_outhash)
 
     def test_clean_unused(self):
-        taskhash, outhash, unihash = self.test_create_hash()
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
 
         # Clean the database, which should not remove anything because all hashes are in use
         result = self.client.clean_unused(0)
@@ -206,7 +296,7 @@
 
     def test_stress(self):
         def query_server(failures):
-            client = Client(self.server.address)
+            client = Client(self.server_address)
             try:
                 for i in range(1000):
                     taskhash = hashlib.sha256()
@@ -245,8 +335,10 @@
         # the side client. It also verifies that the results are pulled into
         # the downstream database by checking that the downstream and side servers
         # match after the downstream is done waiting for all backfill tasks
-        (down_client, down_server) = self.start_server(upstream=self.server.address)
-        (side_client, side_server) = self.start_server(dbpath=down_server.dbpath)
+        down_server = self.start_server(upstream=self.server_address)
+        down_client = self.start_client(down_server.address)
+        side_server = self.start_server(dbpath=down_server.dbpath)
+        side_client = self.start_client(side_server.address)
 
         def check_hash(taskhash, unihash, old_sidehash):
             nonlocal down_client
@@ -351,14 +443,18 @@
         self.assertEqual(result['method'], self.METHOD)
 
     def test_ro_server(self):
-        (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)
+        rw_server = self.start_server()
+        rw_client = self.start_client(rw_server.address)
+
+        ro_server = self.start_server(dbpath=rw_server.dbpath, read_only=True)
+        ro_client = self.start_client(ro_server.address)
 
         # Report a hash via the read-write server
         taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
         outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
         unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
 
-        result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+        result = rw_client.report_unihash(taskhash, self.METHOD, outhash, unihash)
         self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
 
         # Check the hash via the read-only server
@@ -369,11 +465,11 @@
         outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
         unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
 
-        with self.assertRaises(ConnectionError):
-            ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+        result = ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+        self.assertEqual(result['unihash'], unihash2)
 
         # Ensure that the database was not modified
-        self.assertClientGetHash(self.client, taskhash2, None)
+        self.assertClientGetHash(rw_client, taskhash2, None)
 
 
     def test_slow_server_start(self):
@@ -393,7 +489,7 @@
         old_signal = signal.signal(signal.SIGTERM, do_nothing)
         self.addCleanup(signal.signal, signal.SIGTERM, old_signal)
 
-        _, server = self.start_server(prefunc=prefunc)
+        server = self.start_server(prefunc=prefunc)
         server.process.terminate()
         time.sleep(30)
         event.set()
@@ -453,6 +549,524 @@
         # shares a taskhash with Task 2
         self.assertClientGetHash(self.client, taskhash2, unihash2)
 
+    def test_auth_read_perms(self):
+        admin_client = self.start_auth_server()
+
+        # Create hashes with non-authenticated server
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+        # Validate hash can be retrieved using authenticated client
+        with self.auth_perms("@read") as client:
+            self.assertClientGetHash(client, taskhash, unihash)
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            self.assertClientGetHash(client, taskhash, unihash)
+
+    def test_auth_report_perms(self):
+        admin_client = self.start_auth_server()
+
+        # Without read permission, the user is completely denied
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            self.create_test_hash(client)
+
+        # Read permission allows the call to succeed, but it doesn't record
+        # anything in the database
+        with self.auth_perms("@read") as client:
+            taskhash, outhash, unihash = self.create_test_hash(client)
+            self.assertClientGetHash(client, taskhash, None)
+
+        # Report permission alone is insufficient
+        with self.auth_perms("@report") as client, self.assertRaises(InvokeError):
+            self.create_test_hash(client)
+
+        # Read and report permission actually modify the database
+        with self.auth_perms("@read", "@report") as client:
+            taskhash, outhash, unihash = self.create_test_hash(client)
+            self.assertClientGetHash(client, taskhash, unihash)
+
+    def test_auth_no_token_refresh_from_anon_user(self):
+        self.start_auth_server()
+
+        with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+            client.refresh_token()
+
+    def test_auth_self_token_refresh(self):
+        admin_client = self.start_auth_server()
+
+        # Create a new user with no permissions
+        user = self.create_user("test-user", [])
+
+        with self.auth_client(user) as client:
+            new_user = client.refresh_token()
+
+        self.assertEqual(user["username"], new_user["username"])
+        self.assertNotEqual(user["token"], new_user["token"])
+        self.assertUserCanAuth(new_user)
+        self.assertUserCannotAuth(user)
+
+        # Explicitly specifying with your own username is fine also
+        with self.auth_client(new_user) as client:
+            new_user2 = client.refresh_token(user["username"])
+
+        self.assertEqual(user["username"], new_user2["username"])
+        self.assertNotEqual(user["token"], new_user2["token"])
+        self.assertUserCanAuth(new_user2)
+        self.assertUserCannotAuth(new_user)
+        self.assertUserCannotAuth(user)
+
+    def test_auth_token_refresh(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            client.refresh_token(user["username"])
+
+        with self.auth_perms("@user-admin") as client:
+            new_user = client.refresh_token(user["username"])
+
+        self.assertEqual(user["username"], new_user["username"])
+        self.assertNotEqual(user["token"], new_user["token"])
+        self.assertUserCanAuth(new_user)
+        self.assertUserCannotAuth(user)
+
+    def test_auth_self_get_user(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+        user_info = user.copy()
+        del user_info["token"]
+
+        with self.auth_client(user) as client:
+            info = client.get_user()
+            self.assertEqual(info, user_info)
+
+            # Explicitly asking for your own username is fine also
+            info = client.get_user(user["username"])
+            self.assertEqual(info, user_info)
+
+    def test_auth_get_user(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+        user_info = user.copy()
+        del user_info["token"]
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            client.get_user(user["username"])
+
+        with self.auth_perms("@user-admin") as client:
+            info = client.get_user(user["username"])
+            self.assertEqual(info, user_info)
+
+            info = client.get_user("nonexist-user")
+            self.assertIsNone(info)
+
+    def test_auth_reconnect(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+        user_info = user.copy()
+        del user_info["token"]
+
+        with self.auth_client(user) as client:
+            info = client.get_user()
+            self.assertEqual(info, user_info)
+
+            client.disconnect()
+
+            info = client.get_user()
+            self.assertEqual(info, user_info)
+
+    def test_auth_delete_user(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+
+        # self service
+        with self.auth_client(user) as client:
+            client.delete_user(user["username"])
+
+        self.assertIsNone(admin_client.get_user(user["username"]))
+        user = self.create_user("test-user", [])
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            client.delete_user(user["username"])
+
+        with self.auth_perms("@user-admin") as client:
+            client.delete_user(user["username"])
+
+        # User doesn't exist, so even though the permission is correct, it's an
+        # error
+        with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+            client.delete_user(user["username"])
+
+    def test_auth_set_user_perms(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+
+        self.assertUserPerms(user, [])
+
+        # No self service to change permissions
+        with self.auth_client(user) as client, self.assertRaises(InvokeError):
+            client.set_user_perms(user["username"], ["@all"])
+        self.assertUserPerms(user, [])
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            client.set_user_perms(user["username"], ["@all"])
+        self.assertUserPerms(user, [])
+
+        with self.auth_perms("@user-admin") as client:
+            client.set_user_perms(user["username"], ["@all"])
+        self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+        # Bad permissions
+        with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+            client.set_user_perms(user["username"], ["@this-is-not-a-permission"])
+        self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+    def test_auth_get_all_users(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", [])
+
+        with self.auth_client(user) as client, self.assertRaises(InvokeError):
+            client.get_all_users()
+
+        # Give the test user the correct permission
+        admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+        with self.auth_client(user) as client:
+            all_users = client.get_all_users()
+
+        # Convert to a dictionary for easier comparison
+        all_users = {u["username"]: u for u in all_users}
+
+        self.assertEqual(all_users,
+            {
+                "admin": {
+                    "username": "admin",
+                    "permissions": sorted(list(ALL_PERMISSIONS)),
+                },
+                "test-user": {
+                    "username": "test-user",
+                    "permissions": ["@user-admin"],
+                }
+            }
+        )
+
+    def test_auth_new_user(self):
+        self.start_auth_server()
+
+        permissions = ["@read", "@report", "@db-admin", "@user-admin"]
+        permissions.sort()
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            self.create_user("test-user", permissions, client=client)
+
+        with self.auth_perms("@user-admin") as client:
+            user = self.create_user("test-user", permissions, client=client)
+            self.assertIn("token", user)
+            self.assertEqual(user["username"], "test-user")
+            self.assertEqual(user["permissions"], permissions)
+
+    def test_auth_become_user(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", ["@read", "@report"])
+        user_info = user.copy()
+        del user_info["token"]
+
+        with self.auth_perms() as client, self.assertRaises(InvokeError):
+            client.become_user(user["username"])
+
+        with self.auth_perms("@user-admin") as client:
+            become = client.become_user(user["username"])
+            self.assertEqual(become, user_info)
+
+            info = client.get_user()
+            self.assertEqual(info, user_info)
+
+            # Verify become user is preserved across disconnect
+            client.disconnect()
+
+            info = client.get_user()
+            self.assertEqual(info, user_info)
+
+            # test-user doesn't have become_user permissions, so this should
+            # not work
+            with self.assertRaises(InvokeError):
+                client.become_user(user["username"])
+
+        # No self-service of become
+        with self.auth_client(user) as client, self.assertRaises(InvokeError):
+            client.become_user(user["username"])
+
+        # Give test user permissions to become
+        admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+        # It's possible to become yourself (effectively a noop)
+        with self.auth_perms("@user-admin") as client:
+            become = client.become_user(client.username)
+
+    def test_get_db_usage(self):
+        usage = self.client.get_db_usage()
+
+        self.assertTrue(isinstance(usage, dict))
+        for name in usage.keys():
+            self.assertTrue(isinstance(usage[name], dict))
+            self.assertIn("rows", usage[name])
+            self.assertTrue(isinstance(usage[name]["rows"], int))
+
+    def test_get_db_query_columns(self):
+        columns = self.client.get_db_query_columns()
+
+        self.assertTrue(isinstance(columns, list))
+        self.assertTrue(len(columns) > 0)
+
+        for col in columns:
+            self.client.remove({col: ""})
+
+    def test_auth_is_owner(self):
+        admin_client = self.start_auth_server()
+
+        user = self.create_user("test-user", ["@read", "@report"])
+        with self.auth_client(user) as client:
+            taskhash, outhash, unihash = self.create_test_hash(client)
+            data = client.get_taskhash(self.METHOD, taskhash, True)
+            self.assertEqual(data["owner"], user["username"])
+
+
+class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
+    def get_server_addr(self, server_idx):
+        return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
+
+    def test_stats(self):
+        p = self.run_hashclient(["--address", self.server_address, "stats"], check=True)
+        json.loads(p.stdout)
+
+    def test_stress(self):
+        self.run_hashclient(["--address", self.server_address, "stress"], check=True)
+
+    def test_remove_taskhash(self):
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+        self.run_hashclient([
+            "--address", self.server_address,
+            "remove",
+            "--where", "taskhash", taskhash,
+        ], check=True)
+        self.assertClientGetHash(self.client, taskhash, None)
+
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+        self.assertIsNone(result_outhash)
+
+    def test_remove_unihash(self):
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+        self.run_hashclient([
+            "--address", self.server_address,
+            "remove",
+            "--where", "unihash", unihash,
+        ], check=True)
+        self.assertClientGetHash(self.client, taskhash, None)
+
+    def test_remove_outhash(self):
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+        self.run_hashclient([
+            "--address", self.server_address,
+            "remove",
+            "--where", "outhash", outhash,
+        ], check=True)
+
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+        self.assertIsNone(result_outhash)
+
+    def test_remove_method(self):
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+        self.run_hashclient([
+            "--address", self.server_address,
+            "remove",
+            "--where", "method", self.METHOD,
+        ], check=True)
+        self.assertClientGetHash(self.client, taskhash, None)
+
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+        self.assertIsNone(result_outhash)
+
+    def test_clean_unused(self):
+        taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+        # Clean the database, which should not remove anything because all hashes are in use
+        self.run_hashclient([
+            "--address", self.server_address,
+            "clean-unused", "0",
+        ], check=True)
+        self.assertClientGetHash(self.client, taskhash, unihash)
+
+        # Remove the unihash. The row in the outhash table should still be present
+        self.run_hashclient([
+            "--address", self.server_address,
+            "remove",
+            "--where", "unihash", unihash,
+        ], check=True)
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+        self.assertIsNotNone(result_outhash)
+
+        # Now clean with no minimum age which will remove the outhash
+        self.run_hashclient([
+            "--address", self.server_address,
+            "clean-unused", "0",
+        ], check=True)
+        result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+        self.assertIsNone(result_outhash)
+
+    def test_refresh_token(self):
+        admin_client = self.start_auth_server()
+
+        user = admin_client.new_user("test-user", ["@read", "@report"])
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", user["username"],
+            "--password", user["token"],
+            "refresh-token"
+        ], check=True)
+
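+        # Scrape the refreshed token from the CLI output, which prints it on
+        # a "Token: <value>" line.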
+        new_token = None
+        for l in p.stdout.splitlines():
+            l = l.rstrip()
+            m = re.match(r'Token: +(.*)$', l)
+            if m is not None:
+                new_token = m.group(1)
+
+        self.assertTrue(new_token)
+
+        print("New token is %r" % new_token)
+
+        self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", user["username"],
+            "--password", new_token,
+            "get-user"
+        ], check=True)
+
+    def test_set_user_perms(self):
+        admin_client = self.start_auth_server()
+
+        user = admin_client.new_user("test-user", ["@read"])
+
+        self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", admin_client.username,
+            "--password", admin_client.password,
+            "set-user-perms",
+            "-u", user["username"],
+            "@read", "@report",
+        ], check=True)
+
+        new_user = admin_client.get_user(user["username"])
+
+        self.assertEqual(set(new_user["permissions"]), {"@read", "@report"})
+
+    def test_get_user(self):
+        admin_client = self.start_auth_server()
+
+        user = admin_client.new_user("test-user", ["@read"])
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", admin_client.username,
+            "--password", admin_client.password,
+            "get-user",
+            "-u", user["username"],
+        ], check=True)
+
+        self.assertIn("Username:", p.stdout)
+        self.assertIn("Permissions:", p.stdout)
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", user["username"],
+            "--password", user["token"],
+            "get-user",
+        ], check=True)
+
+        self.assertIn("Username:", p.stdout)
+        self.assertIn("Permissions:", p.stdout)
+
+    def test_get_all_users(self):
+        admin_client = self.start_auth_server()
+
+        admin_client.new_user("test-user1", ["@read"])
+        admin_client.new_user("test-user2", ["@read"])
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", admin_client.username,
+            "--password", admin_client.password,
+            "get-all-users",
+        ], check=True)
+
+        self.assertIn("admin", p.stdout)
+        self.assertIn("test-user1", p.stdout)
+        self.assertIn("test-user2", p.stdout)
+
+    def test_new_user(self):
+        admin_client = self.start_auth_server()
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", admin_client.username,
+            "--password", admin_client.password,
+            "new-user",
+            "-u", "test-user",
+            "@read", "@report",
+        ], check=True)
+
+        new_token = None
+        for l in p.stdout.splitlines():
+            l = l.rstrip()
+            m = re.match(r'Token: +(.*)$', l)
+            if m is not None:
+                new_token = m.group(1)
+
+        self.assertTrue(new_token)
+
+        user = {
+            "username": "test-user",
+            "token": new_token,
+        }
+
+        self.assertUserPerms(user, ["@read", "@report"])
+
+    def test_delete_user(self):
+        admin_client = self.start_auth_server()
+
+        user = admin_client.new_user("test-user", ["@read"])
+
+        p = self.run_hashclient([
+            "--address", self.auth_server_address,
+            "--login", admin_client.username,
+            "--password", admin_client.password,
+            "delete-user",
+            "-u", user["username"],
+        ], check=True)
+
+        self.assertIsNone(admin_client.get_user(user["username"]))
+
+    def test_get_db_usage(self):
+        p = self.run_hashclient([
+            "--address", self.server_address,
+            "get-db-usage",
+        ], check=True)
+
+    def test_get_db_query_columns(self):
+        p = self.run_hashclient([
+            "--address", self.server_address,
+            "get-db-query-columns",
+        ], check=True)
+
+
 class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
     def get_server_addr(self, server_idx):
         return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
@@ -483,3 +1097,77 @@
         # If IPv6 is enabled it should be safe to use localhost directly, but
         # in the general case it is more reliable to resolve the IP address
         # explicitly.
         return socket.gethostbyname("localhost") + ":0"
+
+
+class TestHashEquivalenceWebsocketServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+    def setUp(self):
+        try:
+            import websockets
+        except ImportError as e:
+            self.skipTest(str(e))
+
+        super().setUp()
+
+    def get_server_addr(self, server_idx):
+        # Some hosts cause the asyncio module to misbehave when IPv6 is not
+        # enabled. If IPv6 is enabled it should be safe to use localhost
+        # directly, but in the general case it is more reliable to resolve
+        # the IP address explicitly.
+        host = socket.gethostbyname("localhost")
+        return "ws://%s:0" % host
+
+
+class TestHashEquivalenceWebsocketsSQLAlchemyServer(TestHashEquivalenceWebsocketServer):
+    def setUp(self):
+        try:
+            import sqlalchemy
+            import aiosqlite
+        except ImportError as e:
+            self.skipTest(str(e))
+
+        super().setUp()
+
+    def make_dbpath(self):
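+        # Returning a SQLAlchemy-style URL here makes the server use its
+        # SQLAlchemy (aiosqlite) backend rather than the plain sqlite3 one.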
+        return "sqlite+aiosqlite:///%s" % os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+
+class TestHashEquivalenceExternalServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+    def get_env(self, name):
+        v = os.environ.get(name)
+        if not v:
+            self.skipTest(f'{name} not defined to test an external server')
+        return v
+
+    def start_test_server(self):
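+        # No local server is started; tests run against the address given in
+        # the BB_TEST_HASHSERV environment variable.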
+        return self.get_env('BB_TEST_HASHSERV')
+
+    def start_server(self, *args, **kwargs):
+        self.skipTest('Cannot start local server when testing external servers')
+
+    def start_auth_server(self):
+        self.auth_server_address = self.server_address
+        self.admin_client = self.start_client(
+            self.server_address,
+            username=self.get_env('BB_TEST_HASHSERV_USERNAME'),
+            password=self.get_env('BB_TEST_HASHSERV_PASSWORD'),
+        )
+        return self.admin_client
+
+    def setUp(self):
+        super().setUp()
+        if "BB_TEST_HASHSERV_USERNAME" in os.environ:
+            self.client = self.start_client(
+                self.server_address,
+                username=os.environ["BB_TEST_HASHSERV_USERNAME"],
+                password=os.environ["BB_TEST_HASHSERV_PASSWORD"],
+            )
+        self.client.remove({"method": self.METHOD})
+
+    def tearDown(self):
+        self.client.remove({"method": self.METHOD})
+        super().tearDown()
+
+    def test_auth_get_all_users(self):
+        self.skipTest("Cannot test all users with external server")
+
diff --git a/poky/bitbake/lib/prserv/client.py b/poky/bitbake/lib/prserv/client.py
index 69ab7a4..6b81356 100644
--- a/poky/bitbake/lib/prserv/client.py
+++ b/poky/bitbake/lib/prserv/client.py
@@ -14,28 +14,28 @@
         super().__init__('PRSERVICE', '1.0', logger)
 
     async def getPR(self, version, pkgarch, checksum):
-        response = await self.send_message(
+        response = await self.invoke(
             {'get-pr': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum}}
         )
         if response:
             return response['value']
 
     async def importone(self, version, pkgarch, checksum, value):
-        response = await self.send_message(
+        response = await self.invoke(
             {'import-one': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'value': value}}
         )
         if response:
             return response['value']
 
     async def export(self, version, pkgarch, checksum, colinfo):
-        response = await self.send_message(
+        response = await self.invoke(
             {'export': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'colinfo': colinfo}}
         )
         if response:
             return (response['metainfo'], response['datainfo'])
 
     async def is_readonly(self):
-        response = await self.send_message(
+        response = await self.invoke(
             {'is-readonly': {}}
         )
         if response:
diff --git a/poky/bitbake/lib/prserv/serv.py b/poky/bitbake/lib/prserv/serv.py
index c686b20..62d3b5a 100644
--- a/poky/bitbake/lib/prserv/serv.py
+++ b/poky/bitbake/lib/prserv/serv.py
@@ -20,8 +20,8 @@
 singleton = None
 
 class PRServerClient(bb.asyncrpc.AsyncServerConnection):
-    def __init__(self, reader, writer, table, read_only):
-        super().__init__(reader, writer, 'PRSERVICE', logger)
+    def __init__(self, socket, table, read_only):
+        super().__init__(socket, 'PRSERVICE', logger)
         self.handlers.update({
             'get-pr': self.handle_get_pr,
             'import-one': self.handle_import_one,
@@ -36,12 +36,12 @@
 
     async def dispatch_message(self, msg):
         try:
-            await super().dispatch_message(msg)
+            return await super().dispatch_message(msg)
         except:
             self.table.sync()
             raise
-
-        self.table.sync_if_dirty()
+        else:
+            self.table.sync_if_dirty()
 
     async def handle_get_pr(self, request):
         version = request['version']
@@ -57,7 +57,7 @@
         except sqlite3.Error as exc:
             logger.error(str(exc))
 
-        self.write_message(response)
+        return response
 
     async def handle_import_one(self, request):
         response = None
@@ -71,7 +71,7 @@
             if value is not None:
                 response = {'value': value}
 
-        self.write_message(response)
+        return response
 
     async def handle_export(self, request):
         version = request['version']
@@ -85,12 +85,10 @@
             logger.error(str(exc))
             metainfo = datainfo = None
 
-        response = {'metainfo': metainfo, 'datainfo': datainfo}
-        self.write_message(response)
+        return {'metainfo': metainfo, 'datainfo': datainfo}
 
     async def handle_is_readonly(self, request):
-        response = {'readonly': self.read_only}
-        self.write_message(response)
+        return {'readonly': self.read_only}
 
 class PRServer(bb.asyncrpc.AsyncServer):
     def __init__(self, dbfile, read_only=False):
@@ -99,20 +97,23 @@
         self.table = None
         self.read_only = read_only
 
-    def accept_client(self, reader, writer):
-        return PRServerClient(reader, writer, self.table, self.read_only)
+    def accept_client(self, socket):
+        return PRServerClient(socket, self.table, self.read_only)
 
-    def _serve_forever(self):
+    def start(self):
+        tasks = super().start()
         self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only)
         self.table = self.db["PRMAIN"]
 
         logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
                      (self.dbfile, self.address, str(os.getpid())))
 
-        super()._serve_forever()
+        return tasks
 
+    async def stop(self):
         self.table.sync_if_dirty()
         self.db.disconnect()
+        await super().stop()
 
     def signal_handler(self):
         super().signal_handler()
@@ -129,7 +130,7 @@
     def start(self):
         self.prserv = PRServer(self.dbfile)
         self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
-        self.process = self.prserv.serve_as_process()
+        self.process = self.prserv.serve_as_process(log_level=logging.WARNING)
 
         if not self.prserv.address:
             raise PRServiceConfigError
@@ -344,9 +345,9 @@
 def ping(host, port):
     from . import client
 
-    conn = client.PRClient()
-    conn.connect_tcp(host, port)
-    return conn.ping()
+    with client.PRClient() as conn:
+        conn.connect_tcp(host, port)
+        return conn.ping()
 
 def connect(host, port):
     from . import client
diff --git a/poky/bitbake/lib/toaster/orm/models.py b/poky/bitbake/lib/toaster/orm/models.py
index 0d503a5..1098ad3 100644
--- a/poky/bitbake/lib/toaster/orm/models.py
+++ b/poky/bitbake/lib/toaster/orm/models.py
@@ -1389,9 +1389,6 @@
         return "Machine " + self.name + "(" + self.description + ")"
 
 
-
-
-
 class BitbakeVersion(models.Model):
 
     name = models.CharField(max_length=32, unique = True)
@@ -1853,6 +1850,8 @@
             os.kill(int(pidf.read()), SIGUSR1)
     except FileNotFoundError:
         logger.info("Stopping existing runbuilds: no current process found")
+    except ProcessLookupError:
+        logger.warning("Stopping existing runbuilds: process not found")
 
 class Distro(models.Model):
     search_allowed_fields = ["name", "description", "layer_version__layer__name"]
diff --git a/poky/bitbake/lib/toaster/pytest.ini b/poky/bitbake/lib/toaster/pytest.ini
new file mode 100644
index 0000000..f07076b
--- /dev/null
+++ b/poky/bitbake/lib/toaster/pytest.ini
@@ -0,0 +1,19 @@
+# -- FILE: pytest.ini (or tox.ini)
+[pytest]
+DJANGO_SETTINGS_MODULE = toastermain.settings_test
+
+python_files = db/test_*.py commands/test_*.py views/test_*.py browser/test_*.py functional/test_*.py
+
+# --create-db - force re-creation of the test database
+# https://pytest-django.readthedocs.io/en/latest/database.html#create-db-force-re-creation-of-the-test-database
+
+# --html=report.html --self-contained-html
+# https://docs.pytest.org/en/latest/usage.html#creating-html-reports
+# https://pytest-html.readthedocs.io/en/latest/user_guide.html#creating-a-self-contained-report
+addopts = --create-db --html="Toaster Tests Report.html" --self-contained-html
+
+# Define environment variables using pytest-env
+# A pytest plugin that enables you to set environment variables in the pytest.ini file.
+# https://pypi.org/project/pytest-env/
+env =
+    TOASTER_BUILDSERVER=1
diff --git a/poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
index 9a4e27a..e0ac437 100644
--- a/poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
+++ b/poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py
@@ -33,7 +33,13 @@
         browser = env_browser
 
     if browser == 'chrome':
-        return webdriver.Chrome()
+        options = webdriver.ChromeOptions()
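+        # Run Chrome headless with flags that keep it stable in CI
+        # containers (no sandbox, no /dev/shm usage).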
+        options.add_argument('--headless')
+        options.add_argument('--disable-infobars')
+        options.add_argument('--disable-dev-shm-usage')
+        options.add_argument('--no-sandbox')
+        options.add_argument('--remote-debugging-port=9222')
+        return webdriver.Chrome(options=options)
     elif browser == 'firefox':
         return webdriver.Firefox()
     elif browser == 'marionette':
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_all_builds_page.py b/poky/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
index d4312bb..4e9b9fd 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_all_builds_page.py
@@ -7,13 +7,16 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import re, time
+import re
+import time
 
 from django.urls import reverse
+from selenium.webdriver.support.select import Select
 from django.utils import timezone
+from bldcontrol.models import BuildRequest
 from tests.browser.selenium_helpers import SeleniumTestCase
 
-from orm.models import BitbakeVersion, Release, Project, Build, Target
+from orm.models import BitbakeVersion, Layer, Layer_Version, Recipe, Release, Project, Build, Target, Task
 
 from selenium.webdriver.common.by import By
 
@@ -102,6 +105,66 @@
 
         return found_row
 
+    def _get_create_builds(self, **kwargs):
+        """ Create a build and return the build object """
+        build1 = Build.objects.create(**self.project1_build_success)
+        build2 = Build.objects.create(**self.project1_build_failure)
+
+        # add some targets to these builds so they have recipe links
+        # (and so we can find the row in the ToasterTable corresponding to
+        # a particular build)
+        Target.objects.create(build=build1, target='foo')
+        Target.objects.create(build=build2, target='bar')
+
+        if kwargs:
+            # Create kwargs.get('success') builds with success status with target
+            # and kwargs.get('failure') builds with failure status with target
+            for i in range(kwargs.get('success', 0)):
+                now = timezone.now()
+                self.project1_build_success['started_on'] = now
+                self.project1_build_success[
+                    'completed_on'] = now - timezone.timedelta(days=i)
+                build = Build.objects.create(**self.project1_build_success)
+                Target.objects.create(build=build,
+                                      target=f'{i}_success_recipe',
+                                      task=f'{i}_success_task')
+
+                self._set_buildRequest_and_task_on_build(build)
+            for i in range(kwargs.get('failure', 0)):
+                now = timezone.now()
+                self.project1_build_failure['started_on'] = now
+                self.project1_build_failure[
+                    'completed_on'] = now - timezone.timedelta(days=i)
+                build = Build.objects.create(**self.project1_build_failure)
+                Target.objects.create(build=build,
+                                      target=f'{i}_fail_recipe',
+                                      task=f'{i}_fail_task')
+                self._set_buildRequest_and_task_on_build(build)
+        return build1, build2
+
+    def _create_recipe(self):
+        """ Add a recipe to the database and return it """
+        layer = Layer.objects.create()
+        layer_version = Layer_Version.objects.create(layer=layer)
+        return Recipe.objects.create(name='recipe_foo', layer_version=layer_version)
+
+    def _set_buildRequest_and_task_on_build(self, build):
+        """ Set buildRequest and task on build """
+        build.recipes_parsed = 1
+        build.save()
+        buildRequest = BuildRequest.objects.create(
+            build=build,
+            project=self.project1,
+            state=BuildRequest.REQ_COMPLETED)
+        build.build_request = buildRequest
+        recipe = self._create_recipe()
+        task = Task.objects.create(build=build,
+                                   recipe=recipe,
+                                   task_name='task',
+                                   outcome=Task.OUTCOME_SUCCESS)
+        task.save()
+        build.save()
+
     def test_show_tasks_with_suffix(self):
         """ Task should be shown as suffix on build name """
         build = Build.objects.create(**self.project1_build_success)
@@ -128,7 +191,8 @@
         but should be shown for other builds
         """
         build1 = Build.objects.create(**self.project1_build_success)
-        default_build = Build.objects.create(**self.default_project_build_success)
+        default_build = Build.objects.create(
+            **self.default_project_build_success)
 
         url = reverse('all-builds')
         self.get(url)
@@ -146,7 +210,6 @@
         self.assertEqual(len(run_again_button), 0,
                          'should not see a rebuild button for cli builds')
 
-
     def test_tooltips_on_project_name(self):
         """
         Test tooltips shown next to project name in the main table
@@ -188,14 +251,7 @@
         recent builds area; failed builds should not have links on the time column,
         or in the recent builds area
         """
-        build1 = Build.objects.create(**self.project1_build_success)
-        build2 = Build.objects.create(**self.project1_build_failure)
-
-        # add some targets to these builds so they have recipe links
-        # (and so we can find the row in the ToasterTable corresponding to
-        # a particular build)
-        Target.objects.create(build=build1, target='foo')
-        Target.objects.create(build=build2, target='bar')
+        build1, build2 = self._get_create_builds()
 
         url = reverse('all-builds')
         self.get(url)
@@ -223,3 +279,185 @@
         links = build2_row.find_elements(By.CSS_SELECTOR, 'td.time a')
         msg = 'should not be a link on the build time for a failed build'
         self.assertEquals(len(links), 0, msg)
+
+    def test_builds_table_search_box(self):
+        """ Test the search box in the builds table on the all builds page """
+        self._get_create_builds()
+
+        url = reverse('all-builds')
+        self.get(url)
+
+        # Check search box is present and works
+        self.wait_until_present('#allbuildstable tbody tr')
+        search_box = self.find('#search-input-allbuildstable')
+        self.assertTrue(search_box.is_displayed())
+
+        # Check that we can search for a build by recipe name
+        search_box.send_keys('foo')
+        search_btn = self.find('#search-submit-allbuildstable')
+        search_btn.click()
+        self.wait_until_present('#allbuildstable tbody tr')
+        rows = self.find_all('#allbuildstable tbody tr')
+        self.assertTrue(len(rows) >= 1)
+
+    def test_filtering_on_failure_tasks_column(self):
+        """ Test the filtering on failure tasks column in the builds table on the all builds page """
+        self._get_create_builds(success=10, failure=10)
+
+        url = reverse('all-builds')
+        self.get(url)
+
+        # Check filtering on failure tasks column
+        self.wait_until_present('#allbuildstable tbody tr')
+        failed_tasks_filter = self.find('#failed_tasks_filter')
+        failed_tasks_filter.click()
+        # Check popup is visible
+        time.sleep(1)
+        self.wait_until_present('#filter-modal-allbuildstable')
+        self.assertTrue(
+            self.find('#filter-modal-allbuildstable').is_displayed())
+        # Check that we can filter by failure tasks
+        build_without_failure_tasks = self.find(
+            '#failed_tasks_filter\\:without_failed_tasks')
+        build_without_failure_tasks.click()
+        # click on apply button
+        self.find('#filter-modal-allbuildstable .btn-primary').click()
+        self.wait_until_present('#allbuildstable tbody tr')
+        # Check if filter is applied, by checking if failed_tasks_filter has btn-primary class
+        self.assertTrue(self.find('#failed_tasks_filter').get_attribute(
+            'class').find('btn-primary') != -1)
+
+    def test_filtering_on_completedOn_column(self):
+        """ Test the filtering on completed_on column in the builds table on the all builds page """
+        self._get_create_builds(success=10, failure=10)
+
+        url = reverse('all-builds')
+        self.get(url)
+
+        # Check filtering on completed_on column
+        self.wait_until_present('#allbuildstable tbody tr')
+        completed_on_filter = self.find('#completed_on_filter')
+        completed_on_filter.click()
+        # Check popup is visible
+        time.sleep(1)
+        self.wait_until_present('#filter-modal-allbuildstable')
+        self.assertTrue(
+            self.find('#filter-modal-allbuildstable').is_displayed())
+        # Check that we can filter by date range
+        date_range_filter = self.find(
+            '#completed_on_filter\\:date_range')
+        date_range_filter.click()
+        # click on apply button
+        self.find('#filter-modal-allbuildstable .btn-primary').click()
+        self.wait_until_present('#allbuildstable tbody tr')
+        # Check if filter is applied, by checking if completed_on_filter has btn-primary class
+        self.assertTrue(self.find('#completed_on_filter').get_attribute(
+            'class').find('btn-primary') != -1)
+
+        # Filter by date range
+        self.find('#completed_on_filter').click()
+        self.wait_until_present('#filter-modal-allbuildstable')
+        date_ranges = self.driver.find_elements(
+            By.XPATH, '//input[@class="form-control hasDatepicker"]')
+        today = timezone.now()
+        yesterday = today - timezone.timedelta(days=1)
+        time.sleep(1)
+        date_ranges[0].send_keys(yesterday.strftime('%Y-%m-%d'))
+        date_ranges[1].send_keys(today.strftime('%Y-%m-%d'))
+        self.find('#filter-modal-allbuildstable .btn-primary').click()
+        self.wait_until_present('#allbuildstable tbody tr')
+        self.assertTrue(self.find('#completed_on_filter').get_attribute(
+            'class').find('btn-primary') != -1)
+        # Check if filter is applied, number of builds displayed should be 6
+        time.sleep(1)
+        self.assertTrue(len(self.find_all('#allbuildstable tbody tr')) == 6)
+
+    def test_builds_table_editColumn(self):
+        """ Test the edit column feature in the builds table on the all builds page """
+        self._get_create_builds(success=10, failure=10)
+
+        def test_edit_column(check_box_id):
+            # Check that we can hide/show table column
+            check_box = self.find(f'#{check_box_id}')
+            th_class = str(check_box_id).replace('checkbox-', '')
+            if check_box.is_selected():
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#allbuildstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+                check_box.click()
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#allbuildstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+            else:
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#allbuildstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+                check_box.click()
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#allbuildstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+        url = reverse('all-builds')
+        self.get(url)
+        self.wait_until_present('#allbuildstable tbody tr')
+
+        # Check edit column
+        edit_column = self.find('#edit-columns-button')
+        self.assertTrue(edit_column.is_displayed())
+        edit_column.click()
+        # Check dropdown is visible
+        self.wait_until_visible('ul.dropdown-menu.editcol')
+
+        # Check that each column can be shown and hidden
+        test_edit_column('checkbox-errors_no')
+        test_edit_column('checkbox-failed_tasks')
+        test_edit_column('checkbox-image_files')
+        test_edit_column('checkbox-project')
+        test_edit_column('checkbox-started_on')
+        test_edit_column('checkbox-time')
+        test_edit_column('checkbox-warnings_no')
+
+    def test_builds_table_show_rows(self):
+        """ Test the show rows feature in the builds table on the all builds page """
+        self._get_create_builds(success=100, failure=100)
+
+        def test_show_rows(row_to_show, show_row_link):
+            # Check that we can show rows == row_to_show
+            show_row_link.select_by_value(str(row_to_show))
+            self.wait_until_present('#allbuildstable tbody tr')
+            time.sleep(1)
+            self.assertTrue(
+                len(self.find_all('#allbuildstable tbody tr')) == row_to_show
+            )
+
+        url = reverse('all-builds')
+        self.get(url)
+        self.wait_until_present('#allbuildstable tbody tr')
+
+        show_rows = self.driver.find_elements(
+            By.XPATH,
+            '//select[@class="form-control pagesize-allbuildstable"]'
+        )
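+        # the page-size <select> may be rendered more than once around
+        # the table, so exercise every instance found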
+        # Check show rows
+        for show_row_link in show_rows:
+            show_row_link = Select(show_row_link)
+            test_show_rows(10, show_row_link)
+            test_show_rows(25, show_row_link)
+            test_show_rows(50, show_row_link)
+            test_show_rows(100, show_row_link)
+            test_show_rows(150, show_row_link)
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_all_projects_page.py b/poky/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
index 3389d32..a880dbc 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_all_projects_page.py
@@ -8,9 +8,11 @@
 #
 
 import re
+import time
 
 from django.urls import reverse
 from django.utils import timezone
+from selenium.webdriver.support.select import Select
 from tests.browser.selenium_helpers import SeleniumTestCase
 
 from orm.models import BitbakeVersion, Release, Project, Build
@@ -37,6 +39,17 @@
 
         self.release = None
 
+    def _create_projects(self, nb_project=10):
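+        # bulk-create nb_project projects so the table tests have enough
+        # rows to search, paginate and edit columns against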
+        projects = []
+        for i in range(1, nb_project + 1):
+            projects.append(
+                Project(
+                    name='test project {}'.format(i),
+                    release=self.release,
+                )
+            )
+        Project.objects.bulk_create(projects)
+
     def _add_build_to_default_project(self):
         """ Add a build to the default project (not used in all tests) """
         now = timezone.now()
@@ -205,3 +218,116 @@
         expected_url = reverse('project', args=(self.project.id,))
         msg = 'link on project name should point to configuration but was %s' % link_url
         self.assertTrue(link_url.endswith(expected_url), msg)
+
+    def test_allProject_table_search_box(self):
+        """ Test the search box in the all project table on the all projects page """
+        self._create_projects()
+
+        url = reverse('all-projects')
+        self.get(url)
+
+        # Check search box is present and works
+        self.wait_until_present('#projectstable tbody tr')
+        search_box = self.find('#search-input-projectstable')
+        self.assertTrue(search_box.is_displayed())
+
+        # Check that we can search for a project by project name
+        search_box.send_keys('test project 10')
+        search_btn = self.find('#search-submit-projectstable')
+        search_btn.click()
+        self.wait_until_present('#projectstable tbody tr')
+        time.sleep(1)
+        rows = self.find_all('#projectstable tbody tr')
+        self.assertTrue(len(rows) == 1)
+
+    def test_allProject_table_editColumn(self):
+        """ Test the edit column feature in the projects table on the all projects page """
+        self._create_projects()
+
+        def test_edit_column(check_box_id):
+            # Check that we can hide/show table column
+            check_box = self.find(f'#{check_box_id}')
+            th_class = str(check_box_id).replace('checkbox-', '')
+            if check_box.is_selected():
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#projectstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+                check_box.click()
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#projectstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+            else:
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#projectstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+                check_box.click()
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#projectstable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+        url = reverse('all-projects')
+        self.get(url)
+        self.wait_until_present('#projectstable tbody tr')
+
+        # Check edit column
+        edit_column = self.find('#edit-columns-button')
+        self.assertTrue(edit_column.is_displayed())
+        edit_column.click()
+        # Check dropdown is visible
+        self.wait_until_visible('ul.dropdown-menu.editcol')
+
+        # Check that each column can be shown and hidden
+        test_edit_column('checkbox-errors')
+        test_edit_column('checkbox-image_files')
+        test_edit_column('checkbox-last_build_outcome')
+        test_edit_column('checkbox-recipe_name')
+        test_edit_column('checkbox-warnings')
+
+    def test_allProject_table_show_rows(self):
+        """ Test the show rows feature in the projects table on the all projects page """
+        self._create_projects(nb_project=200)
+
+        def test_show_rows(row_to_show, show_row_link):
+            # Check that we can show rows == row_to_show
+            show_row_link.select_by_value(str(row_to_show))
+            self.wait_until_present('#projectstable tbody tr')
+            sleep_time = 1
+            if row_to_show == 150:
+                # wait more time for 150 rows
+                sleep_time = 2
+            time.sleep(sleep_time)
+            self.assertTrue(
+                len(self.find_all('#projectstable tbody tr')) == row_to_show
+            )
+
+        url = reverse('all-projects')
+        self.get(url)
+        self.wait_until_present('#projectstable tbody tr')
+
+        show_rows = self.driver.find_elements(
+            By.XPATH,
+            '//select[@class="form-control pagesize-projectstable"]'
+        )
+        # Check show rows
+        for show_row_link in show_rows:
+            show_row_link = Select(show_row_link)
+            test_show_rows(10, show_row_link)
+            test_show_rows(25, show_row_link)
+            test_show_rows(50, show_row_link)
+            test_show_rows(100, show_row_link)
+            test_show_rows(150, show_row_link)
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_delete_project.py b/poky/bitbake/lib/toaster/tests/browser/test_delete_project.py
new file mode 100644
index 0000000..1941777
--- /dev/null
+++ b/poky/bitbake/lib/toaster/tests/browser/test_delete_project.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux Inc
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.ui import Select
+from tests.browser.selenium_helpers import SeleniumTestCase
+from orm.models import BitbakeVersion, Project, Release
+from selenium.webdriver.common.by import By
+
+class TestDeleteProject(SeleniumTestCase):
+
+    def setUp(self):
+        bitbake, _ = BitbakeVersion.objects.get_or_create(
+            name="master",
+            giturl="git://master",
+            branch="master",
+            dirpath="master")
+
+        self.release, _ = Release.objects.get_or_create(
+            name="master",
+            description="Yocto Project master",
+            branch_name="master",
+            helptext="latest",
+            bitbake_version=bitbake)
+
+        Release.objects.get_or_create(
+            name="foo",
+            description="Yocto Project foo",
+            branch_name="foo",
+            helptext="latest",
+            bitbake_version=bitbake)
+
+    @pytest.mark.django_db
+    def test_delete_project(self):
+        """ Test delete a project
+            - Check delete modal is visible
+            - Check delete modal has right text
+            - Confirm delete
+            - Check project is deleted
+        """
+        project_name = "project_to_delete"
+        url = reverse('newproject')
+        self.get(url)
+        self.enter_text('#new-project-name', project_name)
+        select = Select(self.find('#projectversion'))
+        select.select_by_value(str(self.release.pk))
+        self.click("#create-project-button")
+        # We should get redirected to the new project's page with the
+        # notification at the top
+        element = self.wait_until_visible('#project-created-notification')
+        self.assertTrue(project_name in element.text,
+                        "New project name not in new project notification")
+        self.assertTrue(Project.objects.filter(name=project_name).count(),
+                        "New project not found in database")
+
+        # Delete project
+        delete_project_link = self.driver.find_element(
+            By.XPATH, '//a[@href="#delete-project-modal"]')
+        delete_project_link.click()
+
+        # Check delete modal is visible
+        self.wait_until_visible('#delete-project-modal')
+
+        # Check delete modal has right text
+        modal_header_text = self.find('#delete-project-modal .modal-header').text
+        self.assertTrue(
+            "Are you sure you want to delete this project?" in modal_header_text,
+            "Delete project modal header text is wrong")
+
+        modal_body_text = self.find('#delete-project-modal .modal-body').text
+        self.assertTrue(
+            "Cancel its builds currently in progress" in modal_body_text,
+            "Modal body doesn't contain: Cancel its builds currently in progress")
+        self.assertTrue(
+            "Remove its configuration information" in modal_body_text,
+            "Modal body doesn't contain: Remove its configuration information")
+        self.assertTrue(
+            "Remove its imported layers" in modal_body_text,
+            "Modal body doesn't contain: Remove its imported layers")
+        self.assertTrue(
+            "Remove its custom images" in modal_body_text,
+            "Modal body doesn't contain: Remove its custom images")
+        self.assertTrue(
+            "Remove all its build information" in modal_body_text,
+            "Modal body doesn't contain: Remove all its build information")
+
+        # Confirm delete
+        delete_btn = self.find('#delete-project-confirmed')
+        delete_btn.click()
+
+        # Check project is deleted
+        self.wait_until_visible('#change-notification')
+        delete_notification = self.find('#change-notification-msg')
+        self.assertTrue("You have deleted 1 project:" in delete_notification.text)
+        self.assertTrue(project_name in delete_notification.text)
+        self.assertFalse(Project.objects.filter(name=project_name).exists(),
+                        "Project not deleted from database")
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_landing_page.py b/poky/bitbake/lib/toaster/tests/browser/test_landing_page.py
index 8bb64b9..7ec52a4 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_landing_page.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_landing_page.py
@@ -10,8 +10,9 @@
 from django.urls import reverse
 from django.utils import timezone
 from tests.browser.selenium_helpers import SeleniumTestCase
+from selenium.webdriver.common.by import By
 
-from orm.models import Project, Build
+from orm.models import Layer, Layer_Version, Project, Build
 
 class TestLandingPage(SeleniumTestCase):
     """ Tests for redirects on the landing page """
@@ -29,6 +30,124 @@
         self.project.is_default = True
         self.project.save()
 
+    def test_icon_info_visible_and_clickable(self):
+        """ Test that the information icon is visible and clickable """
+        self.get(reverse('landing'))
+        info_sign = self.find('#toaster-version-info-sign')
+
+        # check that the info sign is visible
+        self.assertTrue(info_sign.is_displayed())
+
+        # check that the info sign is clickable
+        # and info modal is appearing when clicking on the info sign
+        info_sign.click() # clicking the info sign makes the 'aria-describedby' attribute visible
+        info_modal_id = info_sign.get_attribute('aria-describedby')
+        info_modal = self.find(f'#{info_modal_id}')
+        self.assertTrue(info_modal.is_displayed())
+        self.assertTrue("Toaster version information" in info_modal.text)
+
+    def test_documentation_link_displayed(self):
+        """ Test that the documentation link is displayed """
+        self.get(reverse('landing'))
+        documentation_link = self.find('#navbar-docs > a')
+
+        # check that the documentation link is visible
+        self.assertTrue(documentation_link.is_displayed())
+
+        # check that the browser opens the Toaster manual in a new tab
+        # when clicking on the documentation link
+        self.assertEqual(documentation_link.get_attribute('target'), '_blank')
+        self.assertEqual(
+            documentation_link.get_attribute('href'),
+            'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual')
+        self.assertTrue("Documentation" in documentation_link.text)
+
+    def test_openembedded_jumbotron_link_visible_and_clickable(self):
+        """ Test OpenEmbedded link jumbotron is visible and clickable: """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check OpenEmbedded
+        openembedded = jumbotron.find_element(By.LINK_TEXT, 'OpenEmbedded')
+        self.assertTrue(openembedded.is_displayed())
+        openembedded.click()
+        self.assertTrue("openembedded.org" in self.driver.current_url)
+
+    def test_bitbake_jumbotron_link_visible_and_clickable(self):
+        """ Test BitBake link jumbotron is visible and clickable: """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check BitBake
+        bitbake = jumbotron.find_element(By.LINK_TEXT, 'BitBake')
+        self.assertTrue(bitbake.is_displayed())
+        bitbake.click()
+        self.assertTrue("docs.yoctoproject.org/bitbake.html" in self.driver.current_url)
+
+    def test_yoctoproject_jumbotron_link_visible_and_clickable(self):
+        """ Test Yocto Project link jumbotron is visible and clickable: """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check Yocto Project
+        yoctoproject = jumbotron.find_element(By.LINK_TEXT, 'Yocto Project')
+        self.assertTrue(yoctoproject.is_displayed())
+        yoctoproject.click()
+        self.assertTrue("yoctoproject.org" in self.driver.current_url)
+
+    def test_link_setup_using_toaster_visible_and_clickable(self):
+        """ Test big magenta button setting up and using toaster link in jumbotron
+            if visible and clickable
+        """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check Big magenta button
+        big_magenta_button = jumbotron.find_element(By.LINK_TEXT,
+            'Toaster is ready to capture your command line builds'
+        )
+        self.assertTrue(big_magenta_button.is_displayed())
+        big_magenta_button.click()
+        self.assertTrue("docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" in self.driver.current_url)
+
+    def test_link_create_new_project_in_jumbotron_visible_and_clickable(self):
+        """ Test big blue button create new project jumbotron if visible and clickable """
+        # Create a layer and a layer version to make visible the big blue button
+        layer = Layer.objects.create(name='bar')
+        Layer_Version.objects.create(layer=layer)
+
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check Big Blue button
+        big_blue_button = jumbotron.find_element(By.LINK_TEXT,
+            'Create your first Toaster project to run manage builds'
+        )
+        self.assertTrue(big_blue_button.is_displayed())
+        big_blue_button.click()
+        self.assertTrue("toastergui/newproject/" in self.driver.current_url)
+
+    def test_toaster_manual_link_visible_and_clickable(self):
+        """ Test Read the Toaster manual link jumbotron is visible and clickable: """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check Read the Toaster manual
+        toaster_manual = jumbotron.find_element(By.LINK_TEXT, 'Read the Toaster manual')
+        self.assertTrue(toaster_manual.is_displayed())
+        toaster_manual.click()
+        self.assertTrue("https://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual" in self.driver.current_url)
+
+    def test_contrib_to_toaster_link_visible_and_clickable(self):
+        """ Test Contribute to Toaster link jumbotron is visible and clickable: """
+        self.get(reverse('landing'))
+        jumbotron = self.find('.jumbotron')
+
+        # check Contribute to Toaster
+        contribute_to_toaster = jumbotron.find_element(By.LINK_TEXT, 'Contribute to Toaster')
+        self.assertTrue(contribute_to_toaster.is_displayed())
+        contribute_to_toaster.click()
+        self.assertTrue("wiki.yoctoproject.org/wiki/contribute_to_toaster" in str(self.driver.current_url).lower())
+
     def test_only_default_project(self):
         """
         No projects except default
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
index 71bdd2a..cb7b915 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py
@@ -97,6 +97,8 @@
                             "Expecting any of \"%s\"but got \"%s\"" %
                             (self.initial_values, value))
 
+            # Make sure the input is visible before sending keys
+            self.wait_until_visible("#layer-git input[type=text]")
             inputs.send_keys("-edited")
 
         # Save the new values
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py b/poky/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
index a34a092..949a947 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_most_recent_builds_states.py
@@ -54,6 +54,7 @@
         build.outcome = Build.IN_PROGRESS
         build.recipes_to_parse = recipes_to_parse
         build.recipes_parsed = 0
+        build.save()
 
         build_request.state = BuildRequest.REQ_INPROGRESS
         build_request.save()
@@ -100,7 +101,7 @@
             'Tasks starting', 'build should show "tasks starting" status')
 
         # first task finished; check tasks progress bar
-        task1.order = 1
+        task1.outcome = Task.OUTCOME_SUCCESS
         task1.save()
 
         self.get(url)
@@ -117,7 +118,7 @@
         element = Wait(self.driver).until(task_bar_updated, msg)
 
         # last task finished; check tasks progress bar updates
-        task2.order = 2
+        task2.outcome = Task.OUTCOME_SUCCESS
         task2.save()
 
         self.get(url)
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py b/poky/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
index 6361f40..34d1bd4 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_new_custom_image_page.py
@@ -48,8 +48,12 @@
         self.recipe = Recipe.objects.create(
             name='core-image-minimal',
             layer_version=layer_version,
+            file_path='/tmp/core-image-minimal.bb',
             is_image=True
         )
+        # create a tmp file for the recipe
+        with open(self.recipe.file_path, 'w') as f:
+            f.write('foo')
 
         # another project with a custom image already in it
         project2 = Project.objects.create(name='whoop', release=release)
diff --git a/poky/bitbake/lib/toaster/tests/browser/test_sample.py b/poky/bitbake/lib/toaster/tests/browser/test_sample.py
index b0067c2..7397377 100644
--- a/poky/bitbake/lib/toaster/tests/browser/test_sample.py
+++ b/poky/bitbake/lib/toaster/tests/browser/test_sample.py
@@ -27,3 +27,12 @@
         self.get(url)
         brand_link = self.find('.toaster-navbar-brand a.brand')
         self.assertEqual(brand_link.text.strip(), 'Toaster')
+
+    def test_no_builds_message(self):
+        """ Test that a message is shown when there are no builds """
+        url = reverse('all-builds')
+        self.get(url)
+        div_msg = self.find('#empty-state-allbuildstable .alert-info')
+
+        msg = 'Sorry - no data found'
+        self.assertEqual(div_msg.text, msg)
diff --git a/poky/bitbake/lib/toaster/tests/builds/buildtest.py b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
index 13b51fb..53cd7a9 100644
--- a/poky/bitbake/lib/toaster/tests/builds/buildtest.py
+++ b/poky/bitbake/lib/toaster/tests/builds/buildtest.py
@@ -116,6 +116,15 @@
         project = Project.objects.create_project(name=BuildTest.PROJECT_NAME,
                                                  release=release)
 
+        passthrough_variable_names = ["SSTATE_DIR", "DL_DIR"]
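+        # propagate the caller's sstate cache and download directory into
+        # the project so test builds can reuse existing artefacts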
+        for variable_name in passthrough_variable_names:
+            current_variable = os.environ.get(variable_name)
+            if current_variable:
+                ProjectVariable.objects.get_or_create(
+                    name=variable_name,
+                    value=current_variable,
+                    project=project)
+
         if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
             ProjectVariable.objects.get_or_create(
                 name="SSTATE_MIRRORS",
diff --git a/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py b/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
index c3191f6..b80d403 100644
--- a/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
+++ b/poky/bitbake/lib/toaster/tests/functional/functional_helpers.py
@@ -15,8 +15,6 @@
 import re
 
 from tests.browser.selenium_helpers_base import SeleniumTestCaseBase
-from tests.builds.buildtest import load_build_environment
-from bldcontrol.models import BuildEnvironment
 from selenium.webdriver.common.by import By
 from selenium.common.exceptions import NoSuchElementException
 
@@ -33,10 +31,6 @@
             raise RuntimeError("Please initialise django with the tests settings:  " \
                 "DJANGO_SETTINGS_MODULE='toastermain.settings_test'")
 
-        if BuildEnvironment.objects.count() == 0:
-            BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL)
-        load_build_environment()
-
         # start toaster
         cmd = "bash -c 'source toaster start'"
         p = subprocess.Popen(
diff --git a/poky/bitbake/lib/toaster/tests/functional/test_create_new_project.py b/poky/bitbake/lib/toaster/tests/functional/test_create_new_project.py
new file mode 100644
index 0000000..dc7d1fc
--- /dev/null
+++ b/poky/bitbake/lib/toaster/tests/functional/test_create_new_project.py
@@ -0,0 +1,177 @@
+#! /usr/bin/env python3
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.select import Select
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from orm.models import Project
+from selenium.webdriver.common.by import By
+
+
+@pytest.mark.django_db
+class TestCreateNewProject(SeleniumFunctionalTestCase):
+
+    def _create_test_new_project(
+        self,
+        project_name,
+        release,
+        release_title,
+        merge_toaster_settings,
+    ):
+        """ Create/Test new project using:
+          - Project Name: Any string
+          - Release: Any string
+          - Merge Toaster settings: True or False
+        """
+        self.get(reverse('newproject'))
+        self.driver.find_element(By.ID,
+                                 "new-project-name").send_keys(project_name)
+
+        select = Select(self.find('#projectversion'))
+        select.select_by_value(release)
+
+        # check merge toaster settings
+        checkbox = self.find('.checkbox-mergeattr')
+        if merge_toaster_settings:
+            if not checkbox.is_selected():
+                checkbox.click()
+        else:
+            if checkbox.is_selected():
+                checkbox.click()
+
+        self.driver.find_element(By.ID, "create-project-button").click()
+
+        element = self.wait_until_visible('#project-created-notification')
+        self.assertTrue(
+            self.element_exists('#project-created-notification'),
+            f"Project:{project_name} creation notification not shown"
+        )
+        self.assertTrue(
+            project_name in element.text,
+            f"New project name:{project_name} not in new project notification"
+        )
+        self.assertTrue(
+            Project.objects.filter(name=project_name).count(),
+            f"New project:{project_name} not found in database"
+        )
+
+        # check release
+        self.assertTrue(re.search(
+            release_title,
+            self.driver.find_element(By.XPATH,
+                                     "//span[@id='project-release-title']"
+                                     ).text),
+                        'The project release is not defined')
+
+    def test_create_new_project_master(self):
+        """ Test create new project using:
+          - Project Name: Any string
+          - Release: Yocto Project master (option value: 3)
+          - Merge Toaster settings: False
+        """
+        release = '3'
+        release_title = 'Yocto Project master'
+        project_name = 'projectmaster'
+        self._create_test_new_project(
+            project_name,
+            release,
+            release_title,
+            False,
+        )
+
+    def test_create_new_project_kirkstone(self):
+        """ Test create new project using:
+          - Project Name: Any string
+          - Release: Yocto Project 4.0 "Kirkstone" (option value: 1)
+          - Merge Toaster settings: True
+        """
+        release = '1'
+        release_title = 'Yocto Project 4.0 "Kirkstone"'
+        project_name = 'projectkirkstone'
+        self._create_test_new_project(
+            project_name,
+            release,
+            release_title,
+            True,
+        )
+
+    def test_create_new_project_dunfell(self):
+        """ Test create new project using:
+          - Project Name: Any string
+          - Release: Yocto Project 3.1 "Dunfell" (option value: 5)
+          - Merge Toaster settings: False
+        """
+        release = '5'
+        release_title = 'Yocto Project 3.1 "Dunfell"'
+        project_name = 'projectdunfell'
+        self._create_test_new_project(
+            project_name,
+            release,
+            release_title,
+            False,
+        )
+
+    def test_create_new_project_local(self):
+        """ Test create new project using:
+          - Project Name: Any string
+          - Release: Local Yocto Project (option value: 2)
+          - Merge Toaster settings: True
+        """
+        release = '2'
+        release_title = 'Local Yocto Project'
+        project_name = 'projectlocal'
+        self._create_test_new_project(
+            project_name,
+            release,
+            release_title,
+            True,
+        )
+
+    def test_create_new_project_without_name(self):
+        """ Test create new project without project name """
+        self.get(reverse('newproject'))
+
+        select = Select(self.find('#projectversion'))
+        select.select_by_value('3')
+
+        # Check input name has required attribute
+        input_name = self.driver.find_element(By.ID, "new-project-name")
+        self.assertIsNotNone(input_name.get_attribute('required'),
+                        'Input name does not have the required attribute')
+
+        # Check create button is disabled
+        create_btn = self.driver.find_element(By.ID, "create-project-button")
+        self.assertIsNotNone(create_btn.get_attribute('disabled'),
+                        'Create button is not disabled')
+
+    def test_import_new_project(self):
+        """ Test import new project using:
+          - Project Name: Any string
+          - Project type: select (Import command line project)
+          - Import existing project directory: Wrong Path
+        """
+        project_name = 'projectimport'
+        self.get(reverse('newproject'))
+        self.driver.find_element(By.ID,
+                                 "new-project-name").send_keys(project_name)
+        # select import project
+        self.find('#type-import').click()
+
+        # set wrong path
+        wrong_path = '/wrongpath'
+        self.driver.find_element(By.ID,
+                                 "import-project-dir").send_keys(wrong_path)
+        self.driver.find_element(By.ID, "create-project-button").click()
+
+        # check error message
+        self.assertTrue(self.element_exists('.alert-danger'),
+                        'Alert message not shown')
+        self.assertTrue(wrong_path in self.find('.alert-danger').text,
+                        "Wrong path not in alert message")
diff --git a/poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py
index 067ad99..f558cce 100644
--- a/poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py
+++ b/poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py
@@ -7,21 +7,26 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-import re
+import re, time
+from django.urls import reverse
+import pytest
 from tests.functional.functional_helpers import SeleniumFunctionalTestCase
 from orm.models import Project
 from selenium.webdriver.common.by import By
 
+
+@pytest.mark.order("last")
 class FuntionalTestBasic(SeleniumFunctionalTestCase):
 
 #   testcase (1514)
+    @pytest.mark.django_db
     def test_create_slenium_project(self):
         project_name = 'selenium-project'
-        self.get('')
-        self.driver.find_element(By.LINK_TEXT, "To start building, create your first Toaster project").click()
+        self.get(reverse('newproject'))
         self.driver.find_element(By.ID, "new-project-name").send_keys(project_name)
         self.driver.find_element(By.ID, 'projectversion').click()
         self.driver.find_element(By.ID, "create-project-button").click()
+        time.sleep(2)
         element = self.wait_until_visible('#project-created-notification')
         self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown')
         self.assertTrue(project_name in element.text,
@@ -31,15 +36,18 @@
 
  #  testcase (1515)
     def test_verify_left_bar_menu(self):
-        self.get('')
+        self.get(reverse('all-projects'))
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        time.sleep(2)
         self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist')
         project_URL=self.get_URL()
         self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click()
+        time.sleep(2)
 
         try:
             self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
+            time.sleep(2)
             self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly')
         except:
             self.fail(msg='No Custom images tab available')
@@ -78,14 +86,16 @@
     def test_review_configuration_information(self):
         self.get('')
         self.driver.find_element(By.XPATH, "//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
+        time.sleep(2)
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
         project_URL=self.get_URL()
-
+        time.sleep(2)
         try:
            self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
            self.assertTrue(re.search("qemux86",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned')
            self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click()
+           time.sleep(2)
            self.wait_until_visible('#select-machine-form')
            self.wait_until_visible('#cancel-machine-change')
            self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
@@ -123,13 +133,16 @@
     def test_verify_machine_information(self):
         self.get('')
         self.driver.find_element(By.XPATH, "//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
+        time.sleep(2)
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        time.sleep(2)
 
         try:
             self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
             self.assertTrue(re.search("qemux86",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned')
             self.driver.find_element(By.ID, "change-machine-toggle").click()
+            time.sleep(2)
             self.wait_until_visible('#select-machine-form')
             self.wait_until_visible('#cancel-machine-change')
             self.driver.find_element(By.ID, "cancel-machine-change").click()
@@ -140,14 +153,15 @@
     def test_verify_most_built_recipes_information(self):
         self.get('')
         self.driver.find_element(By.XPATH, "//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
-
+        time.sleep(2)
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
         project_URL=self.get_URL()
-
+        time.sleep(2)
         try:
             self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present')
             self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
+            time.sleep(2)
             self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link  is not working  properly')
         except:
             self.fail(msg='No Most built information in project detail page')
@@ -156,8 +170,10 @@
     def test_verify_project_release_information(self):
         self.get('')
         self.driver.find_element(By.XPATH, "//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
+        time.sleep(2)
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+        time.sleep(2)
 
         try:
             self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text),'The project release is not defined')
@@ -171,12 +187,12 @@
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
         project_URL=self.get_URL()
-
+        time.sleep(2)
         try:
            self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
            self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
            layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
-           layers = layer_list.find_element(By.TAG_NAME, "li")
+           layers = layer_list.find_elements(By.TAG_NAME, "li")
 
            for layer in layers:
                if re.match ("openembedded-core",layer.text):
@@ -199,10 +215,11 @@
     def test_verify_project_detail_links(self):
         self.get('')
         self.driver.find_element(By.XPATH, "//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
+        time.sleep(2)
         self.wait_until_visible('#projectstable')
         self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
         project_URL=self.get_URL()
-
+        time.sleep(2)
         self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
         self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
 
diff --git a/poky/bitbake/lib/toaster/tests/functional/test_project_page.py b/poky/bitbake/lib/toaster/tests/functional/test_project_page.py
new file mode 100644
index 0000000..3edf967
--- /dev/null
+++ b/poky/bitbake/lib/toaster/tests/functional/test_project_page.py
@@ -0,0 +1,247 @@
+#! /usr/bin/env python3
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.select import Select
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+
+@pytest.mark.django_db
+class TestProjectPage(SeleniumFunctionalTestCase):
+
+    def setUp(self):
+        super().setUp()
+        release = '3'
+        project_name = 'projectmaster'
+        self._create_test_new_project(
+            project_name,
+            release,
+            False,
+        )
+
+    def _create_test_new_project(
+        self,
+        project_name,
+        release,
+        merge_toaster_settings,
+    ):
+        """ Create/Test new project using:
+          - Project Name: Any string
+          - Release: Any string
+          - Merge Toaster settings: True or False
+        """
+        self.get(reverse('newproject'))
+        self.driver.find_element(By.ID,
+                                 "new-project-name").send_keys(project_name)
+
+        select = Select(self.find('#projectversion'))
+        select.select_by_value(release)
+
+        # check merge toaster settings
+        checkbox = self.find('.checkbox-mergeattr')
+        if merge_toaster_settings:
+            if not checkbox.is_selected():
+                checkbox.click()
+        else:
+            if checkbox.is_selected():
+                checkbox.click()
+
+        self.driver.find_element(By.ID, "create-project-button").click()
+
+    def test_page_header_on_project_page(self):
+        """ Check page header in project page:
+          - AT LEFT -> Logo of Yocto project, displayed, clickable
+          - "Toaster"+" Information icon", displayed, clickable
+          - "Server Icon" + "All builds", displayed, clickable
+          - "Directory Icon" + "All projects", displayed, clickable
+          - "Book Icon" + "Documentation", displayed, clickable
+          - AT RIGHT -> button "New project", displayed, clickable
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # check page header
+        # AT LEFT -> Logo of Yocto project
+        logo = self.driver.find_element(
+            By.XPATH,
+            "//div[@class='toaster-navbar-brand']",
+        )
+        logo_img = logo.find_element(By.TAG_NAME, 'img')
+        self.assertTrue(logo_img.is_displayed(),
+                        'Logo of Yocto project not found')
+        self.assertTrue(
+            '/static/img/logo.png' in str(logo_img.get_attribute('src')),
+            'Logo of Yocto project not found'
+        )
+        # "Toaster"+" Information icon", clickable
+        toaster = self.driver.find_element(
+            By.XPATH,
+            "//div[@class='toaster-navbar-brand']//a[@class='brand']",
+        )
+        self.assertTrue(toaster.is_displayed(), 'Toaster not found')
+        self.assertTrue(toaster.text == 'Toaster')
+        info_sign = self.find('.glyphicon-info-sign')
+        self.assertTrue(info_sign.is_displayed())
+
+        # "Server Icon" + "All builds"
+        all_builds = self.find('#navbar-all-builds')
+        all_builds_link = all_builds.find_element(By.TAG_NAME, 'a')
+        self.assertTrue("All builds" in all_builds_link.text)
+        self.assertTrue(
+            '/toastergui/builds/' in str(all_builds_link.get_attribute('href'))
+        )
+        server_icon = all_builds.find_element(By.TAG_NAME, 'i')
+        self.assertTrue(
+            server_icon.get_attribute('class') == 'glyphicon glyphicon-tasks'
+        )
+        self.assertTrue(server_icon.is_displayed())
+
+        # "Directory Icon" + "All projects"
+        all_projects = self.find('#navbar-all-projects')
+        all_projects_link = all_projects.find_element(By.TAG_NAME, 'a')
+        self.assertTrue("All projects" in all_projects_link.text)
+        self.assertTrue(
+            '/toastergui/projects/' in str(all_projects_link.get_attribute(
+                'href'))
+        )
+        dir_icon = all_projects.find_element(By.TAG_NAME, 'i')
+        self.assertTrue(
+            dir_icon.get_attribute('class') == 'icon-folder-open'
+        )
+        self.assertTrue(dir_icon.is_displayed())
+
+        # "Book Icon" + "Documentation"
+        toaster_docs_link = self.find('#navbar-docs')
+        toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME,
+                                                                'a')
+        self.assertTrue("Documentation" in toaster_docs_link_link.text)
+        self.assertTrue(
+            toaster_docs_link_link.get_attribute('href') == 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual'
+        )
+        book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i')
+        self.assertTrue(
+            book_icon.get_attribute('class') == 'glyphicon glyphicon-book'
+        )
+        self.assertTrue(book_icon.is_displayed())
+
+        # AT RIGHT -> button "New project"
+        new_project_button = self.find('#new-project-button')
+        self.assertTrue(new_project_button.is_displayed())
+        self.assertTrue(new_project_button.text == 'New project')
+        new_project_button.click()
+        self.assertTrue(
+            '/toastergui/newproject/' in str(self.driver.current_url)
+        )
+
+    def test_edit_project_name(self):
+        """ Test edit project name:
+          - Click on "Edit" icon button
+          - Change project name
+          - Click on "Save" button
+          - Check project name is changed
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # click on "Edit" icon button
+        self.wait_until_visible('#project-name-container')
+        edit_button = self.find('#project-change-form-toggle')
+        edit_button.click()
+        project_name_input = self.find('#project-name-change-input')
+        self.assertTrue(project_name_input.is_displayed())
+        project_name_input.clear()
+        project_name_input.send_keys('New Name')
+        self.find('#project-name-change-btn').click()
+
+        # check project name is changed
+        self.wait_until_visible('#project-name-container')
+        self.assertTrue(
+            'New Name' in str(self.find('#project-name-container').text)
+        )
+
+    def test_project_page_tabs(self):
+        """ Test project tabs:
+          - "configuration" tab
+          - "Builds" tab
+          - "Import layers" tab
+          - "New custom image" tab
+          Check search box used to build recipes
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # check "configuration" tab
+        self.wait_until_visible('#topbar-configuration-tab')
+        config_tab = self.find('#topbar-configuration-tab')
+        self.assertTrue(config_tab.get_attribute('class') == 'active')
+        self.assertTrue('Configuration' in config_tab.text)
+        config_tab_link = config_tab.find_element(By.TAG_NAME, 'a')
+        self.assertTrue(
+            f"/toastergui/project/1" in str(config_tab_link.get_attribute(
+                'href'))
+        )
+
+        def get_tabs():
+            # tabs links list
+            return self.driver.find_elements(
+                By.XPATH,
+                '//div[@id="project-topbar"]//li'
+            )
+
+        def check_tab_link(tab_index, tab_name, url):
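+            # re-locate the tab after navigation to avoid holding a
+            # stale element reference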
+            tab = get_tabs()[tab_index]
+            tab_link = tab.find_element(By.TAG_NAME, 'a')
+            self.assertTrue(url in tab_link.get_attribute('href'))
+            self.assertTrue(tab_name in tab_link.text)
+            self.assertTrue(tab.get_attribute('class') == 'active')
+
+        # check "Builds" tab
+        builds_tab = get_tabs()[1]
+        builds_tab.find_element(By.TAG_NAME, 'a').click()
+        check_tab_link(
+            1,
+            'Builds',
+            f"/toastergui/project/1/builds"
+        )
+
+        # check "Import layers" tab
+        import_layers_tab = get_tabs()[2]
+        import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+        check_tab_link(
+            2,
+            'Import layer',
+            f"/toastergui/project/1/importlayer"
+        )
+
+        # check "New custom image" tab
+        new_custom_image_tab = get_tabs()[3]
+        new_custom_image_tab.find_element(By.TAG_NAME, 'a').click()
+        check_tab_link(
+            3,
+            'New custom image',
+            f"/toastergui/project/1/newcustomimage"
+        )
+
+        # check search box can be used to build recipes
+        search_box = self.find('#build-input')
+        search_box.send_keys('core-image-minimal')
+        self.find('#build-button').click()
+        self.wait_until_visible('#latest-builds')
+        latest_builds = self.driver.find_elements(
+            By.XPATH,
+            '//div[@id="latest-builds"]',
+        )
+        last_build = latest_builds[0]
+        self.assertTrue(
+            'core-image-minimal' in str(last_build.text)
+        )
diff --git a/poky/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py b/poky/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
new file mode 100644
index 0000000..23012d7
--- /dev/null
+++ b/poky/bitbake/lib/toaster/tests/functional/test_project_page_tab_config.py
@@ -0,0 +1,578 @@
+#! /usr/bin/env python3
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from time import sleep
+import pytest
+from django.utils import timezone
+from django.urls import reverse
+from selenium.webdriver import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import NoSuchElementException
+from orm.models import Build, Project, Target
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+
+@pytest.mark.django_db
+class TestProjectConfigTab(SeleniumFunctionalTestCase):
+
+    def setUp(self):
+        self.recipe = None
+        super().setUp()
+        release = '3'
+        project_name = 'projectmaster'
+        self._create_test_new_project(
+            project_name,
+            release,
+            False,
+        )
+
+    def _create_test_new_project(
+        self,
+        project_name,
+        release,
+        merge_toaster_settings,
+    ):
+        """ Create/Test new project using:
+          - Project Name: Any string
+          - Release: Any string
+          - Merge Toaster settings: True or False
+        """
+        self.get(reverse('newproject'))
+        self.driver.find_element(By.ID,
+                                 "new-project-name").send_keys(project_name)
+
+        select = Select(self.find('#projectversion'))
+        select.select_by_value(release)
+
+        # check merge toaster settings
+        checkbox = self.find('.checkbox-mergeattr')
+        if merge_toaster_settings:
+            if not checkbox.is_selected():
+                checkbox.click()
+        else:
+            if checkbox.is_selected():
+                checkbox.click()
+
+        self.driver.find_element(By.ID, "create-project-button").click()
+
+    @classmethod
+    def _wait_until_build(cls, state):
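+        # poll the newest entry in #latest-builds until its
+        # data-build-state attribute matches any word in 'state'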
+        while True:
+            try:
+                last_build_state = cls.driver.find_element(
+                    By.XPATH,
+                    '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
+                )
+                build_state = last_build_state.get_attribute(
+                    'data-build-state')
+                state_text = state.lower().split()
+                if any(x in str(build_state).lower() for x in state_text):
+                    break
+            except NoSuchElementException:
+                continue
+            sleep(1)
+
+    def _create_builds(self):
+        # check search box can be used to build recipes
+        search_box = self.find('#build-input')
+        search_box.send_keys('core-image-minimal')
+        self.find('#build-button').click()
+        sleep(1)
+        self.wait_until_visible('#latest-builds')
+        # loop until the build reaches the cloning/parsing state
+        self._wait_until_build('parsing starting cloning')
+        latest_builds = self.driver.find_elements(
+            By.XPATH,
+            '//div[@id="latest-builds"]/div',
+        )
+        last_build = latest_builds[0]
+        self.assertTrue(
+            'core-image-minimal' in str(last_build.text)
+        )
+        cancel_button = last_build.find_element(
+            By.XPATH,
+            '//span[@class="cancel-build-btn pull-right alert-link"]',
+        )
+        cancel_button.click()
+        sleep(1)
+        self._wait_until_build('cancelled')
+
+    def _get_tabs(self):
+        # tabs links list
+        return self.driver.find_elements(
+            By.XPATH,
+            '//div[@id="project-topbar"]//li'
+        )
+
+    def _get_config_nav_item(self, index):
+        config_nav = self.find('#config-nav')
+        return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+    def _get_create_builds(self, **kwargs):
+        """ Create a build and return the build object """
+        # parameters for builds to associate with the projects
+        now = timezone.now()
+        release = '3'
+        project_name = 'projectmaster'
+        self._create_test_new_project(
+            project_name+"2",
+            release,
+            False,
+        )
+
+        self.project1_build_success = {
+            'project': Project.objects.get(id=1),
+            'started_on': now,
+            'completed_on': now,
+            'outcome': Build.SUCCEEDED
+        }
+
+        self.project1_build_failure = {
+            'project': Project.objects.get(id=1),
+            'started_on': now,
+            'completed_on': now,
+            'outcome': Build.FAILED
+        }
+        build1 = Build.objects.create(**self.project1_build_success)
+        build2 = Build.objects.create(**self.project1_build_failure)
+
+        # add some targets to these builds so they have recipe links
+        # (and so we can find the row in the ToasterTable corresponding to
+        # a particular build)
+        Target.objects.create(build=build1, target='foo')
+        Target.objects.create(build=build2, target='bar')
+
+        if kwargs:
+            # Create kwargs.get('success') builds with success status with target
+            # and kwargs.get('failure') builds with failure status with target
+            for i in range(kwargs.get('success', 0)):
+                now = timezone.now()
+                self.project1_build_success['started_on'] = now
+                self.project1_build_success[
+                    'completed_on'] = now - timezone.timedelta(days=i)
+                build = Build.objects.create(**self.project1_build_success)
+                Target.objects.create(build=build,
+                                      target=f'{i}_success_recipe',
+                                      task=f'{i}_success_task')
+
+            for i in range(kwargs.get('failure', 0)):
+                now = timezone.now()
+                self.project1_build_failure['started_on'] = now
+                self.project1_build_failure[
+                    'completed_on'] = now - timezone.timedelta(days=i)
+                build = Build.objects.create(**self.project1_build_failure)
+                Target.objects.create(build=build,
+                                      target=f'{i}_fail_recipe',
+                                      task=f'{i}_fail_task')
+        return build1, build2
+
+    def test_project_config_nav(self):
+        """ Test project config tab navigation:
+        - Check if the menu is displayed and contains the right elements:
+            - Configuration
+            - COMPATIBLE METADATA
+            - Custom images
+            - Image recipes
+            - Software recipes
+            - Machines
+            - Layers
+            - Distro
+            - EXTRA CONFIGURATION
+            - Bitbake variables
+            - Actions
+            - Delete project
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # check if the menu is displayed
+        self.wait_until_visible('#config-nav')
+
+        def check_config_nav_item(index, item_name, url):
+            item = self._get_config_nav_item(index)
+            self.assertTrue(item_name in item.text)
+            self.assertTrue(item.get_attribute('class') == 'active')
+            self.assertTrue(url in self.driver.current_url)
+
+        # check if the menu contains the right elements
+        # COMPATIBLE METADATA
+        compatible_metadata = self._get_config_nav_item(1)
+        self.assertTrue(
+            "compatible metadata" in compatible_metadata.text.lower()
+        )
+        # EXTRA CONFIGURATION
+        extra_configuration = self._get_config_nav_item(8)
+        self.assertTrue(
+            "extra configuration" in extra_configuration.text.lower()
+        )
+        # Actions
+        actions = self._get_config_nav_item(10)
+        self.assertTrue("actions" in str(actions.text).lower())
+
+        conf_nav_list = [
+            [0, 'Configuration', "/toastergui/project/1"],
+            [2, 'Custom images', "/toastergui/project/1/customimages"],
+            [3, 'Image recipes', "/toastergui/project/1/images"],
+            [4, 'Software recipes', "/toastergui/project/1/softwarerecipes"],
+            [5, 'Machines', "/toastergui/project/1/machines"],
+            [6, 'Layers', "/toastergui/project/1/layers"],
+            [7, 'Distro', "/toastergui/project/1/distro"],
+            [9, 'BitBake variables', "/toastergui/project/1/configuration"],
+        ]
+        for index, item_name, url in conf_nav_list:
+            item = self._get_config_nav_item(index)
+            if item.get_attribute('class') != 'active':
+                item.click()
+            check_config_nav_item(index, item_name, url)
+
+    def test_project_config_tab_right_section(self):
+        """ Test project config tab right section contains five blocks:
+            - Machine:
+                - check 'Machine' is displayed
+                - check can change Machine
+            - Distro:
+                - check 'Distro' is displayed
+                - check can change Distro
+            - Most built recipes:
+                - check 'Most built recipes' is displayed
+                - check can select a recipe and build it
+            - Project release:
+                - check 'Project release' is displayed
+                - check project has right release displayed
+            - Layers:
+                - check can add a layer if exists
+                - check the three default layers are displayed
+                    - openembedded-core
+                    - meta-poky
+                    - meta-yocto-bsp
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # check if the menu is displayed
+        self.wait_until_visible('#project-page')
+        block_l = self.driver.find_element(
+            By.XPATH, '//*[@id="project-page"]/div[2]')
+        machine = self.find('#machine-section')
+        distro = self.find('#distro-section')
+        most_built_recipes = self.driver.find_element(
+            By.XPATH, '//*[@id="project-page"]/div[1]/div[3]')
+        project_release = self.driver.find_element(
+            By.XPATH, '//*[@id="project-page"]/div[1]/div[4]')
+        layers = block_l.find_element(By.ID, 'layer-container')
+
+        def check_machine_distro(item_name, new_item_name, block):
+            title = block.find_element(By.TAG_NAME, 'h3')
+            self.assertTrue(item_name.capitalize() in title.text)
+            edit_btn = block.find_element(By.ID, f'change-{item_name}-toggle')
+            edit_btn.click()
+            sleep(1)
+            name_input = block.find_element(By.ID, f'{item_name}-change-input')
+            name_input.clear()
+            name_input.send_keys(new_item_name)
+            change_btn = block.find_element(By.ID, f'{item_name}-change-btn')
+            change_btn.click()
+            sleep(1)
+            project_name = block.find_element(By.ID, f'project-{item_name}-name')
+            self.assertTrue(new_item_name in project_name.text)
+            # check the change notification is displayed
+            change_notification = self.find('#change-notification')
+            self.assertTrue(
+                f'You have changed the {item_name} to: {new_item_name}' in change_notification.text
+            )
+
+        # Machine
+        check_machine_distro('machine', 'qemux86-64', machine)
+        # Distro
+        check_machine_distro('distro', 'poky-altcfg', distro)
+
+        # Project release
+        title = project_release.find_element(By.TAG_NAME, 'h3')
+        self.assertTrue("Project release" in title.text)
+        self.assertTrue(
+            "Yocto Project master" in self.find('#project-release-title').text
+        )
+
+        # Layers
+        title = layers.find_element(By.TAG_NAME, 'h3')
+        self.assertTrue("Layers" in title.text)
+        # check the three default layers are displayed:
+        # openembedded-core
+        # meta-poky
+        # meta-yocto-bsp
+        layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+        layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+        self.assertTrue(len(layers_list_items) == 3)
+        # check can add a layer if exists
+        add_layer_input = layers.find_element(By.ID, 'layer-add-input')
+        add_layer_input.send_keys('meta-oe')
+        self.wait_until_visible('#layer-container > form > div > span > div')
+        dropdown_item = self.driver.find_element(
+            By.XPATH,
+            '//*[@id="layer-container"]/form/div/span/div'
+        )
+        dropdown_item.click()
+        add_layer_btn = layers.find_element(By.ID, 'add-layer-btn')
+        add_layer_btn.click()
+        sleep(1)
+        # check layer is added
+        layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+        self.assertTrue(len(layers_list_items) == 4)
+
+        # Most built recipes
+        title = most_built_recipes.find_element(By.TAG_NAME, 'h3')
+        self.assertTrue("Most built recipes" in title.text)
+        # Create new builds
+        self._create_builds()
+
+        # Refresh the page
+        self.get(url)
+
+        sleep(1)  # wait for page to load
+        self.wait_until_visible('#project-page')
+        # check can select a recipe and build it
+        most_built_recipes = self.driver.find_element(
+            By.XPATH, '//*[@id="project-page"]/div[1]/div[3]')
+        recipe_list = most_built_recipes.find_element(By.ID, 'freq-build-list')
+        recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li')
+        self.assertTrue(
+            len(recipe_list_items) > 0,
+            msg="No recipes found in the most built recipes list",
+        )
+        checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input')
+        checkbox.click()
+        build_btn = self.find('#freq-build-btn')
+        build_btn.click()
+        sleep(1)  # wait for page to load
+        self.wait_until_visible('#latest-builds')
+        self._wait_until_build('parsing starting cloning queueing')
+        latest_builds = self.driver.find_elements(
+            By.XPATH,
+            '//div[@id="latest-builds"]/div'
+        )
+        last_build = latest_builds[0]
+        cancel_button = last_build.find_element(
+            By.XPATH,
+            '//span[@class="cancel-build-btn pull-right alert-link"]',
+        )
+        cancel_button.click()
+        self.assertTrue(len(latest_builds) == 2)
+
+    def test_project_page_tab_importlayer(self):
+        """ Test project page tab import layer """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # navigate to "Import layers" tab
+        import_layers_tab = self._get_tabs()[2]
+        import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+        self.wait_until_visible('#layer-git-repo-url')
+
+        # Check git repo radio button
+        git_repo_radio = self.find('#git-repo-radio')
+        git_repo_radio.click()
+
+        # Set git repo url
+        input_repo_url = self.find('#layer-git-repo-url')
+        input_repo_url.send_keys('git://git.yoctoproject.org/meta-fake')
+        # Blur the input to trigger the validation
+        input_repo_url.send_keys(Keys.TAB)
+
+        # Check name is set
+        input_layer_name = self.find('#import-layer-name')
+        self.assertTrue(input_layer_name.get_attribute('value') == 'meta-fake')
+
+        # Set branch
+        input_branch = self.find('#layer-git-ref')
+        input_branch.send_keys('master')
+
+        # Import layer
+        self.find('#import-and-add-btn').click()
+
+        # Check layer is added
+        self.wait_until_visible('#layer-container')
+        block_l = self.driver.find_element(
+            By.XPATH, '//*[@id="project-page"]/div[2]')
+        layers = block_l.find_element(By.ID, 'layer-container')
+        layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+        layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+        self.assertTrue(
+            'meta-fake' in str(layers_list_items[-1].text)
+        )
+
+    def test_project_page_custom_image_no_image(self):
+        """ Test project page tab "New custom image" when no custom image """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+
+        # navigate to "Custom image" tab
+        custom_image_section = self._get_config_nav_item(2)
+        custom_image_section.click()
+        self.wait_until_visible('#empty-state-customimagestable')
+
+        # Check message when no custom image
+        self.assertTrue(
+            "You have not created any custom images yet." in str(
+                self.find('#empty-state-customimagestable').text
+            )
+        )
+        div_empty_msg = self.find('#empty-state-customimagestable')
+        link_create_custom_image = div_empty_msg.find_element(
+            By.TAG_NAME, 'a')
+        self.assertTrue(
+            f"/toastergui/project/1/newcustomimage" in str(
+                link_create_custom_image.get_attribute('href')
+            )
+        )
+        self.assertTrue(
+            "Create your first custom image" in str(
+                link_create_custom_image.text
+            )
+        )
+
+    def test_project_page_image_recipe(self):
+        """ Test project page section images
+            - Check image recipes are displayed
+            - Check search input
+            - Check image recipe build button works
+            - Check image recipe table features (show/hide column, pagination)
+        """
+        # navigate to the project page
+        url = reverse("project", args=(1,))
+        self.get(url)
+        self.wait_until_visible('#config-nav')
+
+        # navigate to "Images section"
+        images_section = self._get_config_nav_item(3)
+        images_section.click()
+        self.wait_until_visible('#imagerecipestable')
+        rows = self.find_all('#imagerecipestable tbody tr')
+        self.assertTrue(len(rows) > 0)
+
+        # Test search input
+        self.wait_until_visible('#search-input-imagerecipestable')
+        recipe_input = self.find('#search-input-imagerecipestable')
+        recipe_input.send_keys('core-image-minimal')
+        self.find('#search-submit-imagerecipestable').click()
+        self.wait_until_visible('#imagerecipestable tbody tr')
+        rows = self.find_all('#imagerecipestable tbody tr')
+        self.assertTrue(len(rows) > 0)
+
+        # Test build button
+        image_to_build = rows[0]
+        build_btn = image_to_build.find_element(
+            By.XPATH,
+            '//td[@class="add-del-layers"]'
+        )
+        build_btn.click()
+        self._wait_until_build('parsing starting cloning')
+        latest_builds = self.driver.find_elements(
+            By.XPATH,
+            '//div[@id="latest-builds"]/div'
+        )
+        self.assertTrue(len(latest_builds) > 0)
+
+    def test_image_recipe_editColumn(self):
+        """ Test the edit column feature in image recipe table on project page """
+        self._get_create_builds(success=10, failure=10)
+
+        def test_edit_column(check_box_id):
+            # Check that we can hide/show table column
+            check_box = self.find(f'#{check_box_id}')
+            th_class = str(check_box_id).replace('checkbox-', '')
+            if check_box.is_selected():
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#imagerecipestable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+                check_box.click()
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#imagerecipestable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+            else:
+                # check if column is hidden in table
+                self.assertFalse(
+                    self.find(
+                        f'#imagerecipestable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+                )
+                check_box.click()
+                # check if column is visible in table
+                self.assertTrue(
+                    self.find(
+                        f'#imagerecipestable thead th.{th_class}'
+                    ).is_displayed(),
+                    f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+                )
+
+        url = reverse('projectimagerecipes', args=(1,))
+        self.get(url)
+        self.wait_until_present('#imagerecipestable tbody tr')
+
+        # Check edit column
+        edit_column = self.find('#edit-columns-button')
+        self.assertTrue(edit_column.is_displayed())
+        edit_column.click()
+        # Check dropdown is visible
+        self.wait_until_visible('ul.dropdown-menu.editcol')
+
+        # Check that each column can be hidden and shown again
+        test_edit_column('checkbox-get_description_or_summary')
+        test_edit_column('checkbox-layer_version__get_vcs_reference')
+        test_edit_column('checkbox-layer_version__layer__name')
+        test_edit_column('checkbox-license')
+        test_edit_column('checkbox-recipe-file')
+        test_edit_column('checkbox-section')
+        test_edit_column('checkbox-version')
+
+    def test_image_recipe_show_rows(self):
+        """ Test the show rows feature in image recipe table on project page """
+        self._get_create_builds(success=100, failure=100)
+
+        def test_show_rows(row_to_show, show_row_link):
+            # Check that the table shows exactly row_to_show rows
+            show_row_link.select_by_value(str(row_to_show))
+            self.wait_until_present('#imagerecipestable tbody tr')
+            sleep(1)
+            self.assertTrue(
+                len(self.find_all('#imagerecipestable tbody tr')) == row_to_show
+            )
+
+        url = reverse('projectimagerecipes', args=(2,))
+        self.get(url)
+        self.wait_until_present('#imagerecipestable tbody tr')
+
+        show_rows = self.driver.find_elements(
+            By.XPATH,
+            '//select[@class="form-control pagesize-imagerecipestable"]'
+        )
+        # Check show rows
+        for show_row_link in show_rows:
+            show_row_link = Select(show_row_link)
+            test_show_rows(10, show_row_link)
+            test_show_rows(25, show_row_link)
+            test_show_rows(50, show_row_link)
+            test_show_rows(100, show_row_link)
+            test_show_rows(150, show_row_link)
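
The closing lines of _wait_until_build open this hunk; the helper polls the most
recent entry in the latest-builds area until its state text matches one of the
requested states. A minimal standalone sketch of that polling pattern, with
hypothetical names, not the patch's exact body:

    # Sketch only: the polling loop the tests above rely on, assuming a
    # live Selenium driver; the real helper lives earlier in this patch.
    from time import sleep
    from selenium.common.exceptions import NoSuchElementException
    from selenium.webdriver.common.by import By

    def wait_until_build(driver, states, timeout=60):
        """Poll the newest latest-builds row until its state text contains
        one of the space-separated states, e.g. 'parsing starting cloning'."""
        for _ in range(timeout):
            try:
                state = driver.find_element(
                    By.XPATH,
                    '//div[@id="latest-builds"]/div[1]',  # newest build row
                ).text.lower()
                if any(s in state for s in states.split()):
                    break
            except NoSuchElementException:
                continue
            sleep(1)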
diff --git a/poky/bitbake/lib/toaster/tests/toaster-tests-requirements.txt b/poky/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
index f30ac07..71cc083 100644
--- a/poky/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
+++ b/poky/bitbake/lib/toaster/tests/toaster-tests-requirements.txt
@@ -1 +1,7 @@
 selenium>=4.13.0
+pytest==7.4.2
+pytest-django==4.5.2
+pytest-env==1.1.0
+pytest-html==4.0.2
+pytest-metadata==3.0.0
+pytest-order==1.1.0
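
pytest-order, pinned above, is what honours the @pytest.mark.order(1) marker
added to ViewTests in the next hunk: marked tests run in marker order rather
than definition order. A minimal sketch of the effect, assuming only the
pinned plugin versions:

    import pytest

    @pytest.mark.order(2)
    def test_second():
        assert True

    @pytest.mark.order(1)
    def test_first():  # runs before test_second despite being defined later
        assert True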
diff --git a/poky/bitbake/lib/toaster/tests/views/test_views.py b/poky/bitbake/lib/toaster/tests/views/test_views.py
index f962e76..349881e 100644
--- a/poky/bitbake/lib/toaster/tests/views/test_views.py
+++ b/poky/bitbake/lib/toaster/tests/views/test_views.py
@@ -9,6 +9,7 @@
 
 """Test cases for Toaster GUI and ReST."""
 
+import pytest
 from django.test import TestCase
 from django.test.client import RequestFactory
 from django.urls import reverse
@@ -33,6 +34,7 @@
 CLI_BUILDS_PROJECT_NAME = 'Command line builds'
 
 
+@pytest.mark.order(1)
 class ViewTests(TestCase):
     """Tests to verify view APIs."""
 
@@ -41,7 +43,15 @@
     def setUp(self):
 
         self.project = Project.objects.first()
+
         self.recipe1 = Recipe.objects.get(pk=2)
+        # create a file and assign it to recipe1's file_path
+        file_path = f"/tmp/{self.recipe1.name.strip().replace(' ', '-')}.bb"
+        with open(file_path, 'w') as f:
+            f.write('foo')
+        self.recipe1.file_path = file_path
+        self.recipe1.save()
+
         self.customr = CustomImageRecipe.objects.first()
         self.cust_package = CustomImagePackage.objects.first()
         self.package = Package.objects.first()
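
The setUp() change above writes a throwaway .bb file under /tmp so recipe1
carries a real file_path. A tempfile-based variant of the same step would
avoid name collisions between concurrent runs; a sketch under that assumption
(make_recipe_file is a hypothetical helper, not part of the patch):

    import tempfile

    def make_recipe_file(recipe):
        # Write the dummy recipe to a unique temporary .bb file instead
        # of a fixed /tmp path, then point the recipe at it.
        tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.bb',
                                          delete=False)
        tmp.write('foo')
        tmp.close()
        recipe.file_path = tmp.name
        recipe.save()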
diff --git a/poky/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml b/poky/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
index 4517ed1..df10693 100644
--- a/poky/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
+++ b/poky/bitbake/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
@@ -6,10 +6,22 @@
       <field type="CharField" name="dirpath">b</field>
       <field type="CharField" name="branch">a</field>
   </object>
+  <object pk="1" model="orm.distro">
+     <field type="DateTimeField" name="up_date"><None></None></field>
+     <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
+     <field type="CharField" name="name">poky_distro1</field>
+     <field type="CharField" name="description">poky_distro1 description</field>
+  </object>
+  <object pk="2" model="orm.distro">
+     <field type="DateTimeField" name="up_date"><None></None></field>
+     <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field>
+     <field type="CharField" name="name">poky_distro2</field>
+     <field type="CharField" name="description">poky_distro2 description</field>
+  </object>
   <object pk="1" model="orm.release">
-     <field type="CharField" name="name">master</field>
+     <field type="CharField" name="name">foo_master</field>
      <field type="CharField" name="description">master project</field>
-     <field to="orm.bitbake_version" name="bitbake_version">1</field>
+     <field to="orm.bitbakeversion" name="bitbake_version">1</field>
   </object>
   <object pk="1" model="orm.project">
     <field type="CharField" name="name">a test project</field>
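
The fixture hunk above adds two orm.distro rows and renames release pk=1 to
foo_master. Django test cases pull such XML fixtures in through the TestCase
fixtures attribute; a minimal sketch, assuming the fixture file is
discoverable in the app's fixtures directory:

    from django.test import TestCase
    from orm.models import Distro

    class DistroFixtureTests(TestCase):
        fixtures = ['toastergui-unittest-data']  # loads the XML above

        def test_distros_loaded(self):
            names = set(Distro.objects.values_list('name', flat=True))
            self.assertIn('poky_distro1', names)
            self.assertIn('poky_distro2', names)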
diff --git a/poky/bitbake/lib/toaster/toastergui/static/js/bootstrap.js b/poky/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
similarity index 84%
rename from poky/bitbake/lib/toaster/toastergui/static/js/bootstrap.js
rename to poky/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
index d47d640..170bd60 100644
--- a/poky/bitbake/lib/toaster/toastergui/static/js/bootstrap.js
+++ b/poky/bitbake/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
@@ -1,6 +1,6 @@
 /*!
- * Bootstrap v3.3.6 (http://getbootstrap.com)
- * Copyright 2011-2016 Twitter, Inc.
+ * Bootstrap v3.4.1 (https://getbootstrap.com/)
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under the MIT license
  */
 
@@ -11,16 +11,16 @@
 +function ($) {
   'use strict';
   var version = $.fn.jquery.split(' ')[0].split('.')
-  if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) {
-    throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3')
+  if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
+    throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
   }
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: transition.js v3.3.6
- * http://getbootstrap.com/javascript/#transitions
+ * Bootstrap: transition.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#transitions
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -28,7 +28,7 @@
 +function ($) {
   'use strict';
 
-  // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
+  // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/)
   // ============================================================
 
   function transitionEnd() {
@@ -50,7 +50,7 @@
     return false // explicit for ie8 (  ._.)
   }
 
-  // http://blog.alexmaccaw.com/css-transitions
+  // https://blog.alexmaccaw.com/css-transitions
   $.fn.emulateTransitionEnd = function (duration) {
     var called = false
     var $el = this
@@ -77,10 +77,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: alert.js v3.3.6
- * http://getbootstrap.com/javascript/#alerts
+ * Bootstrap: alert.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#alerts
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -96,7 +96,7 @@
     $(el).on('click', dismiss, this.close)
   }
 
-  Alert.VERSION = '3.3.6'
+  Alert.VERSION = '3.4.1'
 
   Alert.TRANSITION_DURATION = 150
 
@@ -109,7 +109,8 @@
       selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
     }
 
-    var $parent = $(selector)
+    selector    = selector === '#' ? [] : selector
+    var $parent = $(document).find(selector)
 
     if (e) e.preventDefault()
 
@@ -172,10 +173,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: button.js v3.3.6
- * http://getbootstrap.com/javascript/#buttons
+ * Bootstrap: button.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#buttons
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -192,7 +193,7 @@
     this.isLoading = false
   }
 
-  Button.VERSION  = '3.3.6'
+  Button.VERSION  = '3.4.1'
 
   Button.DEFAULTS = {
     loadingText: 'loading...'
@@ -214,10 +215,10 @@
 
       if (state == 'loadingText') {
         this.isLoading = true
-        $el.addClass(d).attr(d, d)
+        $el.addClass(d).attr(d, d).prop(d, true)
       } else if (this.isLoading) {
         this.isLoading = false
-        $el.removeClass(d).removeAttr(d)
+        $el.removeClass(d).removeAttr(d).prop(d, false)
       }
     }, this), 0)
   }
@@ -281,10 +282,15 @@
 
   $(document)
     .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
-      var $btn = $(e.target)
-      if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
+      var $btn = $(e.target).closest('.btn')
       Plugin.call($btn, 'toggle')
-      if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault()
+      if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
+        // Prevent double click on radios, and the double selections (so cancellation) on checkboxes
+        e.preventDefault()
+        // The target component still receive the focus
+        if ($btn.is('input,button')) $btn.trigger('focus')
+        else $btn.find('input:visible,button:visible').first().trigger('focus')
+      }
     })
     .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
       $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
@@ -293,10 +299,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: carousel.js v3.3.6
- * http://getbootstrap.com/javascript/#carousel
+ * Bootstrap: carousel.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#carousel
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -324,7 +330,7 @@
       .on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
   }
 
-  Carousel.VERSION  = '3.3.6'
+  Carousel.VERSION  = '3.4.1'
 
   Carousel.TRANSITION_DURATION = 600
 
@@ -438,7 +444,9 @@
     var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
     if ($.support.transition && this.$element.hasClass('slide')) {
       $next.addClass(type)
-      $next[0].offsetWidth // force reflow
+      if (typeof $next === 'object' && $next.length) {
+        $next[0].offsetWidth // force reflow
+      }
       $active.addClass(direction)
       $next.addClass(direction)
       $active
@@ -500,10 +508,17 @@
   // =================
 
   var clickHandler = function (e) {
-    var href
     var $this   = $(this)
-    var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
+    var href    = $this.attr('href')
+    if (href) {
+      href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
+    }
+
+    var target  = $this.attr('data-target') || href
+    var $target = $(document).find(target)
+
     if (!$target.hasClass('carousel')) return
+
     var options = $.extend({}, $target.data(), $this.data())
     var slideIndex = $this.attr('data-slide-to')
     if (slideIndex) options.interval = false
@@ -531,13 +546,14 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: collapse.js v3.3.6
- * http://getbootstrap.com/javascript/#collapse
+ * Bootstrap: collapse.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#collapse
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
+/* jshint latedef: false */
 
 +function ($) {
   'use strict';
@@ -561,7 +577,7 @@
     if (this.options.toggle) this.toggle()
   }
 
-  Collapse.VERSION  = '3.3.6'
+  Collapse.VERSION  = '3.4.1'
 
   Collapse.TRANSITION_DURATION = 350
 
@@ -668,7 +684,7 @@
   }
 
   Collapse.prototype.getParent = function () {
-    return $(this.options.parent)
+    return $(document).find(this.options.parent)
       .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
       .each($.proxy(function (i, element) {
         var $element = $(element)
@@ -691,7 +707,7 @@
     var target = $trigger.attr('data-target')
       || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
 
-    return $(target)
+    return $(document).find(target)
   }
 
 
@@ -743,10 +759,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: dropdown.js v3.3.6
- * http://getbootstrap.com/javascript/#dropdowns
+ * Bootstrap: dropdown.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#dropdowns
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -763,7 +779,7 @@
     $(element).on('click.bs.dropdown', this.toggle)
   }
 
-  Dropdown.VERSION = '3.3.6'
+  Dropdown.VERSION = '3.4.1'
 
   function getParent($this) {
     var selector = $this.attr('data-target')
@@ -773,7 +789,7 @@
       selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
     }
 
-    var $parent = selector && $(selector)
+    var $parent = selector !== '#' ? $(document).find(selector) : null
 
     return $parent && $parent.length ? $parent : $this.parent()
   }
@@ -909,10 +925,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: modal.js v3.3.6
- * http://getbootstrap.com/javascript/#modals
+ * Bootstrap: modal.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#modals
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -924,15 +940,16 @@
   // ======================
 
   var Modal = function (element, options) {
-    this.options             = options
-    this.$body               = $(document.body)
-    this.$element            = $(element)
-    this.$dialog             = this.$element.find('.modal-dialog')
-    this.$backdrop           = null
-    this.isShown             = null
-    this.originalBodyPad     = null
-    this.scrollbarWidth      = 0
+    this.options = options
+    this.$body = $(document.body)
+    this.$element = $(element)
+    this.$dialog = this.$element.find('.modal-dialog')
+    this.$backdrop = null
+    this.isShown = null
+    this.originalBodyPad = null
+    this.scrollbarWidth = 0
     this.ignoreBackdropClick = false
+    this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom'
 
     if (this.options.remote) {
       this.$element
@@ -943,7 +960,7 @@
     }
   }
 
-  Modal.VERSION  = '3.3.6'
+  Modal.VERSION = '3.4.1'
 
   Modal.TRANSITION_DURATION = 300
   Modal.BACKDROP_TRANSITION_DURATION = 150
@@ -960,7 +977,7 @@
 
   Modal.prototype.show = function (_relatedTarget) {
     var that = this
-    var e    = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
+    var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
 
     this.$element.trigger(e)
 
@@ -1050,7 +1067,9 @@
     $(document)
       .off('focusin.bs.modal') // guard against infinite focus loop
       .on('focusin.bs.modal', $.proxy(function (e) {
-        if (this.$element[0] !== e.target && !this.$element.has(e.target).length) {
+        if (document !== e.target &&
+          this.$element[0] !== e.target &&
+          !this.$element.has(e.target).length) {
           this.$element.trigger('focus')
         }
       }, this))
@@ -1152,7 +1171,7 @@
     var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
 
     this.$element.css({
-      paddingLeft:  !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
+      paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
       paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
     })
   }
@@ -1177,11 +1196,26 @@
   Modal.prototype.setScrollbar = function () {
     var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
     this.originalBodyPad = document.body.style.paddingRight || ''
-    if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
+    var scrollbarWidth = this.scrollbarWidth
+    if (this.bodyIsOverflowing) {
+      this.$body.css('padding-right', bodyPad + scrollbarWidth)
+      $(this.fixedContent).each(function (index, element) {
+        var actualPadding = element.style.paddingRight
+        var calculatedPadding = $(element).css('padding-right')
+        $(element)
+          .data('padding-right', actualPadding)
+          .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px')
+      })
+    }
   }
 
   Modal.prototype.resetScrollbar = function () {
     this.$body.css('padding-right', this.originalBodyPad)
+    $(this.fixedContent).each(function (index, element) {
+      var padding = $(element).data('padding-right')
+      $(element).removeData('padding-right')
+      element.style.paddingRight = padding ? padding : ''
+    })
   }
 
   Modal.prototype.measureScrollbar = function () { // thx walsh
@@ -1199,8 +1233,8 @@
 
   function Plugin(option, _relatedTarget) {
     return this.each(function () {
-      var $this   = $(this)
-      var data    = $this.data('bs.modal')
+      var $this = $(this)
+      var data = $this.data('bs.modal')
       var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
 
       if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
@@ -1211,7 +1245,7 @@
 
   var old = $.fn.modal
 
-  $.fn.modal             = Plugin
+  $.fn.modal = Plugin
   $.fn.modal.Constructor = Modal
 
 
@@ -1228,10 +1262,13 @@
   // ==============
 
   $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
-    var $this   = $(this)
-    var href    = $this.attr('href')
-    var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
-    var option  = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
+    var $this = $(this)
+    var href = $this.attr('href')
+    var target = $this.attr('data-target') ||
+      (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
+
+    var $target = $(document).find(target)
+    var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
 
     if ($this.is('a')) e.preventDefault()
 
@@ -1247,18 +1284,148 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: tooltip.js v3.3.6
- * http://getbootstrap.com/javascript/#tooltip
+ * Bootstrap: tooltip.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tooltip
  * Inspired by the original jQuery.tipsy by Jason Frame
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
-
 +function ($) {
   'use strict';
 
+  var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn']
+
+  var uriAttrs = [
+    'background',
+    'cite',
+    'href',
+    'itemtype',
+    'longdesc',
+    'poster',
+    'src',
+    'xlink:href'
+  ]
+
+  var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i
+
+  var DefaultWhitelist = {
+    // Global attributes allowed on any supplied element below.
+    '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
+    a: ['target', 'href', 'title', 'rel'],
+    area: [],
+    b: [],
+    br: [],
+    col: [],
+    code: [],
+    div: [],
+    em: [],
+    hr: [],
+    h1: [],
+    h2: [],
+    h3: [],
+    h4: [],
+    h5: [],
+    h6: [],
+    i: [],
+    img: ['src', 'alt', 'title', 'width', 'height'],
+    li: [],
+    ol: [],
+    p: [],
+    pre: [],
+    s: [],
+    small: [],
+    span: [],
+    sub: [],
+    sup: [],
+    strong: [],
+    u: [],
+    ul: []
+  }
+
+  /**
+   * A pattern that recognizes a commonly useful subset of URLs that are safe.
+   *
+   * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+   */
+  var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi
+
+  /**
+   * A pattern that matches safe data URLs. Only matches image, video and audio types.
+   *
+   * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+   */
+  var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i
+
+  function allowedAttribute(attr, allowedAttributeList) {
+    var attrName = attr.nodeName.toLowerCase()
+
+    if ($.inArray(attrName, allowedAttributeList) !== -1) {
+      if ($.inArray(attrName, uriAttrs) !== -1) {
+        return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN))
+      }
+
+      return true
+    }
+
+    var regExp = $(allowedAttributeList).filter(function (index, value) {
+      return value instanceof RegExp
+    })
+
+    // Check if a regular expression validates the attribute.
+    for (var i = 0, l = regExp.length; i < l; i++) {
+      if (attrName.match(regExp[i])) {
+        return true
+      }
+    }
+
+    return false
+  }
+
+  function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) {
+    if (unsafeHtml.length === 0) {
+      return unsafeHtml
+    }
+
+    if (sanitizeFn && typeof sanitizeFn === 'function') {
+      return sanitizeFn(unsafeHtml)
+    }
+
+    // IE 8 and below don't support createHTMLDocument
+    if (!document.implementation || !document.implementation.createHTMLDocument) {
+      return unsafeHtml
+    }
+
+    var createdDocument = document.implementation.createHTMLDocument('sanitization')
+    createdDocument.body.innerHTML = unsafeHtml
+
+    var whitelistKeys = $.map(whiteList, function (el, i) { return i })
+    var elements = $(createdDocument.body).find('*')
+
+    for (var i = 0, len = elements.length; i < len; i++) {
+      var el = elements[i]
+      var elName = el.nodeName.toLowerCase()
+
+      if ($.inArray(elName, whitelistKeys) === -1) {
+        el.parentNode.removeChild(el)
+
+        continue
+      }
+
+      var attributeList = $.map(el.attributes, function (el) { return el })
+      var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || [])
+
+      for (var j = 0, len2 = attributeList.length; j < len2; j++) {
+        if (!allowedAttribute(attributeList[j], whitelistedAttributes)) {
+          el.removeAttribute(attributeList[j].nodeName)
+        }
+      }
+    }
+
+    return createdDocument.body.innerHTML
+  }
+
   // TOOLTIP PUBLIC CLASS DEFINITION
   // ===============================
 
@@ -1274,7 +1441,7 @@
     this.init('tooltip', element, options)
   }
 
-  Tooltip.VERSION  = '3.3.6'
+  Tooltip.VERSION  = '3.4.1'
 
   Tooltip.TRANSITION_DURATION = 150
 
@@ -1291,7 +1458,10 @@
     viewport: {
       selector: 'body',
       padding: 0
-    }
+    },
+    sanitize : true,
+    sanitizeFn : null,
+    whiteList : DefaultWhitelist
   }
 
   Tooltip.prototype.init = function (type, element, options) {
@@ -1299,7 +1469,7 @@
     this.type      = type
     this.$element  = $(element)
     this.options   = this.getOptions(options)
-    this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
+    this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
     this.inState   = { click: false, hover: false, focus: false }
 
     if (this.$element[0] instanceof document.constructor && !this.options.selector) {
@@ -1332,7 +1502,15 @@
   }
 
   Tooltip.prototype.getOptions = function (options) {
-    options = $.extend({}, this.getDefaults(), this.$element.data(), options)
+    var dataAttributes = this.$element.data()
+
+    for (var dataAttr in dataAttributes) {
+      if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) {
+        delete dataAttributes[dataAttr]
+      }
+    }
+
+    options = $.extend({}, this.getDefaults(), dataAttributes, options)
 
     if (options.delay && typeof options.delay == 'number') {
       options.delay = {
@@ -1341,6 +1519,10 @@
       }
     }
 
+    if (options.sanitize) {
+      options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn)
+    }
+
     return options
   }
 
@@ -1452,7 +1634,7 @@
         .addClass(placement)
         .data('bs.' + this.type, this)
 
-      this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
+      this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element)
       this.$element.trigger('inserted.bs.' + this.type)
 
       var pos          = this.getPosition()
@@ -1554,7 +1736,16 @@
     var $tip  = this.tip()
     var title = this.getTitle()
 
-    $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
+    if (this.options.html) {
+      if (this.options.sanitize) {
+        title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn)
+      }
+
+      $tip.find('.tooltip-inner').html(title)
+    } else {
+      $tip.find('.tooltip-inner').text(title)
+    }
+
     $tip.removeClass('fade in top bottom left right')
   }
 
@@ -1565,9 +1756,11 @@
 
     function complete() {
       if (that.hoverState != 'in') $tip.detach()
-      that.$element
-        .removeAttr('aria-describedby')
-        .trigger('hidden.bs.' + that.type)
+      if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary.
+        that.$element
+          .removeAttr('aria-describedby')
+          .trigger('hidden.bs.' + that.type)
+      }
       callback && callback()
     }
 
@@ -1610,7 +1803,10 @@
       // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
       elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
     }
-    var elOffset  = isBody ? { top: 0, left: 0 } : $element.offset()
+    var isSvg = window.SVGElement && el instanceof window.SVGElement
+    // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3.
+    // See https://github.com/twbs/bootstrap/issues/20280
+    var elOffset  = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset())
     var scroll    = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
     var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
 
@@ -1726,9 +1922,13 @@
       that.$tip = null
       that.$arrow = null
       that.$viewport = null
+      that.$element = null
     })
   }
 
+  Tooltip.prototype.sanitizeHtml = function (unsafeHtml) {
+    return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn)
+  }
 
   // TOOLTIP PLUGIN DEFINITION
   // =========================
@@ -1762,10 +1962,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: popover.js v3.3.6
- * http://getbootstrap.com/javascript/#popovers
+ * Bootstrap: popover.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#popovers
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -1782,7 +1982,7 @@
 
   if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
 
-  Popover.VERSION  = '3.3.6'
+  Popover.VERSION  = '3.4.1'
 
   Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
     placement: 'right',
@@ -1808,10 +2008,25 @@
     var title   = this.getTitle()
     var content = this.getContent()
 
-    $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
-    $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events
-      this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text'
-    ](content)
+    if (this.options.html) {
+      var typeContent = typeof content
+
+      if (this.options.sanitize) {
+        title = this.sanitizeHtml(title)
+
+        if (typeContent === 'string') {
+          content = this.sanitizeHtml(content)
+        }
+      }
+
+      $tip.find('.popover-title').html(title)
+      $tip.find('.popover-content').children().detach().end()[
+        typeContent === 'string' ? 'html' : 'append'
+      ](content)
+    } else {
+      $tip.find('.popover-title').text(title)
+      $tip.find('.popover-content').children().detach().end().text(content)
+    }
 
     $tip.removeClass('fade top bottom left right in')
 
@@ -1830,8 +2045,8 @@
 
     return $e.attr('data-content')
       || (typeof o.content == 'function' ?
-            o.content.call($e[0]) :
-            o.content)
+        o.content.call($e[0]) :
+        o.content)
   }
 
   Popover.prototype.arrow = function () {
@@ -1871,10 +2086,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: scrollspy.js v3.3.6
- * http://getbootstrap.com/javascript/#scrollspy
+ * Bootstrap: scrollspy.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#scrollspy
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -1900,7 +2115,7 @@
     this.process()
   }
 
-  ScrollSpy.VERSION  = '3.3.6'
+  ScrollSpy.VERSION  = '3.4.1'
 
   ScrollSpy.DEFAULTS = {
     offset: 10
@@ -2044,10 +2259,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: tab.js v3.3.6
- * http://getbootstrap.com/javascript/#tabs
+ * Bootstrap: tab.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tabs
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -2064,7 +2279,7 @@
     // jscs:enable requireDollarBeforejQueryAssignment
   }
 
-  Tab.VERSION = '3.3.6'
+  Tab.VERSION = '3.4.1'
 
   Tab.TRANSITION_DURATION = 150
 
@@ -2093,7 +2308,7 @@
 
     if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
 
-    var $target = $(selector)
+    var $target = $(document).find(selector)
 
     this.activate($this.closest('li'), $ul)
     this.activate($target, $target.parent(), function () {
@@ -2118,15 +2333,15 @@
       $active
         .removeClass('active')
         .find('> .dropdown-menu > .active')
-          .removeClass('active')
+        .removeClass('active')
         .end()
         .find('[data-toggle="tab"]')
-          .attr('aria-expanded', false)
+        .attr('aria-expanded', false)
 
       element
         .addClass('active')
         .find('[data-toggle="tab"]')
-          .attr('aria-expanded', true)
+        .attr('aria-expanded', true)
 
       if (transition) {
         element[0].offsetWidth // reflow for transition
@@ -2138,10 +2353,10 @@
       if (element.parent('.dropdown-menu').length) {
         element
           .closest('li.dropdown')
-            .addClass('active')
+          .addClass('active')
           .end()
           .find('[data-toggle="tab"]')
-            .attr('aria-expanded', true)
+          .attr('aria-expanded', true)
       }
 
       callback && callback()
@@ -2200,10 +2415,10 @@
 }(jQuery);
 
 /* ========================================================================
- * Bootstrap: affix.js v3.3.6
- * http://getbootstrap.com/javascript/#affix
+ * Bootstrap: affix.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#affix
  * ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
  * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
  * ======================================================================== */
 
@@ -2217,7 +2432,9 @@
   var Affix = function (element, options) {
     this.options = $.extend({}, Affix.DEFAULTS, options)
 
-    this.$target = $(this.options.target)
+    var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target)
+
+    this.$target = target
       .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
       .on('click.bs.affix.data-api',  $.proxy(this.checkPositionWithEventLoop, this))
 
@@ -2229,7 +2446,7 @@
     this.checkPosition()
   }
 
-  Affix.VERSION  = '3.3.6'
+  Affix.VERSION  = '3.4.1'
 
   Affix.RESET    = 'affix affix-top affix-bottom'