subtree updates Jan-13-2023
meta-openembedded: d04444509a..cd13881611:
Alex Kiernan (10):
mdns: Upgrade 1310.140.1 -> 1790.40.31
mdns: Set MDNS_VERSIONSTR_NODTS
mdns: Upgrade 1790.40.31 -> 1790.60.25
ostree: Upgrade 2022.5 -> 2022.7
ostree: Use systemd_system_unitdir for systemd units
ostree: Switch to fuse3 which is supported in ostree now
ostree: Fix comments for configuration/ptest
ostree: Handle musl's ERANGE mapping
usbguard: Remove pegtl from DEPENDS
usbguard: Upgrade 1.1.1 -> 1.1.2
Alex Stewart (2):
gvfs: stylize DEPENDS
gvfs: obviate the ssh-client requirement for gvfs
Alexander Kanavin (5):
frr: add a patch to correctly check presence of python from pkg-config
lirc: correctly use PYTHONPATH
libportal: move to oe-core
packagegroup-meta-python: drop python3-strict-rfc3339
nftables: fix builds with latest setuptools
Alexander Stein (1):
dool: Add patch to fix rebuild
Archana Polampalli (1):
Nodejs - Upgrade to 16.18.1
Bartosz Golaszewski (3):
python3-kmod: new package
python3-watchdogdev: new package
packagegroup-meta-python: add missing packages
Bruce Ashfield (1):
zfs: update to 2.1.7
Changqing Li (5):
linuxptp: fix do_compile error
keyutils: fix ptest failed since "+++ Can't Determine Endianness"
graphviz: Do not build tcl support for native
redis: 6.2.7 -> 6.2.8
redis: 7.0.5 -> 7.0.7
Chen Pei (2):
suitesparse: fix git branch in SRC_URI
botan: upgrade 2.19.2 -> 2.19.3
Chen Qi (4):
xfce4-verve-plugin: fix do_configure failure about missing libpcre
networkmanager: fix dhcpcd PACKAGECONFIG
networkmanager: install config files into correct place
networkmanager: fix /etc/resolv.conf handling
Christian Eggers (1):
boost-url: remove recipe
Clément Péron (3):
navigation: bump proj to 9.1.0 library
proj: add a packageconfig to build as a static library
proj: avoid leaking host path in libproj
Devendra Tewari (1):
android-tools: Use echo instead of bbnote
Dmitry Baryshkov (1):
nss: fix cross-compilation error
Erwann Roussy (3):
python3-schedutils: add recipe
python3-linux-procfs: add recipe
tuna: add recipe
Fabio Estevam (2):
remmina: Update to 1.4.28
crucible: Upgrade to 2022.12.06
Geoff Parker (1):
python3-yappi: upgrade 1.3.6 -> 1.4.0, python 3.11 compatible
Gerbrand De Laender (1):
python3-aioserial: new package
Gianfranco Costamagna (2):
vbxguestdrivers: upgrade 7.0.2 -> 7.0.4
boinc-client: Update boinc from 7.18.1 to 7.20.4
Gianluigi Spagnuolo (1):
libbpf: add native and nativesdk BBCLASSEXTEND
Hains van den Bosch (2):
python3-twisted: Add python3-asyncio to RDEPENDS
python3-twisted: Add python3-typing-extensions to RDEPENDS
He Zhe (1):
protobuf: upgrade 3.21.5 -> 3.21.10
Jose Quaresma (1):
lshw: bump to 42fef565
Kai Kang (31):
freeradius: fix multilib systemd service start failure
wxwidgets: 3.1.5 -> 3.2.1
python3-attrdict3: add recipe with version 2.0.2
python3-wxgtk4: 4.1.1 -> 4.2.0
xfce4-settings: 4.16.3 -> 4.16.5
python3-m2crypto: fix CVE-2020-25657 and buildpaths qa issue
fixup! wxwidgets: 3.1.5 -> 3.2.1
postfix: fix multilib conflict of sample-main.cf
python3-wxgtk4: replace deprecated inspect.getargspec
libxfce4ui: 4.16.1 -> 4.18.0
thunar-volman: 4.16.0 -> 4.18.0
xfce4-cpufreq-plugin: 1.2.7 -> 1.2.8
xfce4-wavelan-plugin: 0.6.2 -> 0.6.3
xfce4-cpugraph-plugin: 1.2.6 -> 1.2.7
xfce4-sensors-plugin: 1.4.3 -> 1.4.4
thunar-shares-plugin: Bump GLib minimum required to 2.26
xfce4-dev-tools: 4.16.0 -> 4.18.0
libxfce4util: 4.16.0 -> 4.18.0
exo: 4.16.4 -> 4.18.0
garcon: 4.16.1 -> 4.18.0
xfce4-panel: 4.16.3 -> 4.18.0
thunar: 4.16.9 -> 4.18.0
tumbler: 4.16.0 -> 4.18.0
xfconf: 4.16.0 -> 4.18.0
xfce4-appfinder: 4.16.1 -> 4.18.0
xfce4-settings: 4.16.5 -> 4.18.0
xfce4-power-manager: 4.16.0 -> 4.18.0
xfce4-session: 4.16.0 -> 4.18.0
xfwm4: 4.16.1 -> 4.18.0
xfdesktop: 4.16.0 -> 4.18.0
xorg-lib: set XORG_EXT for recipes
Khem Raj (91):
gnome-text-editor: Add missing libpcre build time dependency
ettercap: Add missing dependency on libpcre
xcb-util-cursor: Update to 0.1.4
lldpd: Use github release assets for SRC_URI
aufs-util: Fix build with large file support enabled systems
volume-key: Inherit python3targetconfig
proj: Enable apps when building native variant
python3-pyproj: Export PROJ_DIR
satyr: Inherit python3targetconfig
rest: Re-add 0.8.1
gfbgraph: Use rest 0.8.1
audit: Inherit python3targetconfig
opensaf: Check for _FILE_OFFSET_BITS instead of __TIMESIZE
flite: Add missing deps on alsa-lib and chrpath
python3-pystemd: Regenerate .c sources using newer cython
libreport: Inherit python3targetconfig
uw-imap: Disable parallelism
gnome-calendar: Upgrade to 43.1
gnome-photos: Upgrade to 43.0
libgweather: Remove 40.0
waf-samba.bbclass: point PYTHON_CONFIG to target python3-config
amtk: Add missing dep on python3-pygments-native
fontforge: Inherit python3targetconfig
tepl: Add missing dep on python3-pygments-native
alsa-oss: Remove recipe
opencv: Check for commercial_ffmpeg as well to enable ffmpeg
opencv: Fix build with ffmpeg 5.1+
fwts: Upgrade to 22.11.00
minio: Disable on mips
sip: Add recipe for 6.7.5
imapfilter: Upgrade to 2.7.6
perfetto: Do not pass TUNE_CCARGS to native/host compiler
stressapptest: Upgrade to latest tip
mariadb: Upgrade to 10.11.1
surf: Depend on gcr3
fatcat: Enable 64bit off_t
stressapptest: Fix build with largefile support and musl
nspr: Upgrade to 4.35
cryptsetup: Upgrade to 2.6.0
libyui,libyui-ncurses: Upgrade to 4.2.3
inotify-tools: Fix build on musl and lfs64
sdbus-c++-libsystemd: Upgrade to 250.9 systemd release
xfsprogs: Upgrade to 6.0.0
drbd,drbd-utils: Upgrade to 9.2.1 and drbd-utils to 9.22.0
libtraceevent: Add recipe
libtracefs: Add recipe
trace-cmd: Remove use of off64_t and lseek64
xfsdump: Add -D_LARGEFILE64_SOURCE on musl
xfstests: Add -D_LARGEFILE64_SOURCE on musl
mariadb: Alias lseek64/open64/ftruncate64 on musl systems
gperftools: Define off64_t on musl
android-tools: Define lseek64 = lseek on musl
php: Add -D_LARGEFILE64_SOURCE to cflags
spice-gtk: Use libucontext for coroutines on musl
wxwidgets: Fix build with musl
wxwidgets: Fix locale on musl
wxwidgets: Set HAVE_LARGEFILE_SUPPORT
python3-wxgtk4: Do not use GetAssertStackTrace with USE_STACKWALKER disabled
f2fs-tools: Upgrade to 1.15.0
trace-cmd: Pass ldflags to compiler
parole: Define DATADIRNAME
abseil-cpp: Replace off64_t with off_t
vsftpd_3.0.5.bb: Define _LARGEFILE64_SOURCE on musl
mozjs-102: Disable mozilla stackwalk on musl
fatresize: Fix build when 64bit time_t is enabled
boinc-client: Fix build when using 64bit time_t
python3-grpcio: Define -D_LARGEFILE64_SOURCE only for musl
gnome-online-accounts: Fix build race seen on musl systems
imagemagick: Do not set ac_cv_sys_file_offset_bits
spdlog: Do not use LFS64 functions with musl
mongodb: Do not use off64_t on musl
dracut: Do not undefine _FILE_OFFSET_BITS
libcamera: Disable 64bit time_t on glibc targets
v4l-utils: Disable 64bit time_t on glibc targets
opensaf: Fix the check for __fsblkcnt64_t size
libcereal,poco: Link with -latomic on ppc32 as well
sshpass: Use SPDX identified string for GPLv2
nftables: Upgrade to 1.0.6
mycroft: Check for pulseaudio in distro features
trace-cmd: Build libs before building rest
open-vm-tools: Fix build with 64-bit time_t
libtraceevent: Move plugins into package of its own
trace-cmd: Upgrade to 3.1.5
luajit: Update to latest on v2.1 branch
concurrencykit: Update to 0.7.0
concurrencykit: Set correct PLAT value for riscv32
concurrencykit: Fix build on riscv32 and riscv64
sysbench: Enable only on architectures supporting LuaJIT
packagegroup-meta-oe: Ensure sysbench is included in limited arches
hwloc: Update to 2.9.0
fluentbit: Link with libatomic on ppc32
Lei Maohui (1):
polkit: Fix multilib builds
Leon Anavi (9):
python3-watchdog: Upgrade 2.2.0 -> 2.2.1
python3-zeroconf: Upgrade 0.39.4 -> 0.47.1
python3-croniter: Upgrade 1.3.7 -> 1.3.8
python3-coverage: Upgrade 7.0.1 -> 7.0.3
python3-prompt-toolkit: Upgrade 3.0.31 -> 3.0.36
python3-simplejson: Upgrade 3.18.0 -> 3.18.1
python3-termcolor: Upgrade 2.1.1 -> 2.2.0
python3-cantools: Upgrade 37.2.0 -> 38.0.0
python3-marshmallow: Upgrade 3.18.0 -> 3.19.0
Livin Sunny (1):
libwebsockets: add ipv6 in PACKAGECONFIG
Markus Volk (88):
blueman: add RDEPEND on python3-fcntl
hwdata: add patch to use sysroot prefix for pkgdatadir
pipewire: upgrade 0.3.59 -> 0.3.60
spirv-cross: upgrade; fix build
blueman: upgrade 2.34 -> 2.35
pipewire: upgrade 0.3.60 -> 0.3.61
iwd: upgrade 1.30 -> 2.0
libgdata: use gcr3
libgweather: update 4.0.0 -> 4.2.0
gnome-online-accounts: use gcr3
geary: build with gcr3
gnome-keyring: use gcr3
evolution-data-server: update 3.44.2 -> 3.46.1
gnome-settings-daemon: update 42.1 -> 43.0
libnma: update 1.8.38 -> 1.10.4
geocode-glib: build with libsoup-3.0
gjs: update 1.72.2 -> 1.75.1
gnome-shell: update 42.0 -> 43.1
mutter: update 42.0 -> 43.1
polkit: add recipe for v122
mozjs: update 98 -> 102
appstream-glib: update 0.7.18 -> 0.8.2
gthumb: build with libsoup-3
amtk: update 5.3.1 -> 5.6.1
gedit: update 42.2 -> 43.2
evolution-data-server: remove libgdata dependency
tepl: update 6.0.0 -> 6.2.0
perfetto: pass TUNE_CCARGS to use machine tune
gnome-photos: update dependencies
thunar-archive-plugin: update 0.4.0 -> 0.5.0
libadwaita: remove deprecated sassc-native dependency
gnome-shell: remove deprecated sassc-native dependency
spice-gtk: add missing license information
pipewire: update 0.3.61 -> 0.3.62
gdm: update 42.0 -> 43.0
gnome-session: update 42.0 -> 43.0
geoclue: update to latest commit to allow to build with libsoup-3.0
gvfs: fix polkit homedir
editorconfig: add recipe
tracker: update 3.4.1 -> 3.4.2
gvfs: fix dependencies
gnome-calculator: update 42.2 -> 43.0.1
tracker-miners: update 3.4.1 -> 3.4.2
gnome-photos: add missing runtime dependency on tracker-miners
gtksourceview5: update 5.4.2 -> 5.6.1
remmina: build with libsoup-3.0
ostree: replace libsoup-2.4 by curl
gnome-text-editor: update 42.2 -> 43.1
gtk4: remove recipe
libxmlb: allow to build native
pipewire: update 0.3.62 -> 0.3.63
gnome-shell-extensions: update SRC_URI and remove sassc-native dep
grilo: update 0.3.14 -> 0.3.15
libstemmer: move recipe to meta-oe
xdg-desktop-portal: add recipe
bubblewrap: import recipe from meta-security
gnome-software: add recipe
basu: import recipe from meta-wayland
xdg-desktop-portal-wlr: add recipe
appstream: add recipe
flatpak: add recipe
flatpak-xdg-utils: add recipe
flatpak: add runtime dependency on flatpak-xdg-utils
wireplumber: update 0.4.12 -> 0.4.13
wireplumber: build with dbus support by default
xdg-desktop-portal-gnome: add recipe
libcloudproviders: add recipe
evince: update 42.3 -> 43.1
libportal: build libportal-gtk4 and vala support
nautilus: update 42.2 -> 43.1
gnome-desktop: update 42.0 -> 43
file-roller: update 3.42.0 -> 43.0
wireplumber: don't start systemd system service by default
gnome-bluetooth: update 42.4 -> 42.5
gnome-flashback: update 3.44.0 -> 3.46.0
libwnck3: update 40.1 -> 43.0
gnome-panel: update 3.44.0 -> 3.47.1
gnome-terminal: update 3.42.2 -> 3.46.7
dconf-editor: update 3.38.3 -> 43.0
gnome-shell: add missing RDEPENDS
gnome-control-center: update 42.0 -> 43.2
gnome-shell: add runtime dependency on adwaita-icon-theme
xdg-desktop-portal-gtk: add recipe
thunar: add tumbler to RRECOMMENDS
gnome-terminal: add missing inherit meson
gnome-disk-utility: update 42.0 -> 43.0
eog: add recipe
libdecor: import recipe
Martin Jansa (3):
nss: fix SRC_URI
geoclue: fix polkit files only with modem-gps PACKAGECONFIG
layer.conf: update LAYERSERIES_COMPAT for mickledore
Mathieu Dubois-Briand (2):
nss: Add missing CVE product
nss: Whitelist CVEs related to libnssdbm
Matthias Klein (1):
paho-mqtt-c: upgrade 1.3.11 -> 1.3.12
Max Krummenacher (1):
opencv: follow changed name license_flags_accepted
Mingli Yu (25):
gnome-calculator: add opengl to REQUIRED_DISTRO_FEATURES
waylandpp: add opengl to REQUIRED_DISTRO_FEATURES
libnma: add opengl to REQUIRED_DISTRO_FEATURES
network-manager-applet: add opengl to REQUIRED_DISTRO_FEATURES
gssdp: check opengl is enabled or not
gtksourceview5: add opengl to REQUIRED_DISTRO_FEATURES
gnome-font-viewer: add opengl to REQUIRED_DISTRO_FEATURES
libxfce4ui: check opengl DISTRO_FEATURES
gnome-desktop: add opengl to REQUIRED_DISTRO_FEATURES
ibus: add opengl related check
nautilus: add opengl to REQUIRED_DISTRO_FEATURES
gnome-bluetooth: add opengl to REQUIRED_DISTRO_FEATURES
evince: add opengl to REQUIRED_DISTRO_FEATURES
gnome-calendar: add opengl to REQUIRED_DISTRO_FEATURES
xf86-video-amdgpu: add opengl to REQUIRED_DISTRO_FEATURES
spice-gtk: add opengl to REQUIRED_DISTRO_FEATURES
grail: add opengl to REQUIRED_DISTRO_FEATURES
frame: add opengl to REQUIRED_DISTRO_FEATURES
geis: add opengl to REQUIRED_DISTRO_FEATURES
evolution-data-server: add opengl to REQUIRED_DISTRO_FEATURES
libgweather4: add opengl to REQUIRED_DISTRO_FEATURES
geary: add opengl to REQUIRED_DISTRO_FEATURES
file-roller: add opengl to REQUIRED_DISTRO_FEATURES
gnome-photos: add opengl to REQUIRED_DISTRO_FEATURES
xdg-desktop-portal-wlr: add opengl to REQUIRED_DISTRO_FEATURES
Naveen Saini (3):
opencl-headers: add native and nativesdk
tcsh: add native nativesdk BBCLASSEXTEND
tbb: upgrade 2021.5.0 -> 2021.7.0
Omkar Patil (1):
ntfs-3g-ntfsprogs: Upgrade 2022.5.17 to 2022.10.3
Ovidiu Panait (1):
multipath-tools: upgrade 0.8.4 -> 0.9.3
Peter Bergin (1):
sysbench: Upgrade 0.4.12 -> 1.0.20
Peter Kjellerstedt (4):
chrony: Make it possible to enable editline support again
chrony: Remove the libcap and nss PACKAGECONFIGs
Revert "lldpd: Use github release assets for SRC_URI"
lldpd: Correct the checksum for the tar ball to match 1.0.16
Preeti Sachan (1):
fluidsynth: update SRC_URI to remove non-existing 2.2.x branch
Roger Knecht (1):
python3-rapidjson: add recipe
Sakib Sajal (1):
minio: fix license information
Samuli Piippo (1):
protobuf: stage protoc binary to sysroot
Tim Orling (4):
libio-pty-perl: upgrade 1.16 -> 1.17; enable ptest
libmozilla-ca-perl: add recipe for 20221114
libio-socket-ssl-perl: upgrade 2.075 -> 2.076
libtest-warnings-perl: move to oe-core
Tomasz Żyjewski (2):
python3-binwalk: add recipe for version 2.3.3
python3-uefi-firmware: add recipe for version 1.9
Wang Mingyu (190):
byacc: upgrade 20220128 -> 20221106
libforms: upgrade 1.2.4 -> 1.2.5pre1
libnftnl: upgrade 1.2.3 -> 1.2.4
mpich: upgrade 4.0.2 -> 4.0.3
python3-u-msgpack-python: upgrade 2.7.1 -> 2.7.2
python3-aiosignal: upgrade 1.2.0 -> 1.3.1
python3-eth-hash: upgrade 0.5.0 -> 0.5.1
python3-frozenlist: upgrade 1.3.1 -> 1.3.3
python3-google-auth: upgrade 2.14.0 -> 2.14.1
python3-greenlet: upgrade 2.0.0 -> 2.0.1
python3-imageio: upgrade 2.22.3 -> 2.22.4
python3-pycocotools: upgrade 2.0.5 -> 2.0.6
babl: upgrade 0.1.96 -> 0.1.98
ctags: upgrade 5.9.20221106.0 -> 5.9.20221113.0
gegl: upgrade 0.4.38 -> 0.4.40
freerdp: upgrade 2.8.1 -> 2.9.0
glibmm-2.68: upgrade 2.72.1 -> 2.74.0
googlebenchmark: upgrade 1.7.0 -> 1.7.1
gnome-backgrounds: upgrade 42.0 -> 43
nano: upgrade 6.4 -> 7.0
networkmanager-openvpn: upgrade 1.10.0 -> 1.10.2
python3-django: upgrade 4.1 -> 4.1.3
python3-flask-migrate: upgrade 3.1.0 -> 4.0.0
python3-eth-utils: upgrade 2.0.0 -> 2.1.0
python3-eventlet: upgrade 0.33.1 -> 0.33.2
python3-googleapis-common-protos: upgrade 1.56.4 -> 1.57.0
python3-google-api-python-client: upgrade 2.65.0 -> 2.66.0
python3-pymongo: upgrade 4.3.2 -> 4.3.3
lldpd: upgrade 1.0.15 -> 1.0.16
audit: upgrade 3.0.8 -> 3.0.9
ccid: upgrade 1.5.0 -> 1.5.1
colord: upgrade 1.4.5 -> 1.4.6
ctags: upgrade 5.9.20221113.0 -> 5.9.20221120.0
flatbuffers: upgrade 22.10.26 -> 22.11.23
libglvnd: upgrade 1.5.0 -> 1.6.0
gensio: upgrade 2.5.2 -> 2.6.1
mg: upgrade 20220614 -> 20221112
nbdkit: upgrade 1.33.2 -> 1.33.3
xfstests: upgrade 2022.10.30 -> 2022.11.06
pcsc-lite: upgrade 1.9.8 -> 1.9.9
python3-matplotlib-inline: upgrade 0.1.2 -> 0.1.6
python3-astroid: upgrade 2.12.12 -> 2.12.13
python3-asyncinotify: upgrade 2.0.5 -> 2.0.8
python3-charset-normalizer: upgrade 3.0.0 -> 3.0.1
python3-dateparser: upgrade 1.1.0 -> 1.1.4
python3-can: upgrade 4.0.0 -> 4.1.0
python3-flask-socketio: upgrade 5.3.1 -> 5.3.2
python3-ipython: upgrade 8.2.0 -> 8.6.0
python3-langtable: upgrade 0.0.60 -> 0.0.61
python3-jedi: upgrade 0.18.1 -> 0.18.2
python3-grpcio-tools: upgrade 1.50.0 -> 1.51.0
python3-grpcio: upgrade 1.50.0 -> 1.51.0
python3-networkx: upgrade 2.8.7 -> 2.8.8
python3-pyatspi: upgrade 2.38.2 -> 2.46.0
python3-pandas: upgrade 1.5.1 -> 1.5.2
python3-pybind11-json: upgrade 0.2.11 -> 0.2.13
python3-pychromecast: upgrade 12.1.4 -> 13.0.1
python3-pycodestyle: upgrade 2.9.1 -> 2.10.0
xterm: upgrade 373 -> 377
smarty: upgrade 4.2.1 -> 4.3.0
spdlog: upgrade 1.10.0 -> 1.11.0
python3-pyperf: upgrade 2.4.1 -> 2.5.0
python3-pyflakes: upgrade 2.5.0 -> 3.0.1
python3-pymisp: upgrade 2.4.157 -> 2.4.165.1
capnproto: upgrade 0.10.2 -> 0.10.3
libass: upgrade 0.16.0 -> 0.17.0
ctags: upgrade 5.9.20221120.0 -> 5.9.20221127.0
libio-socket-ssl-perl: upgrade 2.076 -> 2.077
python3-grpcio-tools: upgrade 1.51.0 -> 1.51.1
python3-asyncinotify: upgrade 2.0.8 -> 3.0.1
python3-grpcio: upgrade 1.51.0 -> 1.51.1
opensc: upgrade 0.22.0 -> 0.23.0
python3-ipython: upgrade 8.6.0 -> 8.7.0
ply: upgrade 2.2.0 -> 2.3.0
python3-apt: upgrade 2.3.0 -> 2.5.0
poppler: upgrade 22.11.0 -> 22.12.0
python3-asttokens: upgrade 2.1.0 -> 2.2.0
python3-cbor2: upgrade 5.4.3 -> 5.4.5
python3-geomet: upgrade 0.3.0 -> 1.0.0
python3-google-api-core: upgrade 2.10.2 -> 2.11.0
python3-google-api-python-client: upgrade 2.66.0 -> 2.68.0
python3-path: upgrade 16.5.0 -> 16.6.0
python3-google-auth: upgrade 2.14.1 -> 2.15.0
zabbix: upgrade 6.2.4 -> 6.2.5
xmlsec1: upgrade 1.2.36 -> 1.2.37
smcroute: upgrade 2.5.5 -> 2.5.6
python3-protobuf: upgrade 4.21.9 -> 4.21.10
python3-traitlets: upgrade 5.5.0 -> 5.6.0
python3-twine: upgrade 4.0.1 -> 4.0.2
python3-web3: upgrade 5.31.1 -> 5.31.2
python3-ujson: upgrade 5.5.0 -> 5.6.0
ctags: upgrade 5.9.20221127.0 -> 5.9.20221204.0
dnsmasq: upgrade 2.87 -> 2.88
flatbuffers: upgrade 22.11.23 -> 22.12.06
nbdkit: upgrade 1.33.3 -> 1.33.4
hwdata: upgrade 0.364 -> 0.365
evolution-data-server: update 3.46.1 -> 3.46.2
xfstests: upgrade 2022.11.06 -> 2022.11.27
python3-protobuf: upgrade 4.21.10 -> 4.21.11
python3-traitlets: upgrade 5.6.0 -> 5.7.0
python3-redis: upgrade 4.3.5 -> 4.4.0
python3-web3: upgrade 5.31.2 -> 5.31.3
python3-asttokens: upgrade 2.2.0 -> 2.2.1
python3-cbor2: upgrade 5.4.5 -> 5.4.6
python3-google-api-python-client: upgrade 2.68.0 -> 2.69.0
python3-gmpy2: upgrade 2.1.2 -> 2.1.3
python3-multidict: upgrade 6.0.2 -> 6.0.3
python3-watchdog: upgrade 2.1.9 -> 2.2.0
python3-pychromecast: upgrade 13.0.1 -> 13.0.2
python3-pymisp: upgrade 2.4.165.1 -> 2.4.166
python3-pytest-xdist: upgrade 3.0.2 -> 3.1.0
python3-yarl: upgrade 1.8.1 -> 1.8.2
zabbix: upgrade 6.2.5 -> 6.2.6
python3-yamlloader: upgrade 1.1.0 -> 1.2.2
tio: upgrade 2.3 -> 2.4
ctags: upgrade 5.9.20221204.0 -> 6.0.20221218.0
dash: upgrade 0.5.11.5 -> 0.5.12
nanopb: upgrade 0.4.6.4 -> 0.4.7
libio-socket-ssl-perl: upgrade 2.077 -> 2.078
libfile-slurper-perl: upgrade 0.013 -> 0.014
protobuf: upgrade 3.21.10 -> 3.21.12
python3-alembic: upgrade 1.8.1 -> 1.9.0
nano: upgrade 7.0 -> 7.1
python3-gmpy2: upgrade 2.1.3 -> 2.1.5
python3-eth-account: upgrade 0.7.0 -> 0.8.0
python3-google-api-python-client: upgrade 2.69.0 -> 2.70.0
python3-protobuf: upgrade 4.21.11 -> 4.21.12
python3-pycares: upgrade 4.2.2 -> 4.3.0
python3-pycurl: upgrade 7.45.1 -> 7.45.2
python3-pychromecast: upgrade 13.0.2 -> 13.0.4
python3-pyproj: upgrade 3.4.0 -> 3.4.1
python3-pydicti: upgrade 1.1.6 -> 1.2.0
python3-sentry-sdk: upgrade 1.11.1 -> 1.12.0
python3-traitlets: upgrade 5.7.0 -> 5.7.1
tio: upgrade 2.4 -> 2.5
python3-sqlalchemy: upgrade 1.4.44 -> 1.4.45
xfsdump: upgrade 3.1.11 -> 3.1.12
python3-isort: upgrade 5.10.1 -> 5.11.3
xfstests: upgrade 2022.11.27 -> 2022.12.11
ctags: upgrade 6.0.20221218.0 -> 6.0.20221225.0
gst-editing-services: upgrade 1.20.4 -> 1.20.5
logcheck: upgrade 1.3.24 -> 1.4.0
memtester: upgrade 4.5.1 -> 4.6.0
libmime-types-perl: upgrade 2.22 -> 2.23
metacity: upgrade 3.46.0 -> 3.46.1
python3-alembic: upgrade 1.9.0 -> 1.9.1
xfstests: upgrade 2022.12.11 -> 2022.12.18
python3-cytoolz: upgrade 0.12.0 -> 0.12.1
python3-asgiref: upgrade 3.5.2 -> 3.6.0
python3-autobahn: upgrade 22.7.1 -> 22.12.1
python3-coverage: upgrade 6.5.0 -> 7.0.1
python3-bitarray: upgrade 2.6.0 -> 2.6.1
python3-imageio: upgrade 2.22.4 -> 2.23.0
python3-isort: upgrade 5.11.3 -> 5.11.4
python3-multidict: upgrade 6.0.3 -> 6.0.4
python3-traitlets: upgrade 5.7.1 -> 5.8.0
python3-pymisp: upgrade 2.4.166 -> 2.4.167
python3-sentry-sdk: upgrade 1.12.0 -> 1.12.1
python3-supervisor: upgrade 4.2.4 -> 4.2.5
wolfssl: upgrade 5.5.3 -> 5.5.4
remmina: upgrade 1.4.28 -> 1.4.29
ser2net: upgrade 4.3.10 -> 4.3.11
tesseract: upgrade 5.2.0 -> 5.3.0
network-manager-applet: upgrade 1.26.0 -> 1.30.0
byacc: upgrade 20221106 -> 20221229
ctags: upgrade 6.0.20221225.0 -> 6.0.20230101.0
flashrom: upgrade 1.2 -> 1.2.1
fontforge: upgrade 20220308 -> 20230101
hunspell: upgrade 1.7.1 -> 1.7.2
libmime-types-perl: upgrade 2.23 -> 2.24
libnet-dns-perl: upgrade 1.35 -> 1.36
tepl: upgrade 6.2.0 -> 6.4.0
tcpdump: upgrade 4.99.1 -> 4.99.2
traceroute: upgrade 2.1.0 -> 2.1.1
openwsman: upgrade 2.7.1 -> 2.7.2
pcsc-tools: upgrade 1.6.0 -> 1.6.1
poppler: upgrade 22.12.0 -> 23.01.0
rsnapshot: upgrade 1.4.4 -> 1.4.5
tree: upgrade 2.0.4 -> 2.1.0
python3-bidict: upgrade 0.22.0 -> 0.22.1
python3-bitarray: upgrade 2.6.1 -> 2.6.2
python3-dateparser: upgrade 1.1.4 -> 1.1.5
python3-lz4: upgrade 4.0.2 -> 4.3.2
python3-mock: upgrade 4.0.3 -> 5.0.0
python3-pillow: upgrade 9.3.0 -> 9.4.0
python3-pydantic: upgrade 1.10.2 -> 1.10.4
python3-pyephem: upgrade 4.1.3 -> 4.1.4
python3-xlsxwriter: upgrade 3.0.3 -> 3.0.5
python3-xxhash: upgrade 3.1.0 -> 3.2.0
dnf-plugins/rpm.py: Fix grammar when RPM_PREFER_ELF_ARCH doesn't exist.
Xiangyu Chen (1):
lldpd: add ptest for lldpd package
Yi Zhao (13):
libpwquality: set correct pam plugin directory
ostree: add runtime dependency bubblewrap for PACKAGECONFIG[selinux]
ostree: fix selinux policy rebuild error on first deployment
frr: upgrade 8.3.1 -> 8.4.1
open-vm-tools: upgrade 12.1.0 -> 12.1.5
libtdb: upgrade 1.4.3 -> 1.4.7
libldb: upgrade 2.3.4 -> 2.6.1
libtalloc: upgrade 2.3.3 -> 2.3.4
libtevent: upgrade 0.10.2 -> 0.13.0
samba: upgrade 4.14.14 -> 4.17.4
krb5: upgrade 1.17.2 -> 1.20.1
grubby: update to latest git rev
grubby: drop version 8.40
Zheng Qiu (1):
python3-inotify: add ptest
persianpros (1):
samba: Remove samba related PYTHONHASHSEED patches and use export function
zhengrq.fnst@fujitsu.com (15):
python3-pymodbus: upgrade 3.0.0 -> 3.0.2
python3-pywbemtools: upgrade 1.0.1 -> 1.1.0
python3-stevedore: upgrade 4.1.0 -> 4.1.1
ser2net: upgrade 4.3.9 -> 4.3.10
yelp-tools: upgrade 42.0 -> 42.1
python3-python-vlc: upgrade 3.0.16120 -> 3.0.18121
python3-sqlalchemy: upgrade 1.4.43 -> 1.4.44
python3-zopeinterface: upgrade 5.5.1 -> 5.5.2
python3-simplejson: upgrade 3.17.6 -> 3.18.0
python3-pywbemtools: upgrade 1.0.1 -> 1.1.1
python3-redis: upgrade 4.3.4 -> 4.3.5
python3-texttable: upgrade 1.6.4 -> 1.6.7
python3-sentry-sdk: upgrade 1.9.10 -> 1.11.1
python3-twitter: upgrade 4.10.1 -> 4.12.1
python3-termcolor: upgrade 2.1.0 -> 2.1.1
meta-security: 2aa48e6f4e..f991b20f56:
Alex Kiernan (1):
bubblewrap: Update 0.6.2 -> 0.7.0
Armin Kuster (2):
python3-privacyidea: update to 2.7.4
chipsec: update to 1.9.1
Michael Haener (1):
tpm2-tools: update to 5.3
meta-arm: d5f132b199..5c42f084f7:
Adam Johnston (1):
arm/trusted-services: Fix 'no such file' when building libts
Adrian Herrera (2):
atp: decouple m5readfile from m5ops
atp: move m5readfile to meta-gem5
Adrián Herrera Arcila (5):
atp: fix failing test_readme
gem5: support for EXTRAS
atp: separate recipe for gem5 models
atp: fix machine overrides in recipes
ci: add meta-atp to check-layers
David Bagonyi (1):
meta-arm-toolchain: Drop calls to datastore finalize
Diego Sueiro (2):
arm/classes: Introduce apply_local_src_patches bbclass
arm/trusted-firmware-m: Fix local source patches application
Emekcan (1):
arm/fvp: Upgrade Corstone1000 FVP
Emekcan Aras (6):
arm-bsp/documentation: corstone1000: update the user guide
arm/optee: Move optee-3.18 patches
arm/optee: support optee 3.19
arm-bsp/optee-os: Adds 3.19 bbappend
arm-bsp/optee-os: N1SDP support for optee-os 3.19
arm/qemuarm-secureboot: pin optee-os version
Jon Mason (5):
arm-bsp/trusted-services: rename bbappends with git version
arm/trusted-services: limit the ts compatible machines
arm-bsp/trusted-services: add n1sdp support
arm/trusted-firmware-m: update to 1.6.1
CI: define DEFAULT_TAG and CPU_REQUEST
Khem Raj (1):
gn: Replace lfs64 functions with original counterparts
Mohamed Omar Asaker (5):
arm-bsp/trusted-services: corstone1000: Use the stateless platform service calls
arm-bsp/trusted-firmware-m: Bump TFM to v1.7
arm-bsp/trusted-firmware-m: corstone1000: TFM 1.7
arm-bsp/musca_b1: Edit the platform name
arm-bsp/trusted-firmware-m: Remove TF-M 1.6 recipe
Peter Hoyes (3):
arm/fvp: Backport shlex.join from Python 3.8
arm/fvpboot: Disable timing annotation by default
arm/classes: Ensure patch files are sorted in apply_local_src_patches
Robbie Cao (1):
arm/fvp-base-r-aem: upgrade to version 11.20.15
Ross Burton (17):
CI: revert a meta-clang change which breaks pixman (thus, xserver)
CI: add variables needed for k8s runners
CI: add tags to all jobs
CI: no need to install telnet
CI: fix builds with clang
CI: use the .setup fragment in machine-coverage
arm/fvp-base-a-aem: upgrade to 11.20.15
arm-bsp/edk2-firmware: allow clang builds on juno
ci/get-binary-toolchains: rewrite, slightly
arm-bsp/documentation: update fvp-base documentation to use runfvp
CI: use qemuarm64 for pending-updates report job
meta-atp: remove
meta-gem5: remove
arm/fvp-envelope: name the FVP tarballs for checksums
arm/fvp-envelope: update HOMEPAGE
arm/fvp-base-a-aem: add support for aarch64 binaries
CI: don't pin fvp-base jobs to x86-64
poky: 44bb88cc86..0ce159991d:
Alejandro Hernandez Samaniego (6):
baremetal-image: Avoid overriding qemu variables from IMAGE_CLASSES
rust: Enable building rust from stable, beta and nightly channels
rust: Enable baremetal targets
baremetal-helloworld: Enable x86 and x86-64 ports
baremetal-helloworld: Move from skeleton to recipes-extended matching what rust-hello-world is doing
oe-selftest: Add baremetal toolchain test
Alex Kiernan (20):
rust: Install target.json for target rustc
rust: update 1.65.0 -> 1.66.0
oeqa/runtime/rust: Add basic compile/run test
libstd-rs: Merge .inc into .bb
libstd-rs: Move source directory to library/test
rust-llvm: Merge .inc into .bb
rust-llvm: Update LLVM_VERSION to match embedded version
packagegroup-rust-sdk-target: Add Rust SDK target packagegroup
packagegroup-core-sdk: Add SDK toolchain language selection support
rust: Merge .inc into .bb
rust: Move musl-x86 fix for `__stack_chk_fail_local` to rust-source
cargo: Merge .inc into .bb
cargo: Extend DEBUG_PREFIX_MAP to cover vendor
cargo: Include crossbeam-utils patch
cargo: Drop exclude from world
packagegroup-rust-sdk-target: Add cargo
oeqa/runtime/rust: Add cargo test
classes: image: Set empty weak default IMAGE_LINGUAS
default-distrovars: Include "c" in IMAGE_LINGUAS for glibc
rust: Merge all rustc-source patches into rust-source.inc
Alex Stewart (2):
lsof: add update-alternatives logic
opkg: upgrade to version 0.6.1
Alexander Kanavin (155):
elfutils: update 0.187 -> 0.188
rsync: update 3.2.5 -> 3.2.7
swig: update 4.0.2 -> 4.1.0
tcl: update 8.6.11 -> 8.6.12
quota: update 4.06 -> 4.09
shadow: update 4.12.3 -> 4.13
texinfo: update 6.8 -> 7.0
libhandy: update 1.6.3 -> 1.8.0
xf86-input-mouse: update 1.9.3 -> 1.9.4
flac: update 1.4.0 -> 1.4.2
icu: update 71.1 -> 72-1
libgpg-error: update 1.45 -> 1.46
popt: update 1.18 -> 1.19
vte: update 0.68.0 -> 0.70.1
webkitgtk: update 2.36.7 -> 2.38.2
man-db: update 2.10.2 -> 2.11.1
gawk: update 5.1.1 -> 5.2.1
unfs: update 0.9.22 -> 0.10.0
qemu-helper: depend on unfs3 and pseudo directly
runqemu: do not hardcode the ip address of the nfs server when using tap
selftest/runqemu: reenable the nfs rootfs test
glibc-tests: correctly pull in the actual tests when installing -ptest package
python3: fix tests on x86 (32 bit)
ptest-packagelists.inc: do not run valgrind ptests on 32 bit x86
python3: use the standard shell version of python3-config
python3targetconfig.bbclass: use PYTHONPATH to point to the target config
bitbake: fetch2/wget.py: correctly match versioned directories
devtool/upgrade: correctly handle recipes where S is a subdir of upstream tree
python3-numpy: fix upstream version check
python3-poetry-core: update 1.3.2 -> 1.4.0
tcl: update 8.6.12 -> 8.6.13
libnewt: update 0.52.21 -> 0.52.23
libxdmcp: update 1.1.3 -> 1.1.4
libxpm: update 3.5.13 -> 3.5.14
libxrandr: update 1.5.2 -> 1.5.3
bluez: update 5.65 -> 5.66
libxcrypt: update PV to match SRCREV
python3-dbusmock: update 0.28.4 -> 0.28.6
ruby: merge .inc into .bb
ruby: update 3.1.2 -> 3.1.3
ghostscript: update 9.56.1 -> 10.0.0
tzdata: update 2022d -> 2022g
systemtap: upgrade 4.7 -> 4.8
gnupg: upgrade 2.3.7 -> 2.3.8
ptest-packagelists.inc: correctly assign fast and slow tests
ovmf: update edk2-stable202208 -> edk2-stable202211
llvm: update 15.0.4 -> 15.0.6
tcmode-default.inc: set LLVMVERSION to a major version wildcard
cmake: update 3.24.2 -> 3.25.1
python3-native: further tweak to sysconfig.py to find python includes correctly
libslirp: add recipe to continue slirp support in qemu
qemu: update 7.1.0 -> 7.2.0
systemd: update 251.8 -> 252.4
dpkg: update 1.21.9 -> 1.21.13
python3-installer: update 0.5.1 -> 0.6.0
python3: update 3.11.0 -> 3.11.1
weston: update 11.0.0 -> 11.0.1
xhost: update 1.0.8 -> 1.0.9
xinit: update 1.4.1 -> 1.4.2
xkbcomp: update 1.4.5 -> 1.4.6
xprop: update 1.2.5 -> 1.2.6
xset: update 1.2.4 -> 1.2.5
xvinfo: update 1.1.4 -> 1.1.5
xf86-video-vesa: update 2.5.0 -> 2.6.0
libice: update 1.0.10 -> 1.1.1
libxcomposite: update 0.4.5 -> 0.4.6
libxdamage: update 1.1.5 -> 1.1.6
libxres: update 1.2.1 -> 1.2.2
libxscrnsaver: update 1.2.3 -> 1.2.4
libxv: update 1.0.11 -> 1.0.12
jquery: upgrade 3.6.1 -> 3.6.2
libmodule-build-perl: update 0.4231 -> 0.4232
python3-chardet: upgrade 5.0.0 -> 5.1.0
libarchive: upgrade 3.6.1 -> 3.6.2
stress-ng: upgrade 0.15.00 -> 0.15.01
vulkan: upgrade 1.3.231.1 -> 1.3.236.0
Revert "python3-native: further tweak to sysconfig.py to find python includes correctly"
conf/machine/include: add x86-64-v3 tunes (AVX, AVX2, BMI1, BMI2, F16C, FMA, LZCNT, MOVBE, XSAVE)
go: update 1.19.3 -> 1.19.4
vulkan-samples: update to latest revision
boost-build-native: update 1.80.0 -> 1.81.0
qemu: disable sporadically failing test-io-channel-command
devtool: process local files only for the main branch
libportal: add from meta-openembedded/meta-gnome
libportal: convert from gtk-doc to gi-docgen
epiphany: update 42.4 -> 43.0
qemux86-64: build for x86-64-v3 (2013 Haswell and later) rather than Core 2 from 2006
valgrind: disable tests that started failing after switching to x86-64-v3 target
glib-2.0: upgrade 2.74.3 -> 2.74.4
jquery: upgrade 3.6.2 -> 3.6.3
nasm: update 2.15.05 -> 2.16.01
ffmpeg: use nasm patched-in debug-prefix-map option to restore reproducibility
gtk+3: update 3.24.35 -> 3.24.36
libva-utils: update 2.16.0 -> 2.17.0
xcb-util: update 0.4.0 -> 0.4.1
gnupg: update 2.3.8 -> 2.4.0
libksba: update 1.6.2 -> 1.6.3
python3-pycryptodomex: upgrade 3.15.0 -> 3.16.0
piglit: upgrade to latest revision
python3-setuptools-scm: upgrade 7.0.5 -> 7.1.0
python3-attrs: upgrade 22.1.0 -> 22.2.0
webkitgtk: upgrade 2.38.2 -> 2.38.3
linux-firmware: upgrade 20221109 -> 20221214
harfbuzz: upgrade 5.3.1 -> 6.0.0
python3-pytz: upgrade 2022.6 -> 2022.7
strace: upgrade 6.0 -> 6.1
python3-pycryptodome: upgrade 3.15.0 -> 3.16.0
meson: upgrade 0.64.0 -> 1.0.0
xwayland: upgrade 22.1.5 -> 22.1.7
python3-pyrsistent: upgrade 0.19.2 -> 0.19.3
file: upgrade 5.43 -> 5.44
python3-subunit: upgrade 1.4.1 -> 1.4.2
python3-zipp: upgrade 3.10.0 -> 3.11.0
python3-cryptography: upgrade 38.0.3 -> 38.0.4
logrotate: upgrade 3.20.1 -> 3.21.0
python3-importlib-metadata: upgrade 5.0.0 -> 5.2.0
python3-numpy: upgrade 1.23.4 -> 1.24.1
xserver-xorg: upgrade 21.1.4 -> 21.1.6
puzzles: upgrade to latest revision
vte: upgrade 0.70.1 -> 0.70.2
libpsl: upgrade 0.21.1 -> 0.21.2
libtest-fatal-perl: upgrade 0.016 -> 0.017
python3-urllib3: upgrade 1.26.12 -> 1.26.13
python3-cryptography-vectors: upgrade 38.0.3 -> 38.0.4
python3-setuptools: upgrade 65.5.1 -> 65.6.3
libsdl2: upgrade 2.26.0 -> 2.26.1
python3-gitdb: upgrade 4.0.9 -> 4.0.10
diffoscope: upgrade 224 -> 230
python3-mako: upgrade 1.2.3 -> 1.2.4
python3-sphinx: upgrade 5.3.0 -> 6.0.0
libsolv: upgrade 0.7.22 -> 0.7.23
ruby: upgrade 3.1.3 -> 3.2.0
python3-lxml: upgrade 4.9.1 -> 4.9.2
python3-git: upgrade 3.1.29 -> 3.1.30
curl: upgrade 7.86.0 -> 7.87.0
kmscube: upgrade to latest revision
gobject-introspection: upgrade 1.72.0 -> 1.74.0
python3-dtschema: upgrade 2022.11 -> 2022.12
bash: upgrade 5.2.9 -> 5.2.15
kexec-tools: upgrade 2.0.25 -> 2.0.26
python3-jsonschema: upgrade 4.17.0 -> 4.17.3
python3-pycairo: upgrade 1.21.0 -> 1.23.0
nghttp2: upgrade 1.50.0 -> 1.51.0
python3-certifi: upgrade 2022.9.24 -> 2022.12.7
python3-hypothesis: upgrade 6.57.1 -> 6.61.0
libsndfile1: upgrade 1.1.0 -> 1.2.0
repo: upgrade 2.29.9 -> 2.31
libpcap: upgrade 1.10.1 -> 1.10.2
python3-jsonschema: depend on rfc3339-validator in all cases
python3-strict-rfc3339: remove the recipe
elfutils: do not error out on deprecated declarations
gcr3: limit version check to 3.x versions without odd-even rule
ncurses: restore version check as it's now again working due to release of 6.4
tiff: update 4.4.0 -> 4.5.0
qemu: fix recent reproducibility issues
Alexey Smirnov (1):
classes: make TOOLCHAIN more permissive for kernel
Anton Antonov (1):
rust: Do not use default compiler flags defined in CC crate
Antonin Godard (2):
busybox: always start do_compile with orig config files
busybox: rm temporary files if do_compile was interrupted
Atanas Bunchev (1):
qemu.rst: slirp port forwarding details
Bruce Ashfield (30):
linux-yocto-dev: bump to v6.0+
linux-yocto/5.19: update to v5.19.16
linux-yocto/5.15: update to v5.15.74
linux-yocto/5.19: update to v5.19.17
linux-yocto/5.15: update to v5.15.76
linux-yocto/5.19: cfg: intel and vesa updates
kern-tools: integrate ZFS speedup patch
linux-yocto-dev: bump to v6.1
kernel-devsrc: fix for v6.1+
lttng-modules: fix build for v6.1+
linux-yocto/5.19: security.cfg: remove configs which have been dropped
linux-yocto/5.15: update to v5.15.78
linux-yocto/5.19: fix CONFIG_CRYPTO_CCM mismatch warnings
linux-yocto/5.15: fix CONFIG_CRYPTO_CCM mismatch warnings
linux-yocto/5.19: fix elfutils run-backtrace-native-core ptest failure
linux-libc-headers: add 6.x fetch location
linux-libc-headers: bump to 6.1
linux-yocto/5.19: fix perf build with clang
linux-yocto/5.15: ltp and squashfs fixes
linux-yocto: introduce v6.1 reference kernel recipes
linux-yocto/5.15: fix perf build with clang
linux-yocto/5.15: libbpf: Fix build warning on ref_ctr_off
linux-yocto/5.15: update to v5.15.84
linux-yocto/6.1: update to v6.1.1
linux-yocto/5.15: powerpc: Fix reschedule bug in KUAP-unlocked user copy
linux-yocto/5.19: powerpc: Fix reschedule bug in KUAP-unlocked user copy
linux-yocto/6.1: update to v6.1.3
linux-yocto/6.1: cfg: remove CONFIG_ARM_CRYPTO
yocto-bsps/5.15: update to v5.15.78
linux-yocto/5.15: update to v5.15.80
Carlos Alberto Lopez Perez (3):
xwayland: libxshmfence is needed when dri3 is enabled
recipes: Enable nativesdk for gperf, unifdef, gi-docgen and its dependencies
mesa-gl: gallium is required when enabling x11
Changqing Li (2):
base.bbclass: Fix way to check ccache path
sqlite3: upgrade 3.40.0 -> 3.40.1
Charlie Johnston (1):
opkg: ensure opkg uses private gpg.conf when applying keys.
Chee Yang Lee (1):
migration-guides: add release-notes for 4.1.1
Chen Qi (10):
kernel.bbclass: make KERNEL_DEBUG_TIMESTAMPS work at rebuild
resolvconf: make it work
dhcpcd: fix to work with systemd
bitbake: command.py: cleanup bb.cache.parse_recipe
psplash: consider the situation of psplash not exist for systemd
bc: extend to nativesdk
rm_work: adjust dependency to make do_rm_work_all depend on do_rm_work
selftest: allow '-R' and '-r' be used together
dhcpcd: backport two patches to fix runtime error
libseccomp: fix typo in DESCRIPTION
Christian Eggers (1):
boost: add url lib
David Bagonyi (1):
u-boot: Fix u-boot signing when building with multiple u-boot configs
Dmitry Baryshkov (2):
linux-firmware: upgrade 20221012 -> 20221109
linux-firmware: add new fw file to ${PN}-qcom-adreno-a530
Enguerrand de Ribaucourt (1):
bitbake-layers: fix a typo
Enrico Jörns (1):
sstatesig: emit more helpful error message when not finding sstate manifest
Enrico Scholz (1):
sstate: show progress bar again
Fabre Sébastien (1):
u-boot: Add /boot in SYSROOT_DIRS
Frank de Brabander (4):
bitbake: README: Improve explanation about running the testsuite
bitbake: bin/utils: Ensure locale en_US.UTF-8 is available on the system
bitbake: process: log odd unlink events with bitbake.sock
bitbake: README: add required python version for bitbake
Harald Seiler (1):
opkg: Set correct info_dir and status_file in opkg.conf
Jagadeesh Krishnanjanappa (1):
qemuboot.bbclass: make sure runqemu boots bundled initramfs kernel image
Jan Kircher (1):
toolchain-scripts: compatibility with unbound variable protection
Javier Tia (1):
poky.conf: Add Fedora 36 as supported distro
Joe Slater (2):
python3: Fix CVE-2022-37460
libarchive: fix CVE-2022-36227
Jose Quaresma (2):
Revert "gstreamer1.0: disable flaky gstbin:test_watch_for_state_change test"
gstreamer1.0: Fix race conditions in gstbin tests
Joshua Watt (4):
qemu-helper-native: Correctly pass program name as argv[0]
bitbake: cooker: Use event to terminate parser threads
bitbake: cooker: Start sync thread a little earlier
bitbake: bitbake: Convert to argparse
Kai Kang (4):
xorg-lib-common.inc: set default value of XORG_EXT
libx11-compose-data: 1.6.8 -> 1.8.3
libx11: 1.8.1 -> 1.8.3
libsm: 1.2.3 > 1.2.4
Kasper Revsbech (1):
bitbake: fetch2/wget: handle username/password in uri
Khem Raj (47):
rsync: Delete pedantic errors re-ordering patch
pseudo: Disable LFS on 32bit arches
libxkbcommon: Extend to build native package
iso-codes: Extend to build native packages
xkeyboard-config: Extend to build native package
bluez5: enable position independent executables flag
rpcsvc-proto: Use autoconf knob to enable largefile support
gptfdisk: Enable largefile support functions
libpcre2: Upgrade to 10.42
erofs-utils: Convert from off64_t to off_t
pseudo: Remove 64bit time_t flags
unfs3: Define off64_t in terms of off_t on musl
acpid: Fix largefile enabled build
efivar: Replace off64_t with off_t
ltp: Fix largefile support
acl: Enable largefile support by default
libpciaccess: Do not use 64bit functions for largefile support
mdadm: Use _FILE_OFFSET_BITS to use largefile support
btrfs-tools: Do not use 64bit functions for largefile support
e2fsprogs: Do not use 64bit functions for largefile support
libbsd: Fix build with largefile support
gpgme: Fix build with largefile support
virglrenderer: Replace lseek64 with lseek
nfs-utils: Replace statfs64 with statfs
alsa-utils: Replace off64_t with off_t
lttng-tools: Fix build with largefile support
strace: Add knob to enable largefile support
numactl: Enable largefile support
qemu: Fix build with largefile support
systemd: Fix 252 release build on musl
rust: Do not use open64 on musl in getrandom crate
rust,libstd-rs: Fix build with latest musl
rust-llvm: Fix build on latest musl
cargo: Do not use open64 on musl anymore
llvm: Do not use lseek64
strace: Replace off64_t with off_t in sync_file_range.c test
vulkan-samples: Do not use LFS64 APIs in spdlog
pulseaudio: Do not use 64bit time_t flags
musl: Update to latest on tip of trunk
rust: Fix build with 64bit time_t
stress-ng: Do not enforce gold linker
time64.inc: Add GLIBC_64BIT_TIME_FLAGS on ppc/x86 as well
time64: Remove leading whitespace from GLIBC_64BIT_TIME_FLAGS
mpg123: Enable largefile support
site/powerpc32-linux: Do not cache statvfs64 across glibc and musl
tiff: Add packageconfig knob for webp
site/common-musl: Set ac_cv_sys_file_offset_bits default to 64
Lee Chee Yang (1):
migration-guides: add release-notes for 4.0.6
Luca Boccassi (2):
systemd: refresh patch to remove fuzz introduced by rebase on v252
systemd: ship pcrphase/measure tools and units in systemd-extra-utils
Luis (1):
rm_work.bbclass: use HOSTTOOLS 'rm' binary exclusively
Marek Vasut (5):
bitbake: fetch2/git: Prevent git fetcher from fetching gitlab repository metadata
package_rpm: Fix Linux 6.1.0 perf 1.0 version mistranslation
systemd: Make importd depend on glib-2.0 again
bitbake: bitbake-user-manual: Document override :append, :prepend, :remove order
bitbake: fetch2/git: Clarify the meaning of namespace
Markus Volk (12):
ell: upgrade 0.53 -> 0.54
libsdl2: update 2.24.2 -> 2.26.0
graphene: import from meta-oe
gtk4: import recipe from meta-gnome
gcr: rename gcr -> gcr3
gcr: add recipe for gcr-4, needed to build with gtk4
epiphany: use gcr3
gtk4: add tracker-miners runtime dependency
python3-dbusmock: allow to build native
gtk4: update 4.8.2 -> 4.8.3
gcr3: update 3.40.0 -> 3.41.1
librsvg: enable vapi build
Marta Rybczynska (2):
efibootmgr: update compilation with musl
cve-update-db-native: avoid incomplete updates
Martin Jansa (4):
libxml2: upgrade test data from 20080827 to 20130923
nativesdk-rpm: export RPM_ETCCONFIGDIR and MAGIC in environment like RPM_CONFIGDIR
nativesdk-rpm: don't create wrappers for WRAPPER_TOOLS
tune-x86-64-v3.inc: set QEMU_EXTRAOPTIONS like other tune-* files
Mathieu Dubois-Briand (1):
dbus: Add missing CVE product name
Michael Halstead (1):
uninative: Upgrade to 3.8.1 to include libgcc
Michael Opdenacker (34):
manuals: add missing references to classes
manuals: fix paragraphs with the "inherit" word
ref-manual/classes.rst: remove reference to sip.bbclass
manuals: simplify .gitignore files
manuals: split dev-manual/common-tasks.rst
dev-manual/sbom.rst: minor corrections
bitbake: bitbake-user-manual: update references to Yocto Project manual
bitbake.conf: remove SERIAL_CONSOLE variable
bitbake: bitbake-user-manual: add reference to bitbake git repository
ref-manual: add references to variables only documented in the BitBake manual
manuals: add reference to yocto-docs git repository to page footer
manuals: add missing references to variables
manuals: add missing SPDX license header to source files
manuals: fix double colons
ref-manual/resources.rst: fix formating
ref-manual: update references to release notes
manual: improve documentation about using external toolchains
ref-manual/images.rst: fix unnumbered list
manuals: define proper numbered lists
manuals: final removal of SERIAL_CONSOLE variable
ref-manual/resources.rst: improve description of mailing lists
ref-manual/system-requirements.rst: update buildtools instructions
manuals: create references to buildtools
documentation/poky.yaml.in: update minimum python version to 3.8
manuals: prepare 4.2 migration notes
bitbake: bitbake-user-manual: double colon fix
bitbake: bitbake-user-manual: remove "OEBasic" signature generator
migration-guides: fix 4.2 migration note issues
toaster-manual: fix description of introduction video
ref-manual/classes.rst: remove .bbclass from section titles
manuals: simplify references to classes
migration-1.6.rst: fix redundant reference
ref-manual/system-requirements.rst: recommend buildtools for not supported distros
.gitignore: ignore files generated by Toaster
Mikko Rapeli (5):
qemurunner.py: support setting slirp host IP address
runqemu: limit slirp host port forwarding to localhost 127.0.0.1
qemurunner.py: use IP address from command line
dev-manual/runtime-testing.rst: fix oeqa runtime test path
runqemu: add QB_SETUP_CMD and QB_CLEANUP_CMD
Mingli Yu (8):
tcl: correct the header location in tcl.pc
python3: make tkinter available when enabled
sudo: add selinux and audit PACKAGECONFIG
iproute2: add selinux PACKAGECONFIG
util-linux: add selinux PACKAGECONFIG
cronie: add selinux PACKAGECONFIG
psmisc: add selinux PACKAGECONFIG
gcr: add opengl to REQUIRED_DISTRO_FEATURES
Narpat Mali (2):
ffmpeg: fix for CVE-2022-3964
ffmpeg: fix for CVE-2022-3965
Ola x Nilsson (4):
kbd: Don't build tests
glibc: Add ppoll fortify symbol for 64 bit time_t
insane: Add QA check for 32 bit time and file offset functions
time64.conf: Include to enable 64 bit time flags
Ovidiu Panait (1):
kernel.bbclass: remove empty module directories to prevent QA issues
Patrick Williams (1):
kernel-fitimage: reduce dependency to the cpio
Pavel Zhukov (1):
oeqa/rpm.py: Increase timeout and add debug output
Peter Kjellerstedt (1):
recipes, classes: Avoid adding extra whitespace to PACKAGESPLITFUNCS
Peter Marko (2):
externalsrc: fix lookup for .gitmodules
oeqa/selftest/externalsrc: add test for srctree_hash_files
Petr Kubizňák (1):
harfbuzz: remove bindir only if it exists
Petr Vorel (1):
iputils: update to 20221126
Polampalli, Archana (1):
libpam: fix CVE-2022-28321
Qiu, Zheng (3):
valgrind: remove most hidden tests for arm64
tiff: Security fix for CVE-2022-3970
vim: upgrade 9.0.0820 -> 9.0.0947
Quentin Schulz (4):
cairo: update patch for CVE-2019-6461 with upstream solution
cairo: fix CVE patches assigned wrong CVE number
docs: kernel-dev: faq: update tip on how to not include kernel in image
docs: migration-guides: migration-4.0: specify variable name change for kernel inclusion in image recipe
Randy MacLeod (1):
valgrind: skip the boost_thread test on arm
Ranjitsinh Rathod (1):
curl: Correct LICENSE from MIT-open-group to curl
Ravula Adhitya Siddartha (2):
linux-yocto/5.15: update genericx86* machines to v5.15.78
linux-yocto/5.19: update genericx86* machines to v5.19.17
Richard Purdie (97):
bitbake: cache/cookerdata: Move recipe parsing functions from cache to databuilder
bitbake: cache: Drop broken/unused code
bitbake: cache: Drop unused function
bitbake: server: Ensure cooker profiling works
bitbake: worker/runqueue: Reduce initial data transfer in workerdata
bitbake: cache: Drop support for not saving the cache file
bitbake: runqueue: Add further debug for sstate reuse issues
bitbake: runqueue: Fix race issues around hash equivalence and sstate reuse
bitbake: data/siggen: Switch to use frozensets and optimize
bitbake: data_smart: Add debugging for overrides stability issue
bitbake: utils: Allow to_boolean to support int values
base: Drop do_package base definition
bitbake: data: Drop obsolete pydoc/path code
bitbake: BBHandler: Remove pointless global variable declarations
bitbake: runqueue: Improve error message for missing multiconfig
bitbake: data_smart: Small cache reuse optimization
bitbake.conf: Simplify CACHE setting
oeqa/selftest/tinfoil: Add test for separate config_data with recipe_parse_file()
qemu: Ensure libpng dependency is deterministic
bitbake: data: Tweak code layout
bitbake: cache/siggen: Simplify passing basehash data into the cache
bitbake: siggen/cache: Optionally allow adding siggen hash data to the bitbake cache
bitbake: parse: Add support for addpylib conf file directive and BB_GLOBAL_PYMODULES
bitbake: cookerdata: Ensure layers use LAYERSERIES_COMPAT fairly
base: Switch to use addpylib directive and BB_GLOBAL_PYMODULES
devtool/friends: Use LAYERSERIES_CORENAMES when generating LAYERSERIES_COMPAT entries
scripts/checklayer: Update to match bitbake changes
yocto-check-layer: Allow OE-Core to be tested
bitbake: main: Add timestamp to server retry messages
bitbake: main/server: Add lockfile debugging upon server retry
poky/poky-tiny: Drop largefile mentions
lib/sstatesig: Drop OEBasic siggen
bitbake: siggen: Drop non-multiconfig aware siggen support
bitbake: build/siggen/runqueue: Drop do_setscene references
bitbake: bitbake: Bump minimum python version requirement to 3.8
sanity: Update minimum python version to 3.8
bitbake: main/process: Add extra sockname debugging
Revert "kernel-fitimage: reduce dependency to the cpio"
bitbake: siggen: Directly store datacaches reference
bitbake: bitbake: siggen/runqueue: Switch to using RECIPE_SIGGEN_INFO feature for signature dumping
bitbake: siggen: Add dummy dataCaches from task context/datastore
bitbake: build/siggen: Rework stamps functions
bitbake: siggen: Clarify which fn is meant
bitbake: ast/data/codeparser: Add dependencies from python module functions
bitbake: codeparser/data: Add vardepsexclude support to module dependency code
bitbake.conf: Add module function vardepsexclude entries
time64: Rename to a .inc file to match the others
bitbake: command: Add ping command
bitbake: cache: Allow compression of the data in SiggenRecipeInfo
bitbake: siggen: Minor code improvement
bitbake: server/process: Add bitbake.sock race handling
oeqa/concurrencytest: Add number of failures to summary output
python3-poetry-core: Fix determinism issue breaking reproducibility
bitbake: cache/siggen: Fix cache issues with signature handling
bitbake: event: builtins fix for 'd' deletion
bitbake: cooker: Ensure cache is cleared for partial resets
bitbake: tinfoil: Ensure CommandExit is handled
bitbake: cache: Drop reciever side counting for SiggenRecipeInfo
bitbake: knotty: Avoid looping with tracebacks
bitbake: event: Add enable/disable heartbeat code
bitbake: cooker/cookerdata: Rework the way the datastores are reset
bitbake: server/process: Improve exception and idle function logging
bitbake: command: Tweak finishAsyncCommand ordering to avoid races
bitbake: cooker: Ensure commands clean up any parser processes
bitbake: server/process: Improve idle loop exit code
bitbake: event: Always use threadlock
bitbake: server/process: Add locking around idle functions accesses
bitbake: server/process: Run idle commands in a separate idle thread
bitbake: knotty: Ping the server/cooker periodically
bitbake: cookerdata: Fix cache/reparsing issue
bitbake: cookerdata: Fix previous commit to use a string, not a generator
bitbake: command: Ensure that failure cases call finishAsyncComand
layer.conf: Update to use mickledore as the layer series name
layer.conf: Mark master as compatible with mickledore
bitbake: lib/bb: Update thread/process locks to use a timeout
package: Move fixup_perms function to bb function library
package: Move get_conffiles/files_from_filevars functions to lib
package: Move pkgdata handling functions to oe.packagedata
package: Move emit_pkgdata to packagedata.py
package: Move package functions to function library
package: Drop unused function and obsolete comment
package: Move mapping_rename_hook to packagedata function library
python3-cython: Use PACKAGESPLITFUNCS instead of PACKAGEBUILDPKGD
package: Drop support for PACKAGEBUILDPKGD function customisation
recipes/classes: Drop prepend/append usage with PACKAGESPLITFUNCS
bitbake: cooker: Rework the parsing results submission
bitbake: cooker: Clean up inotify idle handler
uninative-tarball: Add libgcc
patchelf: Add fix submitted upstream for uninative segfaults
bitbake: cooker/command: Drop async command handler indirection via cooker
bitbake: process/cooker/command: Fix currentAsyncCommand locking/races
uninative: Ensure uninative is enabled in all cases for BuildStarted event
qemux86-64: Reduce tuning to core2-64
bitbake: tinfoil: Don't wait for events indefinitely
bitbake: knotty: Improve shutdown handling
bitbake: cooker: Fix exit handling issues
bitbake: server/process: Move heartbeat to idle thread
Robert Andersson (1):
go-crosssdk: avoid host contamination by GOCACHE
Ross Burton (28):
build-appliance-image: Update to master head revision
lib/buildstats: fix parsing of trees with reduced_proc_pressure directories
combo-layer: remove unused import
combo-layer: don't use bb.utils.rename
combo-layer: add sync-revs command
libxml2: upgrade 2.9.14 -> 2.10.3
libxml2: add more testing
python3-packaging: upgrade to 22.0
python3-hatchling: remove python3-tomli DEPENDS
python3-cryptography: remove python3-tomli RDEPENDS
meson: drop redundant is_debianlike() patch
meson: always use meson subcommands
libepoxy: remove upstreamed patch
gtk+3: upgrade 3.24.34 -> 3.24.35
gtk+3: port to Meson
meson: no need to rebuild on install
at-spi2-core: clean up x11 enabling
at-spi2-core: disable API docs if x11 is disabled
gtk+3: fix reproducible builds
lsof: upgrade 4.96.4 -> 4.96.5
pango: upgrade 1.50.11 -> 1.50.12
python3-hatch-vcs: upgrade 0.2.0 -> 0.3.0
python3-hatchling: upgrade 1.11.1 -> 1.12.1
python3-pathspec: upgrade 0.10.1 -> 0.10.3
rm_work: handle non-existent stamps directory
oeqa/selftest/debuginfod: improve testcase
elfutils: disable deprecation errors in all builds, not just native
curl: don't enable debug builds
Ryan Eatmon (1):
go: Update reproducibility patch to fix panic errors
Sandeep Gundlupet Raju (3):
libdrm: Remove libdrm-kms package
kernel-fitimage: Adjust order of dtb/dtbo files
kernel-fitimage: Allow user to select dtb when multiple dtb exists
Saul Wold (1):
at: Change when files are copied
Sergei Zhmylev (1):
oeqa/qemurunner: implement vmdk images support
Tim Orling (7):
python3-hypothesis: upgrade 6.56.4 -> 6.57.1
at-spi2-core: upgrade 2.44.1 -> 2.46.0
mirrors.bbclass: update CPAN_MIRROR
libtry-tiny-perl: add recipe for 0.31
libtest-fatal-perl: add recipe for 0.016
libtest-warnings-perl: move from meta-perl
liburi-perl: upgrade 5.08 -> 5.17
Trevor Woerner (1):
local.conf.sample: update bbclass locations
Vincent Davis Jr (1):
mesa: enable glvnd support
Wang Mingyu (49):
btrfs-tools: upgrade 6.0 -> 6.0.1
libpipeline: upgrade 1.5.6 -> 1.5.7
btrfs-tools: upgrade 6.0.1 -> 6.0.2
bind: upgrade 9.18.8 -> 9.18.9
ccache: upgrade 4.7.2 -> 4.7.4
dropbear: upgrade 2022.82 -> 2022.83
libinput: upgrade 1.21.0 -> 1.22.0
libxft: upgrade 2.3.6 -> 2.3.7
mpfr: upgrade 4.1.0 -> 4.1.1
glib-2.0: upgrade 2.74.1 -> 2.74.3
libxcrypt-compat: upgrade 4.4.30 -> 4.4.33
patchelf: upgrade 0.16.1 -> 0.17.0
pciutils: upgrade 3.8.0 -> 3.9.0
shaderc: upgrade 2022.3 -> 2022.4
sqlite3: upgrade 3.39.4 -> 3.40.0
stress-ng: upgrade 0.14.06 -> 0.15.00
swig: upgrade 4.1.0 -> 4.1.1
texinfo: upgrade 7.0 -> 7.0.1
usbutils: upgrade 014 -> 015
xz: upgrade 5.2.7 -> 5.2.9
wayland-protocols: upgrade 1.28 -> 1.31
gnu-config: upgrade to latest revision
libfontenc: upgrade 1.1.6 -> 1.1.7
libpcre2: upgrade 10.40 -> 10.41
libpng: upgrade 1.6.38 -> 1.6.39
libxau: upgrade 1.0.10 -> 1.0.11
libxkbfile: upgrade 1.1.1 -> 1.1.2
libxshmfence: upgrade 1.3.1 -> 1.3.2
xrandr: upgrade 1.5.1 -> 1.5.2
boost: upgrade 1.80.0 -> 1.81.0
ell: upgrade 0.54 -> 0.55
git: upgrade 2.38.1 -> 2.39.0
help2man: upgrade 1.49.2 -> 1.49.3
iproute2: upgrade 6.0.0 -> 6.1.0
libmpc: upgrade 1.2.1 -> 1.3.1
makedepend: upgrade 1.0.7 -> 1.0.8
psmisc: upgrade 23.5 -> 23.6
xz: upgrade 5.2.9 -> 5.4.0
gstreamer1.0: upgrade 1.20.4 -> 1.20.5
bind: upgrade 9.18.9 -> 9.18.10
btrfs-tools: upgrade 6.0.2 -> 6.1
librepo: upgrade 1.14.5 -> 1.15.1
libsdl2: upgrade 2.26.1 -> 2.26.2
libva-utils: upgrade 2.17.0 -> 2.17.1
libxkbcommon: upgrade 1.4.1 -> 1.5.0
mpfr: upgrade 4.1.1 -> 4.2.0
dpkg: upgrade 1.21.13 -> 1.21.17
rxvt-unicode: upgrade 9.30 -> 9.31
virglrenderer: upgrade 0.10.3 -> 0.10.4
Xiangyu Chen (3):
grub: backport patches to fix CVE-2022-28736
openssh: remove RRECOMMENDS to rng-tools for sshd package
grub2: backport patch to fix CVE-2022-2601 CVE-2022-3775
Yoann Congal (2):
bitbake: Group and reorder options in bitbake help
bitbake: main: Move --buildfile help at the end of "Execution" group
leimaohui (1):
libpng: Enable NEON for aarch64 to ensure consistency with arm32.
pgowda (1):
binutils: Add patch to fix CVE-2022-4285
张忠山 (1):
bitbake: data_smart: Use regex consistently for override matching
meta-raspberrypi: 93dadf336c..896566aa92:
Carlos Alberto Lopez Perez (1):
weston: disable packageconfig options that fail to build with userland drivers
Khem Raj (2):
lirc: Drop upstreamed patch
linux-raspberrypi.inc: Weakly assign COMPATIBLE_MACHINE
Martin Jansa (2):
bluez5: update patches to apply on 5.66 version
layer.conf: update LAYERSERIES_COMPAT for mickledore
Vincent Davis Jr (5):
rpidistro-vlc,rpidistro-ffmpeg: update COMPATIBLE_HOST regex
rpidistro-vlc: upgrade 3.0.12 -> 3.0.17
rpi-default-providers: add libav and libpostproc
rpidistro-ffmpeg: upgrade 4.3.2 -> 4.3.4
rpidistro-ffmpeg: remove --enable-v4l2-request flag
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
Change-Id: Ied8537beedde0f83790e6e3595057db45f408107
diff --git a/poky/meta/lib/bblayers/create.py b/poky/meta/lib/bblayers/create.py
index 0aeb5d5..c8f3f1b 100644
--- a/poky/meta/lib/bblayers/create.py
+++ b/poky/meta/lib/bblayers/create.py
@@ -53,7 +53,7 @@
shutil.copy(license_src, license_dst)
# Get the compat value for core layer.
- compat = self.tinfoil.config_data.getVar('LAYERSERIES_COMPAT_core') or ""
+ compat = self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or ""
# Create the layer.conf from templates/layer.conf
layerconf_template = read_template('layer.conf').format(
diff --git a/poky/meta/lib/bblayers/makesetup.py b/poky/meta/lib/bblayers/makesetup.py
index 22f89d8..834e933 100644
--- a/poky/meta/lib/bblayers/makesetup.py
+++ b/poky/meta/lib/bblayers/makesetup.py
@@ -46,7 +46,7 @@
return describe.strip()
def make_repo_config(self, destdir):
- """ This is a helper function for the writer plugins that discovers currently confugured layers.
+ """ This is a helper function for the writer plugins that discovers currently configured layers.
The writers do not have to use it, but it can save a bit of work and avoid duplicated code, hence it is
available here. """
repos = {}
diff --git a/poky/meta/lib/oe/__init__.py b/poky/meta/lib/oe/__init__.py
index 92f002d..47be7b5 100644
--- a/poky/meta/lib/oe/__init__.py
+++ b/poky/meta/lib/oe/__init__.py
@@ -6,3 +6,7 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
+
+BBIMPORTS = ["data", "path", "utils", "types", "package", \
+ "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \
+ "qa", "reproducible", "rust", "buildcfg"]
diff --git a/poky/meta/lib/oe/package.py b/poky/meta/lib/oe/package.py
index 4aa40d7..c9eb75d 100644
--- a/poky/meta/lib/oe/package.py
+++ b/poky/meta/lib/oe/package.py
@@ -4,10 +4,19 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import errno
+import fnmatch
+import itertools
+import os
+import pipes
+import re
+import glob
import stat
import mmap
import subprocess
+import oe.cachedpath
+
def runstrip(arg):
# Function to strip a single file, called from split_and_strip_files below
# A working 'file' (one which works on the target architecture)
@@ -292,3 +301,1715 @@
shlib_provider[s[0]] = {}
shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
return shlib_provider
+
+# We generate a master list of directories to process; we start by
+# seeding this list with reasonable defaults, then load from
+# the fs-perms.txt files
+def fixup_perms(d):
+ import pwd, grp
+
+ cpath = oe.cachedpath.CachedPath()
+ dvar = d.getVar('PKGD')
+
+ # init using a string with the same format as a line as documented in
+ # the fs-perms.txt file
+ # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
+ # <path> link <link target>
+ #
+ # __str__ can be used to print out an entry in the input format
+ #
+ # if fs_perms_entry.path is None:
+ # an error occurred
+ # if fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.link = target of link
+ # if not fs_perms_entry.link, you can retrieve:
+ # fs_perms_entry.path = path
+ # fs_perms_entry.mode = expected dir mode or None
+ # fs_perms_entry.uid = expected uid or -1
+ # fs_perms_entry.gid = expected gid or -1
+ # fs_perms_entry.walk = 'true' or something else
+ # fs_perms_entry.fmode = expected file mode or None
+ # fs_perms_entry.fuid = expected file uid or -1
+    # fs_perms_entry.fgid = expected file gid or -1
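+    # Illustrative fs-perms.txt entries in the two accepted formats (paths and
+    # values here are assumed for the example, not taken from the real table):
+    #   /usr/src                  0755 root root false - - -
+    #   ${localstatedir}/run      link /run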
+ class fs_perms_entry():
+ def __init__(self, line):
+ lsplit = line.split()
+ if len(lsplit) == 3 and lsplit[1].lower() == "link":
+ self._setlink(lsplit[0], lsplit[2])
+ elif len(lsplit) == 8:
+ self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
+ else:
+ msg = "Fixup Perms: invalid config line %s" % line
+ oe.qa.handle_error("perm-config", msg, d)
+ self.path = None
+ self.link = None
+
+ def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
+ self.path = os.path.normpath(path)
+ self.link = None
+ self.mode = self._procmode(mode)
+ self.uid = self._procuid(uid)
+ self.gid = self._procgid(gid)
+ self.walk = walk.lower()
+ self.fmode = self._procmode(fmode)
+ self.fuid = self._procuid(fuid)
+ self.fgid = self._procgid(fgid)
+
+ def _setlink(self, path, link):
+ self.path = os.path.normpath(path)
+ self.link = link
+
+ def _procmode(self, mode):
+ if not mode or (mode and mode == "-"):
+ return None
+ else:
+ return int(mode,8)
+
+ # Note uid/gid -1 has special significance in os.lchown
+ def _procuid(self, uid):
+ if uid is None or uid == "-":
+ return -1
+ elif uid.isdigit():
+ return int(uid)
+ else:
+ return pwd.getpwnam(uid).pw_uid
+
+ def _procgid(self, gid):
+ if gid is None or gid == "-":
+ return -1
+ elif gid.isdigit():
+ return int(gid)
+ else:
+ return grp.getgrnam(gid).gr_gid
+
+ # Use for debugging the entries
+ def __str__(self):
+ if self.link:
+ return "%s link %s" % (self.path, self.link)
+ else:
+ mode = "-"
+ if self.mode:
+ mode = "0%o" % self.mode
+ fmode = "-"
+ if self.fmode:
+ fmode = "0%o" % self.fmode
+ uid = self._mapugid(self.uid)
+ gid = self._mapugid(self.gid)
+ fuid = self._mapugid(self.fuid)
+ fgid = self._mapugid(self.fgid)
+ return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
+
+ def _mapugid(self, id):
+ if id is None or id == -1:
+ return "-"
+ else:
+ return "%d" % id
+
+ # Fix the permission, owner and group of path
+ def fix_perms(path, mode, uid, gid, dir):
+ if mode and not os.path.islink(path):
+ #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
+ os.chmod(path, mode)
+ # -1 is a special value that means don't change the uid/gid
+ # if they are BOTH -1, don't bother to lchown
+ if not (uid == -1 and gid == -1):
+ #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
+ os.lchown(path, uid, gid)
+
+ # Return a list of configuration files based on either the default
+ # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
+ # paths are resolved via BBPATH
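+    # For example, a configuration might set (values illustrative):
+    #   FILESYSTEM_PERMS_TABLES = "files/fs-perms.txt files/fs-perms-persistent-log.txt"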
+ def get_fs_perms_list(d):
+ str = ""
+ bbpath = d.getVar('BBPATH')
+ fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
+ for conf_file in fs_perms_tables.split():
+ confpath = bb.utils.which(bbpath, conf_file)
+ if confpath:
+ str += " %s" % bb.utils.which(bbpath, conf_file)
+ else:
+ bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
+ return str
+
+ fs_perms_table = {}
+ fs_link_table = {}
+
+ # By default all of the standard directories specified in
+ # bitbake.conf will get 0755 root:root.
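+    # e.g. for ${sysconfdir} this seeds the equivalent of the entry
+    # "/etc 0755 root root false - - -" (path shown for a typical target layout).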
+ target_path_vars = [ 'base_prefix',
+ 'prefix',
+ 'exec_prefix',
+ 'base_bindir',
+ 'base_sbindir',
+ 'base_libdir',
+ 'datadir',
+ 'sysconfdir',
+ 'servicedir',
+ 'sharedstatedir',
+ 'localstatedir',
+ 'infodir',
+ 'mandir',
+ 'docdir',
+ 'bindir',
+ 'sbindir',
+ 'libexecdir',
+ 'libdir',
+ 'includedir',
+ 'oldincludedir' ]
+
+ for path in target_path_vars:
+ dir = d.getVar(path) or ""
+ if dir == "":
+ continue
+ fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
+
+ # Now we actually load from the configuration files
+ for conf in get_fs_perms_list(d).split():
+ if not os.path.exists(conf):
+ continue
+ with open(conf) as f:
+ for line in f:
+ if line.startswith('#'):
+ continue
+ lsplit = line.split()
+ if len(lsplit) == 0:
+ continue
+ if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
+ msg = "Fixup perms: %s invalid line: %s" % (conf, line)
+ oe.qa.handle_error("perm-line", msg, d)
+ continue
+ entry = fs_perms_entry(d.expand(line))
+ if entry and entry.path:
+ if entry.link:
+ fs_link_table[entry.path] = entry
+ if entry.path in fs_perms_table:
+ fs_perms_table.pop(entry.path)
+ else:
+ fs_perms_table[entry.path] = entry
+ if entry.path in fs_link_table:
+ fs_link_table.pop(entry.path)
+
+ # Debug -- list out in-memory table
+ #for dir in fs_perms_table:
+ # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
+ #for link in fs_link_table:
+ # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
+
+ # We process links first, so we can go back and fixup directory ownership
+ # for any newly created directories
+ # Process in sorted order so /run gets created before /run/lock, etc.
+ for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
+ link = entry.link
+ dir = entry.path
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
+ continue
+
+ if link[0] == "/":
+ target = dvar + link
+ ptarget = link
+ else:
+ target = os.path.join(os.path.dirname(origin), link)
+ ptarget = os.path.join(os.path.dirname(dir), link)
+ if os.path.exists(target):
+ msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
+ oe.qa.handle_error("perm-link", msg, d)
+ continue
+
+ # Create path to move directory to, move it, and then setup the symlink
+ bb.utils.mkdirhier(os.path.dirname(target))
+ #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
+ bb.utils.rename(origin, target)
+ #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
+ os.symlink(link, origin)
+
+ for dir in fs_perms_table:
+ origin = dvar + dir
+ if not (cpath.exists(origin) and cpath.isdir(origin)):
+ continue
+
+ fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+
+ if fs_perms_table[dir].walk == 'true':
+ for root, dirs, files in os.walk(origin):
+ for dr in dirs:
+ each_dir = os.path.join(root, dr)
+ fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
+ for f in files:
+ each_file = os.path.join(root, f)
+ fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
+
+# Get a list of files from file vars by searching files under current working directory
+# The list contains symlinks, directories and normal files.
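+# For instance, a FILES value such as "${bindir}/* ${sysconfdir}" (illustrative)
+# arrives here already expanded and is matched as "./usr/bin/*" and "./etc"
+# relative to the package staging directory.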
+def files_from_filevars(filevars):
+ cpath = oe.cachedpath.CachedPath()
+ files = []
+ for f in filevars:
+ if os.path.isabs(f):
+ f = '.' + f
+ if not f.startswith("./"):
+ f = './' + f
+ globbed = glob.glob(f)
+ if globbed:
+ if [ f ] != globbed:
+ files += globbed
+ continue
+ files.append(f)
+
+ symlink_paths = []
+ for ind, f in enumerate(files):
+ # Handle directory symlinks. Truncate path to the lowest level symlink
+ parent = ''
+ for dirname in f.split('/')[:-1]:
+ parent = os.path.join(parent, dirname)
+ if dirname == '.':
+ continue
+ if cpath.islink(parent):
+ bb.warn("FILES contains file '%s' which resides under a "
+ "directory symlink. Please fix the recipe and use the "
+ "real path for the file." % f[1:])
+ symlink_paths.append(f)
+ files[ind] = parent
+ f = parent
+ break
+
+ if not cpath.islink(f):
+ if cpath.isdir(f):
+ newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
+ if newfiles:
+ files += newfiles
+
+ return files, symlink_paths
+
+# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
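+# A recipe would typically mark its configuration files with something like
+# CONFFILES:${PN} = "${sysconfdir}/myapp.conf" (file name used purely as an example).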
+def get_conffiles(pkg, d):
+ pkgdest = d.getVar('PKGDEST')
+ root = os.path.join(pkgdest, pkg)
+ cwd = os.getcwd()
+ os.chdir(root)
+
+ conffiles = d.getVar('CONFFILES:%s' % pkg);
+ if conffiles == None:
+ conffiles = d.getVar('CONFFILES')
+ if conffiles == None:
+ conffiles = ""
+ conffiles = conffiles.split()
+ conf_orig_list = files_from_filevars(conffiles)[0]
+
+ # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
+ conf_list = []
+ for f in conf_orig_list:
+ if os.path.isdir(f):
+ continue
+ if os.path.islink(f):
+ continue
+ if not os.path.exists(f):
+ continue
+ conf_list.append(f)
+
+ # Remove the leading './'
+ for i in range(0, len(conf_list)):
+ conf_list[i] = conf_list[i][1:]
+
+ os.chdir(cwd)
+ return conf_list
+
+def legitimize_package_name(s):
+ """
+ Make sure package names are legitimate strings
+ """
+
+ def fixutf(m):
+ cp = m.group(1)
+ if cp:
+ return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
+
+ # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
+ s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
+
+ # Remaining package name validity fixes
+ return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
+
+def split_locales(d):
+ cpath = oe.cachedpath.CachedPath()
+ if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
+ bb.debug(1, "package requested not splitting locales")
+ return
+
+ packages = (d.getVar('PACKAGES') or "").split()
+
+ datadir = d.getVar('datadir')
+ if not datadir:
+ bb.note("datadir not defined")
+ return
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('LOCALEBASEPN')
+
+ if pn + '-locale' in packages:
+ packages.remove(pn + '-locale')
+
+ localedir = os.path.join(dvar + datadir, 'locale')
+
+ if not cpath.isdir(localedir):
+ bb.debug(1, "No locale files in this package")
+ return
+
+ locales = os.listdir(localedir)
+
+ summary = d.getVar('SUMMARY') or pn
+ description = d.getVar('DESCRIPTION') or ""
+ locale_section = d.getVar('LOCALE_SECTION')
+ mlprefix = d.getVar('MLPREFIX') or ""
+ for l in sorted(locales):
+ ln = legitimize_package_name(l)
+ pkg = pn + '-locale-' + ln
+ packages.append(pkg)
+ d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
+ d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
+ d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
+ d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
+ d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
+ if locale_section:
+ d.setVar('SECTION:' + pkg, locale_section)
+
+ d.setVar('PACKAGES', ' '.join(packages))
+
+ # Disabled by RP 18/06/07
+ # Wildcards aren't supported in debian
+ # They break with ipkg since glibc-locale* will mean that
+ # glibc-localedata-translit* won't install as a dependency
+ # for some other package which breaks meta-toolchain
+ # Probably breaks since virtual-locale- isn't provided anywhere
+ #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
+ #rdep.append('%s-locale*' % pn)
+ #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
+
+def package_debug_vars(d):
+ # We default to '.debug' style
+ if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
+ # Single debug-file-directory style debug info
+ debug_vars = {
+ "append": ".debug",
+ "staticappend": "",
+ "dir": "",
+ "staticdir": "",
+ "libdir": "/usr/lib/debug",
+ "staticlibdir": "/usr/lib/debug-static",
+ "srcdir": "/usr/src/debug",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
+ # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "",
+ }
+ elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+ else:
+ # Original OE-core, a.k.a. ".debug", style debug info
+ debug_vars = {
+ "append": "",
+ "staticappend": "",
+ "dir": "/.debug",
+ "staticdir": "/.debug-static",
+ "libdir": "",
+ "staticlibdir": "",
+ "srcdir": "/usr/src/debug",
+ }
+
+ return debug_vars
+
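+# For illustration: with the default ".debug" style returned above, a binary
+# installed as ${PKGD}/usr/bin/foo has its debug info split out to
+# ${PKGD}/usr/bin/.debug/foo and its sources copied under /usr/src/debug
+# ("foo" is just a placeholder name).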
+
+def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
+ debugfiles = {}
+
+ for line in dwarfsrcfiles_output.splitlines():
+ if line.startswith("\t"):
+ debugfiles[os.path.normpath(line.split()[0])] = ""
+
+ return debugfiles.keys()
+
+def source_info(file, d, fatal=True):
+ cmd = ["dwarfsrcfiles", file]
+ try:
+ output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
+ retval = 0
+ except subprocess.CalledProcessError as exc:
+ output = exc.output
+ retval = exc.returncode
+
+ # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
+ if retval != 0 and retval != 255:
+ msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
+ if fatal:
+ bb.fatal(msg)
+ bb.note(msg)
+
+ debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
+
+ return list(debugsources)
+
+def splitdebuginfo(file, dvar, dv, d):
+ # Function to split a single file into two components, one is the stripped
+ # target system binary, the other contains any debugging information. The
+ # two files are linked to reference each other.
+ #
+ # return a mapping of files:debugsources
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ sources = []
+
+ if file.endswith(".ko") and file.find("/lib/modules/") != -1:
+ if oe.package.is_kernel_module_signed(file):
+ bb.debug(1, "Skip strip on signed module %s" % file)
+ return (file, sources)
+
+ # Split the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Split %s -> %s" % (file, debugfile))
+ # Only store off the hard link reference if we successfully split!
+
+ dvar = d.getVar('PKGD')
+ objcopy = d.getVar("OBJCOPY")
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
+
+ # Set the debuglink to have the view of the file path on the target
+ subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def splitstaticdebuginfo(file, dvar, dv, d):
+    # Unlike the function above, there is no way to split a static library into
+ # two components. So to get similar results we will copy the unmodified
+ # static library (containing the debug symbols) into a new directory.
+ # We will then strip (preserving symbols) the static library in the
+ # typical location.
+ #
+ # return a mapping of files:debugsources
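+    #
+    # e.g. with the default split style, ${PKGD}/usr/lib/libfoo.a is copied
+    # unchanged to ${PKGD}/usr/lib/.debug-static/libfoo.a before the original
+    # is stripped in place ("libfoo.a" is a made-up name for illustration).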
+
+ src = file[len(dvar):]
+ dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
+ debugfile = dvar + dest
+ sources = []
+
+ # Copy the file...
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+ #bb.note("Copy %s -> %s" % (file, debugfile))
+
+ dvar = d.getVar('PKGD')
+
+ newmode = None
+ if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
+ origmode = os.stat(file)[stat.ST_MODE]
+ newmode = origmode | stat.S_IWRITE | stat.S_IREAD
+ os.chmod(file, newmode)
+
+ # We need to extract the debug src information here...
+ if dv["srcdir"]:
+ sources = source_info(file, d)
+
+ bb.utils.mkdirhier(os.path.dirname(debugfile))
+
+ # Copy the unmodified item to the debug directory
+ shutil.copy2(file, debugfile)
+
+ if newmode:
+ os.chmod(file, origmode)
+
+ return (file, sources)
+
+def inject_minidebuginfo(file, dvar, dv, d):
+ # Extract just the symbols from debuginfo into minidebuginfo,
+ # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
+ # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
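+    #
+    # After injection the extra section can be checked with, for example,
+    #   readelf -S <binary> | grep .gnu_debugdata
+    # (command shown only as an illustration of the end result).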
+
+ readelf = d.getVar('READELF')
+ nm = d.getVar('NM')
+ objcopy = d.getVar('OBJCOPY')
+
+ minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
+
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ debugfile = dvar + dest
+ minidebugfile = minidebuginfodir + src + '.minidebug'
+ bb.utils.mkdirhier(os.path.dirname(minidebugfile))
+
+ # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
+ # so skip it.
+ if not os.path.exists(debugfile):
+ bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
+ return
+
+ # minidebuginfo does not make sense to apply to ELF objects other than
+    # executables and shared libraries, so skip applying the minidebuginfo
+ # generation for objects like kernel modules.
+ for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
+ if not line.strip().startswith("Type:"):
+ continue
+ elftype = line.split(":")[1].strip()
+ if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
+ bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
+ return
+ break
+
+ # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
+ # We will exclude all of these from minidebuginfo to save space.
+ remove_section_names = []
+ for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
+ # strip the leading " [ 1]" section index to allow splitting on space
+ if ']' not in line:
+ continue
+ fields = line[line.index(']') + 1:].split()
+ if len(fields) < 7:
+ continue
+ name = fields[0]
+ type = fields[1]
+ flags = fields[6]
+ # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
+ if name.startswith('.debug_'):
+ continue
+ if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
+ remove_section_names.append(name)
+
+ # List dynamic symbols in the binary. We can exclude these from minidebuginfo
+ # because they are always present in the binary.
+ dynsyms = set()
+ for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
+ dynsyms.add(line.split()[0])
+
+ # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
+ # These are the ones we want to keep in minidebuginfo.
+ keep_symbols_file = minidebugfile + '.symlist'
+ found_any_symbols = False
+ with open(keep_symbols_file, 'w') as f:
+ for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
+ fields = line.split('|')
+ if len(fields) < 7:
+ continue
+ name = fields[0].strip()
+ type = fields[3].strip()
+ if type == 'FUNC' and name not in dynsyms:
+ f.write('{}\n'.format(name))
+ found_any_symbols = True
+
+ if not found_any_symbols:
+ bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
+ return
+
+ bb.utils.remove(minidebugfile)
+ bb.utils.remove(minidebugfile + '.xz')
+
+ subprocess.check_call([objcopy, '-S'] +
+ ['--remove-section={}'.format(s) for s in remove_section_names] +
+ ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
+
+ subprocess.check_call(['xz', '--keep', minidebugfile])
+
+ subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
+
+def copydebugsources(debugsrcdir, sources, d):
+ # The debug src information written out to sourcefile is further processed
+ # and copied to the destination here.
+
+ cpath = oe.cachedpath.CachedPath()
+
+ if debugsrcdir and sources:
+ sourcefile = d.expand("${WORKDIR}/debugsources.list")
+ bb.utils.remove(sourcefile)
+
+ # filenames are null-separated - this is an artefact of the previous use
+ # of rpm's debugedit, which was writing them out that way, and the code elsewhere
+ # is still assuming that.
+ debuglistoutput = '\0'.join(sources) + '\0'
+ with open(sourcefile, 'a') as sf:
+ sf.write(debuglistoutput)
+
+ dvar = d.getVar('PKGD')
+ strip = d.getVar("STRIP")
+ objcopy = d.getVar("OBJCOPY")
+ workdir = d.getVar("WORKDIR")
+ sdir = d.getVar("S")
+ cflags = d.expand("${CFLAGS}")
+
+ prefixmap = {}
+ for flag in cflags.split():
+ if not flag.startswith("-fdebug-prefix-map"):
+ continue
+ if "recipe-sysroot" in flag:
+ continue
+ flag = flag.split("=")
+ prefixmap[flag[1]] = flag[2]
+
+ nosuchdir = []
+ basepath = dvar
+ for p in debugsrcdir.split("/"):
+ basepath = basepath + "/" + p
+ if not cpath.exists(basepath):
+ nosuchdir.append(basepath)
+ bb.utils.mkdirhier(basepath)
+ cpath.updatecache(basepath)
+
+ for pmap in prefixmap:
+ # Ignore files from the recipe sysroots (target and native)
+ cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
+ # We need to ignore files that are not actually ours
+ # we do this by only paying attention to items from this package
+ cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
+ # Remove prefix in the source paths
+ cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
+ cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
+
+ try:
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # Can "fail" if internal headers/transient sources are attempted
+ pass
+ # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
+ # Work around this by manually finding and copying any symbolic links that made it through.
+ cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
+ (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # debugsources.list may be polluted from the host if we used externalsrc,
+ # cpio uses copy-pass and may have just created a directory structure
+    # matching the one from the host; if that's the case, move those files to
+ # debugsrcdir to avoid host contamination.
+ # Empty dir structure will be deleted in the next step.
+
+ # Same check as above for externalsrc
+ if workdir not in sdir:
+ if os.path.exists(dvar + debugsrcdir + sdir):
+ cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+ # The copy by cpio may have resulted in some empty directories! Remove these
+ cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
+ subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+
+    # Also remove debugsrcdir if it's empty
+ for p in nosuchdir[::-1]:
+ if os.path.exists(p) and not os.listdir(p):
+ os.rmdir(p)
+
+
+def process_split_and_strip_files(d):
+ cpath = oe.cachedpath.CachedPath()
+
+ dvar = d.getVar('PKGD')
+ pn = d.getVar('PN')
+ hostos = d.getVar('HOST_OS')
+
+ oldcwd = os.getcwd()
+ os.chdir(dvar)
+
+ dv = package_debug_vars(d)
+
+ #
+    # First let's figure out all of the files we may have to process ... do this only once!
+ #
+ elffiles = {}
+ symlinks = {}
+ staticlibs = []
+ inodes = {}
+ libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+ baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+ skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
+ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+ d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
+ checkelf = {}
+ checkelflinks = {}
+ for root, dirs, files in cpath.walk(dvar):
+ for f in files:
+ file = os.path.join(root, f)
+
+ # Skip debug files
+ if dv["append"] and file.endswith(dv["append"]):
+ continue
+ if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
+ continue
+
+ if file in skipfiles:
+ continue
+
+ if oe.package.is_static_lib(file):
+ staticlibs.append(file)
+ continue
+
+ try:
+ ltarget = cpath.realpath(file, dvar, False)
+ s = cpath.lstat(ltarget)
+ except OSError as e:
+ (err, strerror) = e.args
+ if err != errno.ENOENT:
+ raise
+ # Skip broken symlinks
+ continue
+ if not s:
+ continue
+ # Check its an executable
+ if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
+ or (s[stat.ST_MODE] & stat.S_IXOTH) \
+ or ((file.startswith(libdir) or file.startswith(baselibdir)) \
+ and (".so" in f or ".node" in f)) \
+ or (f.startswith('vmlinux') or ".ko" in f):
+
+ if cpath.islink(file):
+ checkelflinks[file] = ltarget
+ continue
+ # Use a reference of device ID and inode number to identify files
+ file_reference = "%d_%d" % (s.st_dev, s.st_ino)
+ checkelf[file] = (file, file_reference)
+
+ results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
+ results_map = {}
+ for (ltarget, elf_file) in results:
+ results_map[ltarget] = elf_file
+ for file in checkelflinks:
+ ltarget = checkelflinks[file]
+ # If it's a symlink, and points to an ELF file, we capture the readlink target
+ if results_map[ltarget]:
+ target = os.readlink(file)
+ #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
+ symlinks[file] = target
+
+ results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
+
+ # Sort results by file path. This ensures that the files are always
+ # processed in the same order, which is important to make sure builds
+ # are reproducible when dealing with hardlinks
+ results.sort(key=lambda x: x[0])
+
+ for (file, elf_file) in results:
+ # It's a file (or hardlink), not a link
+ # ...but is it ELF, and is it already stripped?
+ if elf_file & 1:
+ if elf_file & 2:
+ if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
+ bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
+ else:
+ msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
+ oe.qa.handle_error("already-stripped", msg, d)
+ continue
+
+ # At this point we have an unstripped elf file. We need to:
+ # a) Make sure any file we strip is not hardlinked to anything else outside this tree
+ # b) Only strip any hardlinked file once (no races)
+ # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
+
+ # Use a reference of device ID and inode number to identify files
+ file_reference = checkelf[file][1]
+ if file_reference in inodes:
+ os.unlink(file)
+ os.link(inodes[file_reference][0], file)
+ inodes[file_reference].append(file)
+ else:
+ inodes[file_reference] = [file]
+ # break hardlink
+ bb.utils.break_hardlinks(file)
+ elffiles[file] = elf_file
+ # Modified the file so clear the cache
+ cpath.updatecache(file)
+
+ def strip_pkgd_prefix(f):
+ nonlocal dvar
+
+ if f.startswith(dvar):
+ return f[len(dvar):]
+
+ return f
+
+ #
+    # First let's process debug splitting
+ #
+ if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
+ results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
+
+ if dv["srcdir"] and not hostos.startswith("mingw"):
+ if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
+ else:
+ for file in staticlibs:
+ results.append( (file,source_info(file, d)) )
+
+ d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
+
+ sources = set()
+ for r in results:
+ sources.update(r[1])
+
+ # Hardlink our debug symbols to the other hardlink copies
+ for ref in inodes:
+ if len(inodes[ref]) == 1:
+ continue
+
+ target = inodes[ref][0][len(dvar):]
+ for file in inodes[ref][1:]:
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
+ fpath = dvar + dest
+ ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ # Only one hardlink of separated debug info file in each directory
+ if not os.access(fpath, os.R_OK):
+ #bb.note("Link %s -> %s" % (fpath, ftarget))
+ os.link(ftarget, fpath)
+
+ # Create symlinks for all cases we were able to split symbols
+ for file in symlinks:
+ src = file[len(dvar):]
+ dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
+ fpath = dvar + dest
+ # Skip it if the target doesn't exist
+ try:
+ s = os.stat(fpath)
+ except OSError as e:
+ (err, strerror) = e.args
+ if err != errno.ENOENT:
+ raise
+ continue
+
+ ltarget = symlinks[file]
+ lpath = os.path.dirname(ltarget)
+ lbase = os.path.basename(ltarget)
+ ftarget = ""
+ if lpath and lpath != ".":
+ ftarget += lpath + dv["dir"] + "/"
+ ftarget += lbase + dv["append"]
+ if lpath.startswith(".."):
+ ftarget = os.path.join("..", ftarget)
+ bb.utils.mkdirhier(os.path.dirname(fpath))
+ #bb.note("Symlink %s -> %s" % (fpath, ftarget))
+ os.symlink(ftarget, fpath)
+
+ # Process the dv["srcdir"] if requested...
+ # This copies and places the referenced sources for later debugging...
+ copydebugsources(dv["srcdir"], sources, d)
+ #
+ # End of debug splitting
+ #
+
+ #
+    # Now let's go back over things and strip them
+ #
+ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
+ strip = d.getVar("STRIP")
+ sfiles = []
+ for file in elffiles:
+ elf_file = int(elffiles[file])
+ #bb.note("Strip %s" % file)
+ sfiles.append((file, elf_file, strip))
+ if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
+ for f in staticlibs:
+ sfiles.append((f, 16, strip))
+
+ oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
+
+ # Build "minidebuginfo" and reinject it back into the stripped binaries
+ if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
+ oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
+ extraargs=(dvar, dv, d))
+
+ #
+ # End of strip
+ #
+ os.chdir(oldcwd)
+
+
+def populate_packages(d):
+ cpath = oe.cachedpath.CachedPath()
+
+ workdir = d.getVar('WORKDIR')
+ outdir = d.getVar('DEPLOY_DIR')
+ dvar = d.getVar('PKGD')
+ packages = d.getVar('PACKAGES').split()
+ pn = d.getVar('PN')
+
+ bb.utils.mkdirhier(outdir)
+ os.chdir(dvar)
+
+ autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
+
+ split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
+
+ # If debug-with-srcpkg mode is enabled then add the source package if it
+ # doesn't exist and add the source file contents to the source package.
+ if split_source_package:
+ src_package_name = ('%s-src' % d.getVar('PN'))
+ if not src_package_name in packages:
+ packages.append(src_package_name)
+ d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
+
+ # Sanity check PACKAGES for duplicates
+ # Sanity should be moved to sanity.bbclass once we have the infrastructure
+ package_dict = {}
+
+ for i, pkg in enumerate(packages):
+ if pkg in package_dict:
+ msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
+ oe.qa.handle_error("packages-list", msg, d)
+ # Ensure the source package gets the chance to pick up the source files
+ # before the debug package by ordering it first in PACKAGES. Whether it
+ # actually picks up any source files is controlled by
+ # PACKAGE_DEBUG_SPLIT_STYLE.
+ elif pkg.endswith("-src"):
+ package_dict[pkg] = (10, i)
+ elif autodebug and pkg.endswith("-dbg"):
+ package_dict[pkg] = (30, i)
+ else:
+ package_dict[pkg] = (50, i)
+ packages = sorted(package_dict.keys(), key=package_dict.get)
+ d.setVar('PACKAGES', ' '.join(packages))
+ pkgdest = d.getVar('PKGDEST')
+
+ seen = []
+
+ # os.mkdir masks the permissions with umask so we have to unset it first
+ oldumask = os.umask(0)
+
+ debug = []
+ for root, dirs, files in cpath.walk(dvar):
+ dir = root[len(dvar):]
+ if not dir:
+ dir = os.sep
+ for f in (files + dirs):
+ path = "." + os.path.join(dir, f)
+ if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
+ debug.append(path)
+
+ for pkg in packages:
+ root = os.path.join(pkgdest, pkg)
+ bb.utils.mkdirhier(root)
+
+ filesvar = d.getVar('FILES:%s' % pkg) or ""
+ if "//" in filesvar:
+ msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
+ oe.qa.handle_error("files-invalid", msg, d)
+ filesvar.replace("//", "/")
+
+ origfiles = filesvar.split()
+ files, symlink_paths = oe.package.files_from_filevars(origfiles)
+
+ if autodebug and pkg.endswith("-dbg"):
+ files.extend(debug)
+
+ for file in files:
+ if (not cpath.islink(file)) and (not cpath.exists(file)):
+ continue
+ if file in seen:
+ continue
+ seen.append(file)
+
+ def mkdir(src, dest, p):
+ src = os.path.join(src, p)
+ dest = os.path.join(dest, p)
+ fstat = cpath.stat(src)
+ os.mkdir(dest)
+ os.chmod(dest, fstat.st_mode)
+ os.chown(dest, fstat.st_uid, fstat.st_gid)
+ if p not in seen:
+ seen.append(p)
+ cpath.updatecache(dest)
+
+ def mkdir_recurse(src, dest, paths):
+ if cpath.exists(dest + '/' + paths):
+ return
+ while paths.startswith("./"):
+ paths = paths[2:]
+ p = "."
+ for c in paths.split("/"):
+ p = os.path.join(p, c)
+ if not cpath.exists(os.path.join(dest, p)):
+ mkdir(src, dest, p)
+
+ if cpath.isdir(file) and not cpath.islink(file):
+ mkdir_recurse(dvar, root, file)
+ continue
+
+ mkdir_recurse(dvar, root, os.path.dirname(file))
+ fpath = os.path.join(root,file)
+ if not cpath.islink(file):
+ os.link(file, fpath)
+ continue
+ ret = bb.utils.copyfile(file, fpath)
+ if ret is False or ret == 0:
+ bb.fatal("File population failed")
+
+ # Check if symlink paths exist
+ for file in symlink_paths:
+ if not os.path.exists(os.path.join(root,file)):
+ bb.fatal("File '%s' cannot be packaged into '%s' because its "
+ "parent directory structure does not exist. One of "
+ "its parent directories is a symlink whose target "
+ "directory is not included in the package." %
+ (file, pkg))
+
+ os.umask(oldumask)
+ os.chdir(workdir)
+
+ # Handle excluding packages with incompatible licenses
+ package_list = []
+ for pkg in packages:
+ licenses = d.getVar('_exclude_incompatible-' + pkg)
+ if licenses:
+ msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
+ oe.qa.handle_error("incompatible-license", msg, d)
+ else:
+ package_list.append(pkg)
+ d.setVar('PACKAGES', ' '.join(package_list))
+
+ unshipped = []
+ for root, dirs, files in cpath.walk(dvar):
+ dir = root[len(dvar):]
+ if not dir:
+ dir = os.sep
+ for f in (files + dirs):
+ path = os.path.join(dir, f)
+ if ('.' + path) not in seen:
+ unshipped.append(path)
+
+ if unshipped != []:
+ msg = pn + ": Files/directories were installed but not shipped in any package:"
+ if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
+ bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
+ else:
+ for f in unshipped:
+ msg = msg + "\n " + f
+ msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
+ msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
+ oe.qa.handle_error("installed-vs-shipped", msg, d)
+
+def process_fixsymlinks(pkgfiles, d):
+ cpath = oe.cachedpath.CachedPath()
+ pkgdest = d.getVar('PKGDEST')
+ packages = d.getVar("PACKAGES", False).split()
+
+ dangling_links = {}
+ pkg_files = {}
+ for pkg in packages:
+ dangling_links[pkg] = []
+ pkg_files[pkg] = []
+ inst_root = os.path.join(pkgdest, pkg)
+ for path in pkgfiles[pkg]:
+ rpath = path[len(inst_root):]
+ pkg_files[pkg].append(rpath)
+ rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
+ if not cpath.lexists(rtarget):
+ dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
+
+ newrdepends = {}
+ for pkg in dangling_links:
+ for l in dangling_links[pkg]:
+ found = False
+ bb.debug(1, "%s contains dangling link %s" % (pkg, l))
+ for p in packages:
+ if l in pkg_files[p]:
+ found = True
+ bb.debug(1, "target found in %s" % p)
+ if p == pkg:
+ break
+ if pkg not in newrdepends:
+ newrdepends[pkg] = []
+ newrdepends[pkg].append(p)
+ break
+ if found == False:
+ bb.note("%s contains dangling symlink to %s" % (pkg, l))
+
+ for pkg in newrdepends:
+ rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
+ for p in newrdepends[pkg]:
+ if p not in rdepends:
+ rdepends[p] = []
+ d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
+
+def process_filedeps(pkgfiles, d):
+ """
+ Collect perfile run-time dependency metadata
+ Output:
+ FILERPROVIDESFLIST:pkg - list of all files w/ deps
+ FILERPROVIDES:filepath:pkg - per file dep
+
+ FILERDEPENDSFLIST:pkg - list of all files w/ deps
+ FILERDEPENDS:filepath:pkg - per file dep
+ """
+ if d.getVar('SKIP_FILEDEPS') == '1':
+ return
+
+ pkgdest = d.getVar('PKGDEST')
+ packages = d.getVar('PACKAGES')
+ rpmdeps = d.getVar('RPMDEPS')
+
+ def chunks(files, n):
+ return [files[i:i+n] for i in range(0, len(files), n)]
+
+ pkglist = []
+ for pkg in packages.split():
+ if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
+ continue
+ if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
+ continue
+ for files in chunks(pkgfiles[pkg], 100):
+ pkglist.append((pkg, files, rpmdeps, pkgdest))
+
+ processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
+
+ provides_files = {}
+ requires_files = {}
+
+ for result in processed:
+ (pkg, provides, requires) = result
+
+ if pkg not in provides_files:
+ provides_files[pkg] = []
+ if pkg not in requires_files:
+ requires_files[pkg] = []
+
+ for file in sorted(provides):
+ provides_files[pkg].append(file)
+ key = "FILERPROVIDES:" + file + ":" + pkg
+ d.appendVar(key, " " + " ".join(provides[file]))
+
+ for file in sorted(requires):
+ requires_files[pkg].append(file)
+ key = "FILERDEPENDS:" + file + ":" + pkg
+ d.appendVar(key, " " + " ".join(requires[file]))
+
+ for pkg in requires_files:
+ d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
+ for pkg in provides_files:
+ d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
+
+def process_shlibs(pkgfiles, d):
+ cpath = oe.cachedpath.CachedPath()
+
+ exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
+ if exclude_shlibs:
+ bb.note("not generating shlibs")
+ return
+
+ lib_re = re.compile(r"^.*\.so")
+ libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
+
+ packages = d.getVar('PACKAGES')
+
+ shlib_pkgs = []
+ exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
+ if exclusion_list:
+ for pkg in packages.split():
+ if pkg not in exclusion_list.split():
+ shlib_pkgs.append(pkg)
+ else:
+ bb.note("not generating shlibs for %s" % pkg)
+ else:
+ shlib_pkgs = packages.split()
+
+ hostos = d.getVar('HOST_OS')
+
+ workdir = d.getVar('WORKDIR')
+
+ ver = d.getVar('PKGV')
+ if not ver:
+ msg = "PKGV not defined"
+ oe.qa.handle_error("pkgv-undefined", msg, d)
+ return
+
+ pkgdest = d.getVar('PKGDEST')
+
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR')
+
+ def linux_so(file, pkg, pkgver, d):
+ needs_ldconfig = False
+ needed = set()
+ sonames = set()
+ renames = []
+ ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
+ cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
+ fd = os.popen(cmd)
+ lines = fd.readlines()
+ fd.close()
+ rpath = tuple()
+ for l in lines:
+ m = re.match(r"\s+RPATH\s+([^\s]*)", l)
+ if m:
+ rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
+ rpath = tuple(map(os.path.normpath, rpaths))
+ for l in lines:
+ m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
+ if m:
+ dep = m.group(1)
+ if dep not in needed:
+ needed.add((dep, file, rpath))
+ m = re.match(r"\s+SONAME\s+([^\s]*)", l)
+ if m:
+ this_soname = m.group(1)
+ prov = (this_soname, ldir, pkgver)
+ if not prov in sonames:
+ # if library is private (only used by package) then do not build shlib for it
+ if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
+ sonames.add(prov)
+ if libdir_re.match(os.path.dirname(file)):
+ needs_ldconfig = True
+ if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
+ renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
+ return (needs_ldconfig, needed, sonames, renames)
+
+ def darwin_so(file, needed, sonames, renames, pkgver):
+ if not os.path.exists(file):
+ return
+ ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
+
+ def get_combinations(base):
+ #
+ # Given a base library name, find all combinations of this split by "." and "-"
+ #
+ combos = []
+ options = base.split(".")
+ for i in range(1, len(options) + 1):
+ combos.append(".".join(options[0:i]))
+ options = base.split("-")
+ for i in range(1, len(options) + 1):
+ combos.append("-".join(options[0:i]))
+ return combos
+
+ if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
+ # Drop suffix
+ name = os.path.basename(file).rsplit(".",1)[0]
+ # Find all combinations
+ combos = get_combinations(name)
+ for combo in combos:
+ if not combo in sonames:
+ prov = (combo, ldir, pkgver)
+ sonames.add(prov)
+ if file.endswith('.dylib') or file.endswith('.so'):
+ rpath = []
+ p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ # If returned successfully, process stdout for results
+ if p.returncode == 0:
+ for l in out.split("\n"):
+ l = l.strip()
+ if l.startswith('path '):
+ rpath.append(l.split()[1])
+
+ p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ # If returned successfully, process stdout for results
+ if p.returncode == 0:
+ for l in out.split("\n"):
+ l = l.strip()
+ if not l or l.endswith(":"):
+ continue
+ if "is not an object file" in l:
+ continue
+ name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
+ if name and name not in needed[pkg]:
+ needed[pkg].add((name, file, tuple()))
+
+ def mingw_dll(file, needed, sonames, renames, pkgver):
+ if not os.path.exists(file):
+ return
+
+ if file.endswith(".dll"):
+ # assume all dlls are shared objects provided by the package
+ sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
+
+ if (file.endswith(".dll") or file.endswith(".exe")):
+ # use objdump to search for "DLL Name: .*\.dll"
+ p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ # process the output, grabbing all .dll names
+ if p.returncode == 0:
+ for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
+ dllname = m.group(1)
+ if dllname:
+ needed[pkg].add((dllname, file, tuple()))
+
+ if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
+ snap_symlinks = True
+ else:
+ snap_symlinks = False
+
+ needed = {}
+
+ shlib_provider = oe.package.read_shlib_providers(d)
+
+ for pkg in shlib_pkgs:
+ private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
+ private_libs = private_libs.split()
+ needs_ldconfig = False
+ bb.debug(2, "calculating shlib provides for %s" % pkg)
+
+ pkgver = d.getVar('PKGV:' + pkg)
+ if not pkgver:
+ pkgver = d.getVar('PV_' + pkg)
+ if not pkgver:
+ pkgver = ver
+
+ needed[pkg] = set()
+ sonames = set()
+ renames = []
+ linuxlist = []
+ for file in pkgfiles[pkg]:
+ soname = None
+ if cpath.islink(file):
+ continue
+ if hostos == "darwin" or hostos == "darwin8":
+ darwin_so(file, needed, sonames, renames, pkgver)
+ elif hostos.startswith("mingw"):
+ mingw_dll(file, needed, sonames, renames, pkgver)
+ elif os.access(file, os.X_OK) or lib_re.match(file):
+ linuxlist.append(file)
+
+ if linuxlist:
+ results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
+ for r in results:
+ ldconfig = r[0]
+ needed[pkg] |= r[1]
+ sonames |= r[2]
+ renames.extend(r[3])
+ needs_ldconfig = needs_ldconfig or ldconfig
+
+ for (old, new) in renames:
+ bb.note("Renaming %s to %s" % (old, new))
+ bb.utils.rename(old, new)
+ pkgfiles[pkg].remove(old)
+
+ shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
+ if len(sonames):
+ with open(shlibs_file, 'w') as fd:
+ for s in sorted(sonames):
+ if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
+ (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
+ if old_pkg != pkg:
+ bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
+ bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
+ fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
+ if s[0] not in shlib_provider:
+ shlib_provider[s[0]] = {}
+ shlib_provider[s[0]][s[1]] = (pkg, pkgver)
+ if needs_ldconfig:
+ bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
+ postinst = d.getVar('pkg_postinst:%s' % pkg)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += d.getVar('ldconfig_postinst_fragment')
+ d.setVar('pkg_postinst:%s' % pkg, postinst)
+ bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
+
+ assumed_libs = d.getVar('ASSUME_SHLIBS')
+ if assumed_libs:
+ libdir = d.getVar("libdir")
+ for e in assumed_libs.split():
+ l, dep_pkg = e.split(":")
+ lib_ver = None
+ dep_pkg = dep_pkg.rsplit("_", 1)
+ if len(dep_pkg) == 2:
+ lib_ver = dep_pkg[1]
+ dep_pkg = dep_pkg[0]
+ if l not in shlib_provider:
+ shlib_provider[l] = {}
+ shlib_provider[l][libdir] = (dep_pkg, lib_ver)
+
+ libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
+
+ for pkg in shlib_pkgs:
+ bb.debug(2, "calculating shlib requirements for %s" % pkg)
+
+ private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
+ private_libs = private_libs.split()
+
+ deps = list()
+ for n in needed[pkg]:
+ # if n is in private libraries, don't try to search provider for it
+            # this could cause a problem in case some abc.bb provides private
+            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on the system library libfoo.so.1
+            # but skipping it is still a better alternative than providing our own
+            # version and then adding a runtime dependency for the same system library
+ if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
+ bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
+ continue
+ if n[0] in shlib_provider.keys():
+ shlib_provider_map = shlib_provider[n[0]]
+ matches = set()
+ for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
+ if p in shlib_provider_map:
+ matches.add(p)
+ if len(matches) > 1:
+ matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
+ bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
+ elif len(matches) == 1:
+ (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
+
+ bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
+
+ if dep_pkg == pkg:
+ continue
+
+ if ver_needed:
+ dep = "%s (>= %s)" % (dep_pkg, ver_needed)
+ else:
+ dep = dep_pkg
+ if not dep in deps:
+ deps.append(dep)
+ continue
+ bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
+
+ deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
+ if os.path.exists(deps_file):
+ os.remove(deps_file)
+ if deps:
+ with open(deps_file, 'w') as fd:
+ for dep in sorted(deps):
+ fd.write(dep + '\n')
+
+def process_pkgconfig(pkgfiles, d):
+ packages = d.getVar('PACKAGES')
+ workdir = d.getVar('WORKDIR')
+ pkgdest = d.getVar('PKGDEST')
+
+ shlibs_dirs = d.getVar('SHLIBSDIRS').split()
+ shlibswork_dir = d.getVar('SHLIBSWORKDIR')
+
+ pc_re = re.compile(r'(.*)\.pc$')
+ var_re = re.compile(r'(.*)=(.*)')
+ field_re = re.compile(r'(.*): (.*)')
+
+ pkgconfig_provided = {}
+ pkgconfig_needed = {}
+ for pkg in packages.split():
+ pkgconfig_provided[pkg] = []
+ pkgconfig_needed[pkg] = []
+ for file in sorted(pkgfiles[pkg]):
+ m = pc_re.match(file)
+ if m:
+ pd = bb.data.init()
+ name = m.group(1)
+ pkgconfig_provided[pkg].append(os.path.basename(name))
+ if not os.access(file, os.R_OK):
+ continue
+ with open(file, 'r') as f:
+ lines = f.readlines()
+ for l in lines:
+ m = var_re.match(l)
+ if m:
+ name = m.group(1)
+ val = m.group(2)
+ pd.setVar(name, pd.expand(val))
+ continue
+ m = field_re.match(l)
+ if m:
+ hdr = m.group(1)
+ exp = pd.expand(m.group(2))
+ if hdr == 'Requires':
+ pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
+
+ for pkg in packages.split():
+ pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
+ if pkgconfig_provided[pkg] != []:
+ with open(pkgs_file, 'w') as f:
+ for p in sorted(pkgconfig_provided[pkg]):
+ f.write('%s\n' % p)
+
+ # Go from least to most specific since the last one found wins
+ for dir in reversed(shlibs_dirs):
+ if not os.path.exists(dir):
+ continue
+ for file in sorted(os.listdir(dir)):
+ m = re.match(r'^(.*)\.pclist$', file)
+ if m:
+ pkg = m.group(1)
+ with open(os.path.join(dir, file)) as fd:
+ lines = fd.readlines()
+ pkgconfig_provided[pkg] = []
+ for l in lines:
+ pkgconfig_provided[pkg].append(l.rstrip())
+
+ for pkg in packages.split():
+ deps = []
+ for n in pkgconfig_needed[pkg]:
+ found = False
+ for k in pkgconfig_provided.keys():
+ if n in pkgconfig_provided[k]:
+ if k != pkg and not (k in deps):
+ deps.append(k)
+ found = True
+ if found == False:
+ bb.note("couldn't find pkgconfig module '%s' in any package" % n)
+ deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
+ if len(deps):
+ with open(deps_file, 'w') as fd:
+ for dep in deps:
+ fd.write(dep + '\n')
+
+def read_libdep_files(d):
+ pkglibdeps = {}
+ packages = d.getVar('PACKAGES').split()
+ for pkg in packages:
+ pkglibdeps[pkg] = {}
+ for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
+ depsfile = d.expand("${PKGDEST}/" + pkg + extension)
+ if os.access(depsfile, os.R_OK):
+ with open(depsfile) as fd:
+ lines = fd.readlines()
+ for l in lines:
+ l.rstrip()
+ deps = bb.utils.explode_dep_versions2(l)
+ for dep in deps:
+ if not dep in pkglibdeps[pkg]:
+ pkglibdeps[pkg][dep] = deps[dep]
+ return pkglibdeps
+
+def process_depchains(pkgfiles, d):
+ """
+ For a given set of prefix and postfix modifiers, make those packages
+ RRECOMMENDS on the corresponding packages for its RDEPENDS.
+
+ Example: If package A depends upon package B, and A's .bb emits an
+ A-dev package, this would make A-dev Recommends: B-dev.
+
+ If only one of a given suffix is specified, it will take the RRECOMMENDS
+ based on the RDEPENDS of *all* other packages. If more than one of a given
+    suffix is specified, it will only use the RDEPENDS of the single parent
+ package.
+ """
+
+ packages = d.getVar('PACKAGES')
+ postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
+ prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
+
+ def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
+
+ #bb.note('depends for %s is %s' % (base, depends))
+ rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
+
+ for depend in sorted(depends):
+ if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
+ #bb.note("Skipping %s" % depend)
+ continue
+ if depend.endswith('-dev'):
+ depend = depend[:-4]
+ if depend.endswith('-dbg'):
+ depend = depend[:-4]
+ pkgname = getname(depend, suffix)
+ #bb.note("Adding %s for %s" % (pkgname, depend))
+ if pkgname not in rreclist and pkgname != pkg:
+ rreclist[pkgname] = []
+
+ #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
+ d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+ def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
+
+ #bb.note('rdepends for %s is %s' % (base, rdepends))
+ rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
+
+ for depend in sorted(rdepends):
+ if depend.find('virtual-locale-') != -1:
+ #bb.note("Skipping %s" % depend)
+ continue
+ if depend.endswith('-dev'):
+ depend = depend[:-4]
+ if depend.endswith('-dbg'):
+ depend = depend[:-4]
+ pkgname = getname(depend, suffix)
+ #bb.note("Adding %s for %s" % (pkgname, depend))
+ if pkgname not in rreclist and pkgname != pkg:
+ rreclist[pkgname] = []
+
+ #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
+ d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
+
+ def add_dep(list, dep):
+ if dep not in list:
+ list.append(dep)
+
+ depends = []
+ for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
+ add_dep(depends, dep)
+
+ rdepends = []
+ for pkg in packages.split():
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
+ add_dep(rdepends, dep)
+
+ #bb.note('rdepends is %s' % rdepends)
+
+ def post_getname(name, suffix):
+ return '%s%s' % (name, suffix)
+ def pre_getname(name, suffix):
+ return '%s%s' % (suffix, name)
+
+ pkgs = {}
+ for pkg in packages.split():
+ for postfix in postfixes:
+ if pkg.endswith(postfix):
+ if not postfix in pkgs:
+ pkgs[postfix] = {}
+ pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
+
+ for prefix in prefixes:
+ if pkg.startswith(prefix):
+ if not prefix in pkgs:
+ pkgs[prefix] = {}
+ pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
+
+ if "-dbg" in pkgs:
+ pkglibdeps = read_libdep_files(d)
+ pkglibdeplist = []
+ for pkg in pkglibdeps:
+ for k in pkglibdeps[pkg]:
+ add_dep(pkglibdeplist, k)
+ dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
+
+ for suffix in pkgs:
+ for pkg in pkgs[suffix]:
+ if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
+ continue
+ (base, func) = pkgs[suffix][pkg]
+ if suffix == "-dev":
+ pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
+ elif suffix == "-dbg":
+ if not dbgdefaultdeps:
+ pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
+ continue
+ if len(pkgs[suffix]) == 1:
+ pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
+ else:
+ rdeps = []
+ for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
+ add_dep(rdeps, dep)
+ pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
+
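As a rough illustration of the suffix mapping described in the docstring above, here is a minimal standalone sketch; the package names and the '-dev'/'-dbg' postfix list are illustrative assumptions mirroring the default DEPCHAIN_POST handling, not the exact code added by the patch:

    # Hypothetical sketch of the depchain suffix mapping described above.
    def depchain_recommends(pkg, rdepends, postfixes=("-dev", "-dbg")):
        """Return the RRECOMMENDS entries a suffixed package would gain."""
        recs = []
        for suffix in postfixes:
            if not pkg.endswith(suffix):
                continue
            for depend in sorted(rdepends):
                # strip an existing -dev/-dbg before re-appending the suffix,
                # as pkg_addrrecs()/pkg_adddeprrecs() do above
                for s in ("-dev", "-dbg"):
                    if depend.endswith(s):
                        depend = depend[:-4]
                name = depend + suffix
                if name != pkg and name not in recs:
                    recs.append(name)
        return recs

    # If package A rdepends on B, A-dev picks up a recommendation on B-dev:
    print(depchain_recommends("A-dev", ["B", "libc6"]))   # ['B-dev', 'libc6-dev']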
diff --git a/poky/meta/lib/oe/packagedata.py b/poky/meta/lib/oe/packagedata.py
index b2ed8b5..162ff60 100644
--- a/poky/meta/lib/oe/packagedata.py
+++ b/poky/meta/lib/oe/packagedata.py
@@ -6,6 +6,11 @@
import codecs
import os
+import json
+import bb.compress.zstd
+import oe.path
+
+from glob import glob
def packaged(pkg, d):
return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
@@ -110,3 +115,237 @@
"""Return the recipe name for the given binary package name."""
return pkgmap(d).get(pkg)
+
+def get_package_mapping(pkg, basepkg, d, depversions=None):
+ import oe.packagedata
+
+ data = oe.packagedata.read_subpkgdata(pkg, d)
+ key = "PKG:%s" % pkg
+
+ if key in data:
+ if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
+ bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
+ # Have to avoid undoing the write_extra_pkgs(global_variants...)
+ if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
+ and data[key] == basepkg:
+ return pkg
+ if depversions == []:
+ # Avoid returning a mapping if the renamed package rprovides its original name
+ rprovkey = "RPROVIDES:%s" % pkg
+ if rprovkey in data:
+ if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
+ bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
+ return pkg
+ # Do map to rewritten package name
+ return data[key]
+
+ return pkg
+
+def get_package_additional_metadata(pkg_type, d):
+ base_key = "PACKAGE_ADD_METADATA"
+ for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
+ if d.getVar(key, False) is None:
+ continue
+ d.setVarFlag(key, "type", "list")
+ if d.getVarFlag(key, "separator") is None:
+ d.setVarFlag(key, "separator", "\\n")
+ metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
+ return "\n".join(metadata_fields).strip()
+
+def runtime_mapping_rename(varname, pkg, d):
+ #bb.note("%s before: %s" % (varname, d.getVar(varname)))
+
+ new_depends = {}
+ deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
+ for depend, depversions in deps.items():
+ new_depend = get_package_mapping(depend, pkg, d, depversions)
+ if depend != new_depend:
+ bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
+ new_depends[new_depend] = deps[depend]
+
+ d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
+
+ #bb.note("%s after: %s" % (varname, d.getVar(varname)))
+
+def emit_pkgdata(pkgfiles, d):
+ def process_postinst_on_target(pkg, mlprefix):
+ pkgval = d.getVar('PKG:%s' % pkg)
+ if pkgval is None:
+ pkgval = pkg
+
+ defer_fragment = """
+if [ -n "$D" ]; then
+ $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
+ exit 0
+fi
+""" % (pkgval, mlprefix)
+
+ postinst = d.getVar('pkg_postinst:%s' % pkg)
+ postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
+
+ if postinst_ontarget:
+ bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
+ if not postinst:
+ postinst = '#!/bin/sh\n'
+ postinst += defer_fragment
+ postinst += postinst_ontarget
+ d.setVar('pkg_postinst:%s' % pkg, postinst)
+
+ def add_set_e_to_scriptlets(pkg):
+ for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
+ scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
+ if scriptlet:
+ scriptlet_split = scriptlet.split('\n')
+ if scriptlet_split[0].startswith("#!"):
+ scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
+ else:
+ scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
+ d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
+
+ def write_if_exists(f, pkg, var):
+ def encode(str):
+ import codecs
+ c = codecs.getencoder("unicode_escape")
+ return c(str)[0].decode("latin1")
+
+ val = d.getVar('%s:%s' % (var, pkg))
+ if val:
+ f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
+ return val
+ val = d.getVar('%s' % (var))
+ if val:
+ f.write('%s: %s\n' % (var, encode(val)))
+ return val
+
+ def write_extra_pkgs(variants, pn, packages, pkgdatadir):
+ for variant in variants:
+ with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
+ fd.write("PACKAGES: %s\n" % ' '.join(
+ map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
+
+ def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
+ for variant in variants:
+ for pkg in packages.split():
+ ml_pkg = "%s-%s" % (variant, pkg)
+ subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
+ with open(subdata_file, 'w') as fd:
+ fd.write("PKG:%s: %s" % (ml_pkg, pkg))
+
+ packages = d.getVar('PACKAGES')
+ pkgdest = d.getVar('PKGDEST')
+ pkgdatadir = d.getVar('PKGDESTWORK')
+
+ data_file = pkgdatadir + d.expand("/${PN}")
+ with open(data_file, 'w') as fd:
+ fd.write("PACKAGES: %s\n" % packages)
+
+ pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
+
+ pn = d.getVar('PN')
+ global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
+ variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
+
+ if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
+ write_extra_pkgs(variants, pn, packages, pkgdatadir)
+
+ if bb.data.inherits_class('allarch', d) and not variants \
+ and not bb.data.inherits_class('packagegroup', d):
+ write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
+
+ workdir = d.getVar('WORKDIR')
+
+ for pkg in packages.split():
+ pkgval = d.getVar('PKG:%s' % pkg)
+ if pkgval is None:
+ pkgval = pkg
+ d.setVar('PKG:%s' % pkg, pkg)
+
+ extended_data = {
+ "files_info": {}
+ }
+
+ pkgdestpkg = os.path.join(pkgdest, pkg)
+ files = {}
+ files_extra = {}
+ total_size = 0
+ seen = set()
+ for f in pkgfiles[pkg]:
+ fpath = os.sep + os.path.relpath(f, pkgdestpkg)
+
+ fstat = os.lstat(f)
+ files[fpath] = fstat.st_size
+
+ extended_data["files_info"].setdefault(fpath, {})
+ extended_data["files_info"][fpath]['size'] = fstat.st_size
+
+ if fstat.st_ino not in seen:
+ seen.add(fstat.st_ino)
+ total_size += fstat.st_size
+
+ if fpath in pkgdebugsource:
+ extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
+ del pkgdebugsource[fpath]
+
+ d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
+
+ process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
+ add_set_e_to_scriptlets(pkg)
+
+ subdata_file = pkgdatadir + "/runtime/%s" % pkg
+ with open(subdata_file, 'w') as sf:
+ for var in (d.getVar('PKGDATA_VARS') or "").split():
+ val = write_if_exists(sf, pkg, var)
+
+ write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
+ for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
+
+ write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
+ for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
+
+ sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
+
+ subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+ with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
+ json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
+
+ # Symlinks needed for rprovides lookup
+ rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
+ if rprov:
+ for p in bb.utils.explode_deps(rprov):
+ subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
+ bb.utils.mkdirhier(os.path.dirname(subdata_sym))
+ oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
+
+ allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
+ if not allow_empty:
+ allow_empty = d.getVar('ALLOW_EMPTY')
+ root = "%s/%s" % (pkgdest, pkg)
+ os.chdir(root)
+ g = glob('*')
+ if g or allow_empty == "1":
+ # Symlinks needed for reverse lookups (from the final package name)
+ subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
+ oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
+
+ packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
+ open(packagedfile, 'w').close()
+
+ if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
+ write_extra_runtime_pkgs(variants, packages, pkgdatadir)
+
+ if bb.data.inherits_class('allarch', d) and not variants \
+ and not bb.data.inherits_class('packagegroup', d):
+ write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
+
+def mapping_rename_hook(d):
+ """
+ Rewrite variables to account for package renaming in things
+ like debian.bbclass or manual PKG variable name changes
+ """
+ pkg = d.getVar("PKG")
+ oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
+ oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
+ oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
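The runtime pkgdata files emitted above are plain text with one 'VAR:pkg: value' (or 'VAR: value') entry per line, as written by write_if_exists(), plus an extended/<pkg>.json.zstd blob carrying per-file details. A minimal sketch of reading such a runtime file back, with an illustrative path and package name (real consumers should go through the oe.packagedata helpers):

    # Hypothetical reader for a pkgdata runtime file of "VAR[:pkg]: value" lines.
    import re

    def read_runtime_pkgdata(path):
        values = {}
        entry = re.compile(r"(^.+?):\s+(.*)")   # key is everything before the first ": "
        with open(path) as f:
            for line in f:
                m = entry.match(line.rstrip("\n"))
                if m:
                    values[m.group(1)] = m.group(2)
        return values

    # e.g. read_runtime_pkgdata("tmp/pkgdata/core2-64/runtime/zlib").get("PKGSIZE:zlib")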
diff --git a/poky/meta/lib/oe/sstatesig.py b/poky/meta/lib/oe/sstatesig.py
index aa273df..f022445 100644
--- a/poky/meta/lib/oe/sstatesig.py
+++ b/poky/meta/lib/oe/sstatesig.py
@@ -101,15 +101,6 @@
sigs[pn][task] = [h, siggen_lockedsigs_var]
return sigs
-class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
- name = "OEBasic"
- def init_rundepcheck(self, data):
- self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
- self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
- pass
- def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None):
- return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
-
class SignatureGeneratorOEBasicHashMixIn(object):
supports_multiconfig_datacaches = True
@@ -326,7 +317,6 @@
bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
# Insert these classes into siggen's namespace so it can see and select them
-bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
@@ -469,11 +459,15 @@
pkgarchs.append('allarch')
pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
+ searched_manifests = []
+
for pkgarch in pkgarchs:
manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
if os.path.exists(manifest):
return manifest, d2
- bb.fatal("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
+ searched_manifests.append(manifest)
+ bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
+ % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
return None, d2
def OEOuthashBasic(path, sigfile, task, d):
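For context, the candidate manifest names searched above are formed from SSTATE_MANIFESTS plus the package architecture, recipe and task name; a tiny sketch showing how the improved bb.fatal() message ends up listing every path that was tried (all values below are made up for illustration):

    # Illustrative values only; the real code expands ${SSTATE_MANIFESTS} etc.
    pkgarchs = ["core2-64", "allarch", "x86_64_x86_64-nativesdk"]
    taskdata, taskname = "zlib", "populate_sysroot"

    searched_manifests = []
    for pkgarch in pkgarchs:
        manifest = "sstate-control/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname)
        # the real code returns early here if os.path.exists(manifest)
        searched_manifests.append(manifest)

    print("But none of these manifests exists:\n    " + "\n    ".join(searched_manifests))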
diff --git a/poky/meta/lib/oeqa/core/utils/concurrencytest.py b/poky/meta/lib/oeqa/core/utils/concurrencytest.py
index 383479c..4f77589 100644
--- a/poky/meta/lib/oeqa/core/utils/concurrencytest.py
+++ b/poky/meta/lib/oeqa/core/utils/concurrencytest.py
@@ -59,6 +59,7 @@
self.outputbuf = output
self.finalresult = finalresult
self.finalresult.buffer = True
+ self.target = target
def _add_result_with_semaphore(self, method, test, *args, **kwargs):
self.semaphore.acquire()
@@ -67,13 +68,14 @@
self.result.starttime[test.id()] = self._test_start.timestamp()
self.result.threadprogress[self.threadnum].append(test.id())
totalprogress = sum(len(x) for x in self.result.threadprogress.values())
- self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % (
+ self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
self.threadnum,
len(self.result.threadprogress[self.threadnum]),
self.totalinprocess,
totalprogress,
self.totaltests,
"{0:.2f}".format(time.time()-self._test_start.timestamp()),
+ self.target.failed_tests,
test.id())
finally:
self.semaphore.release()
diff --git a/poky/meta/lib/oeqa/files/test.rs b/poky/meta/lib/oeqa/files/test.rs
new file mode 100644
index 0000000..f79c691
--- /dev/null
+++ b/poky/meta/lib/oeqa/files/test.rs
@@ -0,0 +1,2 @@
+fn main() {
+}
diff --git a/poky/meta/lib/oeqa/runtime/cases/rpm.py b/poky/meta/lib/oeqa/runtime/cases/rpm.py
index e3cd818..fa86eb0 100644
--- a/poky/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/poky/meta/lib/oeqa/runtime/cases/rpm.py
@@ -51,21 +51,20 @@
msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
self.assertEqual(status, 0, msg=msg)
- def check_no_process_for_user(u):
- _, output = self.target.run(self.tc.target_cmds['ps'])
- if u + ' ' in output:
- return False
- else:
- return True
+ def wait_for_no_process_for_user(u, timeout = 120):
+ timeout_at = time.time() + timeout
+ while time.time() < timeout_at:
+ _, output = self.target.run(self.tc.target_cmds['ps'])
+ if u + ' ' not in output:
+ return
+ time.sleep(1)
+ user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
+            msg = "User %s still has process(es) running:\n%s" % (u, "\n".join(user_pss))
+            self.fail(msg)
def unset_up_test_user(u):
             # ensure no test1 process is running
- timeout = time.time() + 30
- while time.time() < timeout:
- if check_no_process_for_user(u):
- break
- else:
- time.sleep(1)
+ wait_for_no_process_for_user(u)
status, output = self.target.run('userdel -r %s' % u)
msg = 'Failed to erase user: %s' % output
self.assertTrue(status == 0, msg=msg)
diff --git a/poky/meta/lib/oeqa/runtime/cases/rust.py b/poky/meta/lib/oeqa/runtime/cases/rust.py
index 55b280d..c9c60e1 100644
--- a/poky/meta/lib/oeqa/runtime/cases/rust.py
+++ b/poky/meta/lib/oeqa/runtime/cases/rust.py
@@ -8,6 +8,47 @@
from oeqa.core.decorator.depends import OETestDepends
from oeqa.runtime.decorator.package import OEHasPackage
+class RustCompileTest(OERuntimeTestCase):
+
+ @classmethod
+ def setUp(cls):
+ dst = '/tmp/'
+ src = os.path.join(cls.tc.files_dir, 'test.rs')
+ cls.tc.target.copyTo(src, dst)
+
+ @classmethod
+ def tearDown(cls):
+ files = '/tmp/test.rs /tmp/test'
+ cls.tc.target.run('rm %s' % files)
+ dirs = '/tmp/hello'
+ cls.tc.target.run('rm -r %s' % dirs)
+
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ @OEHasPackage(['rust'])
+ def test_rust_compile(self):
+ status, output = self.target.run('rustc /tmp/test.rs -o /tmp/test')
+ msg = 'rust compile failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('/tmp/test')
+ msg = 'running compiled file failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ @OETestDepends(['ssh.SSHTest.test_ssh'])
+ @OEHasPackage(['cargo'])
+ def test_cargo_compile(self):
+ status, output = self.target.run('cargo new /tmp/hello')
+ msg = 'cargo new failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('cargo build --manifest-path=/tmp/hello/Cargo.toml')
+ msg = 'cargo build failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
+ status, output = self.target.run('cargo run --manifest-path=/tmp/hello/Cargo.toml')
+ msg = 'running compiled file failed, output: %s' % output
+ self.assertEqual(status, 0, msg=msg)
+
class RustHelloworldTest(OERuntimeTestCase):
@OETestDepends(['ssh.SSHTest.test_ssh'])
@OEHasPackage(['rust-hello-world'])
diff --git a/poky/meta/lib/oeqa/selftest/cases/baremetal.py b/poky/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000..cadaea2
--- /dev/null
+++ b/poky/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
+
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import bitbake
+
+class BaremetalTest(OESelftestTestCase):
+ def test_baremetal(self):
+ self.write_config('TCLIBC = "baremetal"')
+ bitbake('baremetal-helloworld')
diff --git a/poky/meta/lib/oeqa/selftest/cases/debuginfod.py b/poky/meta/lib/oeqa/selftest/cases/debuginfod.py
index 3c40119..37f5176 100644
--- a/poky/meta/lib/oeqa/selftest/cases/debuginfod.py
+++ b/poky/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -12,6 +12,36 @@
class Debuginfod(OESelftestTestCase):
+
+ def wait_for_debuginfod(self, port):
+ """
+ debuginfod takes time to scan the packages and requesting too early may
+ result in a test failure if the right packages haven't been scanned yet.
+
+        Poll the metrics endpoint periodically and wait until there are no
+        busy scanning threads.
+
+ Returns True if debuginfod is ready, False if we timed out
+ """
+ import time, urllib
+
+ # Wait a minute
+ countdown = 6
+ delay = 10
+
+ while countdown:
+ time.sleep(delay)
+ try:
+ with urllib.request.urlopen("http://localhost:%d/metrics" % port) as f:
+ lines = f.read().decode("ascii").splitlines()
+ if "thread_busy{role=\"scan\"} 0" in lines:
+ return True
+ except urllib.error.URLError as e:
+ self.logger.error(e)
+ countdown -= 1
+ return False
+
+
def test_debuginfod(self):
self.write_config(
"""
@@ -25,29 +55,50 @@
cmd = [
os.path.join(native_sysroot, "usr", "bin", "debuginfod"),
"--verbose",
+ # In-memory database, this is a one-shot test
"--database=:memory:",
+ # Don't use all the host cores
+ "--concurrency=8",
+ "--connection-pool=8",
+ # Disable rescanning, this is a one-shot test
+ "--rescan-time=0",
+ "--groom-time=0",
get_bb_var("DEPLOY_DIR"),
]
- for format in get_bb_var("PACKAGE_CLASSES").split():
- if format == "package_deb":
- cmd.append("--scan-deb-dir")
- elif format == "package_ipk":
- cmd.append("--scan-deb-dir")
- elif format == "package_rpm":
- cmd.append("--scan-rpm-dir")
+
+ format = get_bb_var("PACKAGE_CLASSES").split()[0]
+ if format == "package_deb":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_ipk":
+ cmd.append("--scan-deb-dir")
+ elif format == "package_rpm":
+ cmd.append("--scan-rpm-dir")
+ else:
+ self.fail("Unknown package class %s" % format)
+
# Find a free port
with socketserver.TCPServer(("localhost", 0), None) as s:
port = s.server_address[1]
cmd.append("--port=%d" % port)
try:
- debuginfod = subprocess.Popen(cmd)
+ # Remove DEBUGINFOD_URLS from the environment so we don't try
+ # looking in the distro debuginfod
+ env = os.environ.copy()
+ if "DEBUGINFOD_URLS" in env:
+ del env["DEBUGINFOD_URLS"]
+
+ self.logger.info(f"Starting server {cmd}")
+ debuginfod = subprocess.Popen(cmd, env=env)
with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
+ self.assertTrue(self.wait_for_debuginfod(port))
+
cmd = (
"DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/debuginfod"
% (qemu.server_ip, port)
)
+ self.logger.info(f"Starting client {cmd}")
status, output = qemu.run_serial(cmd)
# This should be more comprehensive
self.assertIn("/.cache/debuginfod_client/", output)
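The readiness check added above keys off debuginfod's Prometheus-style metrics endpoint; a minimal one-shot sketch of the same check outside the test, assuming a debuginfod instance listening on an illustrative port:

    # Query debuginfod's metrics once and look for idle scan threads, as
    # wait_for_debuginfod() does in a retry loop. The port is an assumption.
    import urllib.request, urllib.error

    try:
        with urllib.request.urlopen("http://localhost:8002/metrics") as f:
            lines = f.read().decode("ascii").splitlines()
        print("scan idle:", 'thread_busy{role="scan"} 0' in lines)
    except urllib.error.URLError as e:
        print("debuginfod not reachable:", e)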
diff --git a/poky/meta/lib/oeqa/selftest/cases/devtool.py b/poky/meta/lib/oeqa/selftest/cases/devtool.py
index 0cb7403..c78a68b 100644
--- a/poky/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/poky/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1076,9 +1076,10 @@
def test_devtool_update_recipe_append_git(self):
# Check preconditions
testrecipe = 'mtd-utils-selftest'
- bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
+ bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
recipefile = bb_vars['FILE']
src_uri = bb_vars['SRC_URI']
+ corenames = bb_vars['LAYERSERIES_CORENAMES']
self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
for entry in src_uri.split():
if entry.startswith('git://'):
@@ -1109,7 +1110,7 @@
f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
- f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n')
+ f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
# Create the bbappend
diff --git a/poky/meta/lib/oeqa/selftest/cases/externalsrc.py b/poky/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000..1d800dc
--- /dev/null
+++ b/poky/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import os
+import shutil
+import tempfile
+
+from oeqa.selftest.case import OESelftestTestCase
+from oeqa.utils.commands import get_bb_var, runCmd
+
+class ExternalSrc(OESelftestTestCase):
+    # Test that srctree_hash_files does not crash.
+    # Ideally we would check do_compile[file-checksums], but oeqa does not currently
+    # support that, so we only check that a recipe with externalsrc can be parsed.
+ def test_externalsrc_srctree_hash_files(self):
+ test_recipe = "git-submodule-test"
+ git_url = "git://git.yoctoproject.org/git-submodule-test"
+ externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
+
+ self.write_config(
+ """
+INHERIT += "externalsrc"
+EXTERNALSRC:pn-%s = "%s"
+""" % (test_recipe, externalsrc_dir)
+ )
+
+ # test with git without submodules
+ runCmd('git clone %s %s' % (git_url, externalsrc_dir))
+ os.unlink(externalsrc_dir + "/.gitmodules")
+ open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
+ os.unlink(".gitmodules")
+
+ # test with git with submodules
+ runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
+ runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
+
+ # test without git
+ shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
+        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
diff --git a/poky/meta/lib/oeqa/selftest/cases/runqemu.py b/poky/meta/lib/oeqa/selftest/cases/runqemu.py
index 58a4526..c2c3fbc 100644
--- a/poky/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/poky/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -199,22 +199,12 @@
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- # Need to have portmap/rpcbind running to allow this test to work and
- # current autobuilder setup does not have this.
- def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
- self.assertExists(self.qemuboot_conf)
- bitbake('meta-ide-support')
+ def test_qemu_can_boot_nfs_and_shutdown(self):
rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
self.assertExists(rootfs_tar)
- tmpdir = tempfile.mkdtemp(prefix='qemu_nfs')
- tmpdir_nfs = os.path.join(tmpdir, 'nfs')
- cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
- result = runCmd(cmd_extract_nfs)
- self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
- cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
+ cmd = "%s %s" % (self.cmd_common, rootfs_tar)
shutdown_timeout = 120
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
- runCmd('rm -rf %s' % tmpdir)
diff --git a/poky/meta/lib/oeqa/selftest/cases/tinfoil.py b/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
index 0a66615..dd13c20 100644
--- a/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -66,6 +66,20 @@
localdata.setVar('PN', 'hello')
self.assertEqual('hello', localdata.getVar('BPN'))
+    # The config_data API to parse_recipe_file is used by:
+    # layerindex-web layerindex/update_layer.py
+    def test_parse_recipe_custom_data(self):
+        with bb.tinfoil.Tinfoil() as tinfoil:
+            tinfoil.prepare(config_only=False, quiet=2)
+            localdata = bb.data.createCopy(tinfoil.config_data)
+            localdata.setVar("TESTVAR", "testval")
+            testrecipe = 'mdadm'
+            best = tinfoil.find_best_provider(testrecipe)
+            if not best:
+                self.fail('Unable to find recipe providing %s' % testrecipe)
+            rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
+            self.assertEqual("testval", rd.getVar('TESTVAR'))
+
def test_list_recipes(self):
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=False, quiet=2)
diff --git a/poky/meta/lib/oeqa/selftest/context.py b/poky/meta/lib/oeqa/selftest/context.py
index 78c7a46..c7dd03c 100644
--- a/poky/meta/lib/oeqa/selftest/context.py
+++ b/poky/meta/lib/oeqa/selftest/context.py
@@ -154,9 +154,6 @@
group.add_argument('-a', '--run-all-tests', default=False,
action="store_true", dest="run_all_tests",
help='Run all (unhidden) tests')
- group.add_argument('-R', '--skip-tests', required=False, action='store',
- nargs='+', dest="skips", default=None,
- help='Run all (unhidden) tests except the ones specified. Format should be <module>[.<class>[.<test_method>]]')
group.add_argument('-r', '--run-tests', required=False, action='store',
nargs='+', dest="run_tests", default=None,
help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
@@ -171,6 +168,9 @@
action="store_true", default=False,
help='List all available tests.')
+ parser.add_argument('-R', '--skip-tests', required=False, action='store',
+ nargs='+', dest="skips", default=None,
+ help='Skip the tests specified. Format should be <module>[.<class>[.<test_method>]]')
parser.add_argument('-j', '--num-processes', dest='processes', action='store',
type=int, help="number of processes to execute in parallel with")
diff --git a/poky/meta/lib/oeqa/utils/commands.py b/poky/meta/lib/oeqa/utils/commands.py
index f733fcd..f4daea2 100644
--- a/poky/meta/lib/oeqa/utils/commands.py
+++ b/poky/meta/lib/oeqa/utils/commands.py
@@ -300,6 +300,7 @@
def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
os.makedirs(os.path.join(templayerdir, 'conf'))
+ corenames = get_bb_var('LAYERSERIES_CORENAMES')
with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
f.write('BBPATH .= ":${LAYERDIR}"\n')
f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
@@ -308,7 +309,7 @@
f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
- f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername)
+ f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))
@contextlib.contextmanager
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
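The practical effect of the LAYERSERIES_CORENAMES changes above (here and in the devtool selftest earlier) is that the generated layer.conf carries the literal core release name(s) rather than a ${LAYERSERIES_COMPAT_core} reference; a tiny sketch with an assumed release name:

    # "mickledore" is an assumed value; the tests fetch the real one with
    # get_bb_var('LAYERSERIES_CORENAMES').
    templayername = "oeselftesttemplayer"
    corenames = "mickledore"

    old_line = 'LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"' % templayername
    new_line = 'LAYERSERIES_COMPAT_%s = "%s"' % (templayername, corenames)
    print(old_line)
    print(new_line)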
diff --git a/poky/meta/lib/oeqa/utils/nfs.py b/poky/meta/lib/oeqa/utils/nfs.py
index c121865..b66ed42 100644
--- a/poky/meta/lib/oeqa/utils/nfs.py
+++ b/poky/meta/lib/oeqa/utils/nfs.py
@@ -30,7 +30,7 @@
nenv = dict(os.environ)
nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
- cmd = Command(["unfsd", "-d", "-p", "-N", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
+ cmd = Command(["unfsd", "-d", "-p", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
bg = True, env = nenv, output_log = logger)
cmd.run()
yield nfsport, mountport
diff --git a/poky/meta/lib/oeqa/utils/qemurunner.py b/poky/meta/lib/oeqa/utils/qemurunner.py
index e602399..b5fed6c 100644
--- a/poky/meta/lib/oeqa/utils/qemurunner.py
+++ b/poky/meta/lib/oeqa/utils/qemurunner.py
@@ -177,7 +177,11 @@
launch_cmd += ' slirp'
if self.use_ovmf:
launch_cmd += ' ovmf'
- launch_cmd += ' %s %s %s' % (runqemuparams, self.machine, self.rootfs)
+ launch_cmd += ' %s %s' % (runqemuparams, self.machine)
+ if self.rootfs.endswith('.vmdk'):
+ self.logger.debug('Bypassing VMDK rootfs for runqemu')
+ else:
+ launch_cmd += ' %s' % (self.rootfs)
return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env)
@@ -401,9 +405,10 @@
cmdline = re_control_char.sub(' ', cmdline)
try:
if self.use_slirp:
- tcp_ports = cmdline.split("hostfwd=tcp::")[1]
+ tcp_ports = cmdline.split("hostfwd=tcp:")[1]
+ ip, tcp_ports = tcp_ports.split(":")[:2]
host_port = tcp_ports[:tcp_ports.find('-')]
- self.ip = "localhost:%s" % host_port
+ self.ip = "%s:%s" % (ip, host_port)
else:
ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
self.ip = ips[0]
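To make the slirp change above concrete, a worked example of the new hostfwd parsing against an illustrative QEMU command-line fragment:

    # Illustrative fragment; the real cmdline is read from the running QEMU process.
    cmdline = "-netdev user,id=net0,hostfwd=tcp:127.0.0.1:2222-:22"

    tcp_ports = cmdline.split("hostfwd=tcp:")[1]   # "127.0.0.1:2222-:22"
    ip, tcp_ports = tcp_ports.split(":")[:2]       # "127.0.0.1", "2222-"
    host_port = tcp_ports[:tcp_ports.find('-')]    # "2222"
    print("%s:%s" % (ip, host_port))               # -> 127.0.0.1:2222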