1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# BB Class inspired by ebuild.sh
8#
9# This class will test files after installation for certain
10# security issues and other kinds of issues.
11#
12# Checks we do (any of these can be skipped per package with INSANE_SKIP; see the example after this list):
13# -Check the ownership and permissions
14# -Check the RUNTIME path (RPATH) for references to $TMPDIR
15# -Check if .la files wrongly point to workdir
16# -Check if .pc files wrongly point to workdir
17# -Check if packages contain .debug directories or .so files
18# where they should be in -dev or -dbg
19# -Check if config.log contains traces to broken autoconf tests
20# -Check for invalid (non-UTF-8) characters in some package metadata
21# -Ensure that binaries in base_[bindir|sbindir|libdir] do not link
22# into exec_prefix
23# -Check that scripts in base_[bindir|sbindir|libdir] do not reference
24# files under exec_prefix
25# -Check if the package name is upper case
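#
# Any of the checks above can be skipped for a particular package with
# INSANE_SKIP, which do_package_qa reads both globally and per package.
# A minimal recipe-side sketch, using check names from the lists below:
#
#   INSANE_SKIP:${PN} += "dev-so"
#   INSANE_SKIP:${PN}-dev += "staticdev"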
26
27# Choose whether a given type of issue is reported as a warning or an error;
28# these lists may already have been set by other files.
29WARN_QA ?= " libdir xorg-driver-abi buildpaths \
30 textrel incompatible-license files-invalid \
31 infodir build-deps src-uri-bad symlink-to-sysroot multilib \
32    invalid-packageconfig host-user-contaminated uppercase-pn \
33    mime mime-xdg unlisted-pkg-lics unhandled-features-check \
34 missing-update-alternatives native-last missing-ptest \
35 license-exists license-no-generic license-syntax license-format \
36 license-incompatible license-file-missing obsolete-license \
37    32bit-time \
38    "
39ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
40 perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
41 split-strip packages-list pkgv-undefined var-undefined \
42 version-going-backwards expanded-d invalid-chars \
43 license-checksum dev-elf file-rdeps configure-unsafe \
44 configure-gettext perllocalpod shebang-size \
45 already-stripped installed-vs-shipped ldflags compile-host-path \
46 install-host-path pn-overrides unknown-configure-option \
47 useless-rpaths rpaths staticdev empty-dirs \
48    patch-fuzz \
49    "
50# Add usrmerge QA check based on distro feature
51ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
52ERROR_QA:append:layer-core = " patch-status"
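# A distro or local configuration can also move a check between the two lists;
# a minimal sketch (assuming the usual oe.qa.handle_error behaviour, where a
# check listed in ERROR_QA fails the build and one listed only in WARN_QA
# merely warns):
#
#   WARN_QA:remove = "buildpaths"
#   ERROR_QA:append = " buildpaths"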
53
54FAKEROOT_QA = "host-user-contaminated"
55FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
56enabled tests are listed here, the do_package_qa task will run under fakeroot."
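# A layer that adds a check needing real file ownership information could list
# it here as well; a sketch with a hypothetical check name:
#
#   FAKEROOT_QA += "my-ownership-check"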
57
58ALL_QA = "${WARN_QA} ${ERROR_QA}"
59
60UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
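# A recipe whose configure script genuinely rejects one of the options passed
# by default can extend this list rather than disabling the whole
# unknown-configure-option check; a sketch with a hypothetical option name:
#
#   UNKNOWN_CONFIGURE_OPT_IGNORE:append = " --disable-largefile"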
61
62# This is a list of directories that are expected to be empty.
63QA_EMPTY_DIRS ?= " \
64 /dev/pts \
65 /media \
66 /proc \
67 /run \
68 /tmp \
69 ${localstatedir}/run \
70 ${localstatedir}/volatile \
71"
72# It is possible to specify why a directory is expected to be empty by defining
73# QA_EMPTY_DIRS_RECOMMENDATION:<path>, which will then be included in the error
74# message if the directory is not empty. If it is not specified for a directory,
75# then "but it is expected to be empty" will be used.
76
77def package_qa_clean_path(path, d, pkg=None):
78 """
79 Remove redundant paths from the path for display. If pkg isn't set then
80 TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
81 """
82 if pkg:
83 path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
84 return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
85
86QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
87def package_qa_check_shebang_size(path, name, d, elf, messages):
88 import stat
89 if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
90 return
91
92 try:
93 with open(path, 'rb') as f:
94 stanza = f.readline(130)
95 except IOError:
96 return
97
98 if stanza.startswith(b'#!'):
99        # Shebang found; check whether it exceeds the maximum length
100 try:
101 stanza = stanza.decode("utf-8")
102 except UnicodeDecodeError:
103 #If it is not a text file, it is not a script
104 return
105
106 if len(stanza) > 129:
107 oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
108 return
109
110QAPATHTEST[libexec] = "package_qa_check_libexec"
111def package_qa_check_libexec(path,name, d, elf, messages):
112
113 # Skip the case where the default is explicitly /usr/libexec
114 libexec = d.getVar('libexecdir')
115 if libexec == "/usr/libexec":
116 return True
117
118 if 'libexec' in path.split(os.path.sep):
119 oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
120 return False
121
122 return True
123
124QAPATHTEST[rpaths] = "package_qa_check_rpath"
125def package_qa_check_rpath(file,name, d, elf, messages):
126 """
127 Check for dangerous RPATHs
128 """
129 if not elf:
130 return
131
132 if os.path.islink(file):
133 return
134
135 bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
136
137 phdrs = elf.run_objdump("-p", d)
138
139 import re
140 rpath_re = re.compile(r"\s+RPATH\s+(.*)")
141 for line in phdrs.split("\n"):
142 m = rpath_re.match(line)
143 if m:
144 rpath = m.group(1)
145 for dir in bad_dirs:
146 if dir in rpath:
147 oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
148
149QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
150def package_qa_check_useless_rpaths(file, name, d, elf, messages):
151 """
152 Check for RPATHs that are useless but not dangerous
153 """
154 def rpath_eq(a, b):
155 return os.path.normpath(a) == os.path.normpath(b)
156
157 if not elf:
158 return
159
160 if os.path.islink(file):
161 return
162
163 libdir = d.getVar("libdir")
164 base_libdir = d.getVar("base_libdir")
165
166 phdrs = elf.run_objdump("-p", d)
167
168 import re
169 rpath_re = re.compile(r"\s+RPATH\s+(.*)")
170 for line in phdrs.split("\n"):
171 m = rpath_re.match(line)
172 if m:
173 rpath = m.group(1)
174 if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
175 # The dynamic linker searches both these places anyway. There is no point in
176 # looking there again.
177 oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
178
179QAPATHTEST[dev-so] = "package_qa_check_dev"
180def package_qa_check_dev(path, name, d, elf, messages):
181 """
182 Check for ".so" library symlinks in non-dev packages
183 """
184
185 if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path):
186 oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
187 (name, package_qa_clean_path(path, d, name)))
188
189QAPATHTEST[dev-elf] = "package_qa_check_dev_elf"
190def package_qa_check_dev_elf(path, name, d, elf, messages):
191 """
192 Check that -dev doesn't contain real shared libraries. The test has to
193 check that the file is not a link and is an ELF object as some recipes
194 install link-time .so files that are linker scripts.
195 """
196 if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf:
197 oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
198 (name, package_qa_clean_path(path, d, name)))
199
200QAPATHTEST[staticdev] = "package_qa_check_staticdev"
201def package_qa_check_staticdev(path, name, d, elf, messages):
202 """
203 Check for ".a" library in non-staticdev packages
204    There are a number of exceptions to this rule: -pic packages can contain
205    static libraries, *_nonshared.a files belong with their -dev packages, and
206    libgcc.a and libgcov.a are skipped in their own packages
207 """
208
209 if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
210 oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
211 (name, package_qa_clean_path(path,d, name)))
212
213QAPATHTEST[mime] = "package_qa_check_mime"
214def package_qa_check_mime(path, name, d, elf, messages):
215 """
216 Check if package installs mime types to /usr/share/mime/packages
217    while not inheriting mime.bbclass
218 """
219
220 if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
221 oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
222 (name, package_qa_clean_path(path,d)))
223
224QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
225def package_qa_check_mime_xdg(path, name, d, elf, messages):
226 """
227 Check if package installs desktop file containing MimeType and requires
228    mime-xdg.bbclass to create /usr/share/applications/mimeinfo.cache
229 """
230
231 if d.getVar("datadir") + "/applications" in path and path.endswith('.desktop') and not bb.data.inherits_class("mime-xdg", d):
232 mime_type_found = False
233 try:
234 with open(path, 'r') as f:
235 for line in f.read().split('\n'):
236 if 'MimeType' in line:
237 mime_type_found = True
238                    break
239 except:
240 # At least libreoffice installs symlinks with absolute paths that are dangling here.
241 # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
242 wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
243 wstr += "Please check if (linked) file contains key 'MimeType'.\n"
244 pkgname = name
245 if name == d.getVar('PN'):
246 pkgname = '${PN}'
247 wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
248 oe.qa.add_message(messages, "mime-xdg", wstr)
249 if mime_type_found:
250 oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \
251 (name, package_qa_clean_path(path,d)))
252
253def package_qa_check_libdir(d):
254 """
255 Check for wrong library installation paths. For instance, catch
256 recipes installing /lib/bar.so when ${base_libdir}="lib32" or
257 installing in /usr/lib64 when ${libdir}="/usr/lib"
258 """
259 import re
260
261 pkgdest = d.getVar('PKGDEST')
262 base_libdir = d.getVar("base_libdir") + os.sep
263 libdir = d.getVar("libdir") + os.sep
264 libexecdir = d.getVar("libexecdir") + os.sep
265 exec_prefix = d.getVar("exec_prefix") + os.sep
266
267 messages = []
268
269    # The re's are purposely fuzzy, as there are some .so.x.y.z files
270    # that don't follow the standard naming convention. The code below checks
271    # that they are actual ELF files
272 lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
273 exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)
274
275 for root, dirs, files in os.walk(pkgdest):
276 if root == pkgdest:
277 # Skip subdirectories for any packages with libdir in INSANE_SKIP
278 skippackages = []
279 for package in dirs:
280 if 'libdir' in (d.getVar('INSANE_SKIP:' + package) or "").split():
281 bb.note("Package %s skipping libdir QA test" % (package))
282 skippackages.append(package)
283 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
284 bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
285 skippackages.append(package)
286 for package in skippackages:
287 dirs.remove(package)
288 for file in files:
289 full_path = os.path.join(root, file)
290 rel_path = os.path.relpath(full_path, pkgdest)
291 if os.sep in rel_path:
292 package, rel_path = rel_path.split(os.sep, 1)
293 rel_path = os.sep + rel_path
294 if lib_re.match(rel_path):
295 if base_libdir not in rel_path:
296 # make sure it's an actual ELF file
297 elf = oe.qa.ELFFile(full_path)
298 try:
299 elf.open()
300 messages.append("%s: found library in wrong location: %s" % (package, rel_path))
301 except (oe.qa.NotELFFileError):
302 pass
303 if exec_re.match(rel_path):
304 if libdir not in rel_path and libexecdir not in rel_path:
305 # make sure it's an actual ELF file
306 elf = oe.qa.ELFFile(full_path)
307 try:
308 elf.open()
309 messages.append("%s: found library in wrong location: %s" % (package, rel_path))
310 except (oe.qa.NotELFFileError):
311 pass
312
313 if messages:
314 oe.qa.handle_error("libdir", "\n".join(messages), d)
315
316QAPATHTEST[debug-files] = "package_qa_check_dbg"
317def package_qa_check_dbg(path, name, d, elf, messages):
318 """
319 Check for ".debug" files or directories outside of the dbg package
320 """
321
322 if not "-dbg" in name and not "-ptest" in name:
323 if '.debug' in path.split(os.path.sep):
324 oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
325 (name, package_qa_clean_path(path,d)))
326
327QAPATHTEST[arch] = "package_qa_check_arch"
328def package_qa_check_arch(path,name,d, elf, messages):
329 """
330 Check if archs are compatible
331 """
332 import re, oe.elf
333
334 if not elf:
335 return
336
337 target_os = d.getVar('HOST_OS')
338 target_arch = d.getVar('HOST_ARCH')
339 provides = d.getVar('PROVIDES')
340 bpn = d.getVar('BPN')
341
342 if target_arch == "allarch":
343 pn = d.getVar('PN')
344 oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
345 return
346
347    # FIXME: Cross packages confuse this check, so just skip them
348 for s in ['cross', 'nativesdk', 'cross-canadian']:
349 if bb.data.inherits_class(s, d):
350 return
351
352 # avoid following links to /usr/bin (e.g. on udev builds)
353 # we will check the files pointed to anyway...
354 if os.path.islink(path):
355 return
356
357    # if this throws an exception, the machine dict in oe.elf needs fixing
358 (machine, osabi, abiversion, littleendian, bits) \
359 = oe.elf.machine_dict(d)[target_os][target_arch]
360
361    # Check the architecture and endianness of the binary
362 is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
363 (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
364 target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
365 is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
366 if not ((machine == elf.machine()) or is_32 or is_bpf):
367 oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
368 (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
369 elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
370 oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
371 (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
372 elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
373 oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
374 (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
375
376QAPATHTEST[desktop] = "package_qa_check_desktop"
377def package_qa_check_desktop(path, name, d, elf, messages):
378 """
379 Run all desktop files through desktop-file-validate.
380 """
381 if path.endswith(".desktop"):
382 desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate')
383 output = os.popen("%s %s" % (desktop_file_validate, path))
384 # This only produces output on errors
385 for l in output:
386 oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
387
388QAPATHTEST[textrel] = "package_qa_textrel"
389def package_qa_textrel(path, name, d, elf, messages):
390 """
391 Check if the binary contains relocations in .text
392 """
393
394 if not elf:
395 return
396
397 if os.path.islink(path):
398 return
399
400 phdrs = elf.run_objdump("-p", d)
401 sane = True
402
403 import re
404 textrel_re = re.compile(r"\s+TEXTREL\s+")
405 for line in phdrs.split("\n"):
406 if textrel_re.match(line):
407 sane = False
408 break
409
410 if not sane:
411 path = package_qa_clean_path(path, d, name)
412 oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
413
414QAPATHTEST[ldflags] = "package_qa_hash_style"
415def package_qa_hash_style(path, name, d, elf, messages):
416 """
417 Check if the binary has the right hash style...
418 """
419
420 if not elf:
421 return
422
423 if os.path.islink(path):
424 return
425
426 gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
427 if not gnu_hash:
428 gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
429 if not gnu_hash:
430 return
431
432 sane = False
433 has_syms = False
434
435 phdrs = elf.run_objdump("-p", d)
436
437 # If this binary has symbols, we expect it to have GNU_HASH too.
438 for line in phdrs.split("\n"):
439 if "SYMTAB" in line:
440 has_syms = True
441 if "GNU_HASH" in line or "MIPS_XHASH" in line:
442 sane = True
443 if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
444 sane = True
445 if has_syms and not sane:
446 path = package_qa_clean_path(path, d, name)
447 oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
448
449
450QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
451def package_qa_check_buildpaths(path, name, d, elf, messages):
452 """
453 Check for build paths inside target files and error if paths are not
454 explicitly ignored.
455 """
456 import stat
457
458 # Ignore symlinks/devs/fifos
459 mode = os.lstat(path).st_mode
460 if stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode):
461 return
462
463 tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
464 with open(path, 'rb') as f:
465 file_content = f.read()
466 if tmpdir in file_content:
467 trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
468 oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
469
470
471QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
472def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
473 """
474 Check that all packages containing Xorg drivers have ABI dependencies
475 """
476
477 # Skip dev, dbg or nativesdk packages
478 if name.endswith("-dev") or name.endswith("-dbg") or name.startswith("nativesdk-"):
479 return
480
481 driverdir = d.expand("${libdir}/xorg/modules/drivers/")
482 if driverdir in path and path.endswith(".so"):
483 mlprefix = d.getVar('MLPREFIX') or ''
484 for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
485 if rdep.startswith("%sxorg-abi-" % mlprefix):
486 return
487 oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
488
489QAPATHTEST[infodir] = "package_qa_check_infodir"
490def package_qa_check_infodir(path, name, d, elf, messages):
491 """
492 Check that /usr/share/info/dir isn't shipped in a particular package
493 """
494 infodir = d.expand("${infodir}/dir")
495
496 if infodir in path:
497 oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
498
499QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
500def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
501 """
502 Check that the package doesn't contain any absolute symlinks to the sysroot.
503 """
504 if os.path.islink(path):
505 target = os.readlink(path)
506 if os.path.isabs(target):
507 tmpdir = d.getVar('TMPDIR')
508 if target.startswith(tmpdir):
509 trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
510 oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
511
512QAPATHTEST[32bit-time] = "check_32bit_symbols"
513def check_32bit_symbols(path, packagename, d, elf, messages):
514 """
515 Check that ELF files do not use any 32 bit time APIs from glibc.
516 """
517    thirtytwo_bit_time_archs = set(('arm','armeb','mipsarcho32','powerpc','x86'))
518 overrides = set(d.getVar('OVERRIDES').split(':'))
519 if not(thirtytwo_bit_time_archs & overrides):
520 return
521
522    import re
523 # This list is manually constructed by searching the image folder of the
524 # glibc recipe for __USE_TIME_BITS64. There is no good way to do this
525 # automatically.
526 api32 = {
527 # /usr/include/time.h
528 "clock_getres", "clock_gettime", "clock_nanosleep", "clock_settime",
529 "ctime", "ctime_r", "difftime", "gmtime", "gmtime_r", "localtime",
530 "localtime_r", "mktime", "nanosleep", "time", "timegm", "timelocal",
531 "timer_gettime", "timer_settime", "timespec_get", "timespec_getres",
532 # /usr/include/bits/time.h
533 "clock_adjtime",
534 # /usr/include/signal.h
535 "sigtimedwait",
536 # /usr/include/sys/time.h
537 "futimes", "futimesat", "getitimer", "gettimeofday", "lutimes",
538 "setitimer", "settimeofday", "utimes",
539 # /usr/include/sys/timex.h
540 "adjtimex", "ntp_adjtime", "ntp_gettime", "ntp_gettimex",
541 # /usr/include/sys/wait.h
542 "wait3", "wait4",
543 # /usr/include/sys/stat.h
544 "fstat", "fstat64", "fstatat", "fstatat64", "futimens", "lstat",
545 "lstat64", "stat", "stat64", "utimensat",
546 # /usr/include/sys/poll.h
547 "ppoll",
548 # /usr/include/sys/resource.h
549 "getrusage",
550 # /usr/include/sys/ioctl.h
551 "ioctl",
552 # /usr/include/sys/select.h
553 "select", "pselect",
554 # /usr/include/sys/prctl.h
555 "prctl",
556 # /usr/include/sys/epoll.h
557 "epoll_pwait2",
558 # /usr/include/sys/timerfd.h
559 "timerfd_gettime", "timerfd_settime",
560 # /usr/include/sys/socket.h
561 "getsockopt", "recvmmsg", "recvmsg", "sendmmsg", "sendmsg",
562 "setsockopt",
563 # /usr/include/sys/msg.h
564 "msgctl",
565 # /usr/include/sys/sem.h
566 "semctl", "semtimedop",
567 # /usr/include/sys/shm.h
568 "shmctl",
569 # /usr/include/pthread.h
570 "pthread_clockjoin_np", "pthread_cond_clockwait",
571 "pthread_cond_timedwait", "pthread_mutex_clocklock",
572 "pthread_mutex_timedlock", "pthread_rwlock_clockrdlock",
573 "pthread_rwlock_clockwrlock", "pthread_rwlock_timedrdlock",
574 "pthread_rwlock_timedwrlock", "pthread_timedjoin_np",
575 # /usr/include/semaphore.h
576 "sem_clockwait", "sem_timedwait",
577 # /usr/include/threads.h
578 "cnd_timedwait", "mtx_timedlock", "thrd_sleep",
579 # /usr/include/aio.h
580 "aio_cancel", "aio_error", "aio_read", "aio_return", "aio_suspend",
581 "aio_write", "lio_listio",
582 # /usr/include/mqueue.h
583 "mq_timedreceive", "mq_timedsend",
584 # /usr/include/glob.h
585 "glob", "glob64", "globfree", "globfree64",
586 # /usr/include/sched.h
587 "sched_rr_get_interval",
588 # /usr/include/fcntl.h
589 "fcntl", "fcntl64",
590 # /usr/include/utime.h
591 "utime",
592 # /usr/include/ftw.h
593 "ftw", "ftw64", "nftw", "nftw64",
594 # /usr/include/fts.h
595 "fts64_children", "fts64_close", "fts64_open", "fts64_read",
596 "fts64_set", "fts_children", "fts_close", "fts_open", "fts_read",
597 "fts_set",
598 # /usr/include/netdb.h
599 "gai_suspend",
600 }
601
602 ptrn = re.compile(
603 r'''
604 (?P<value>[\da-fA-F]+) \s+
605 (?P<flags>[lgu! ][w ][C ][W ][Ii ][dD ]F) \s+
606 (?P<section>\*UND\*) \s+
607 (?P<alignment>(?P<size>[\da-fA-F]+)) \s+
608 (?P<symbol>
609 ''' +
610 r'(?P<notag>' + r'|'.join(sorted(api32)) + r')' +
611 r'''
612 (@+(?P<tag>GLIBC_\d+\.\d+\S*)))
613 ''', re.VERBOSE
614 )
615
616    # elf is an oe.qa.ELFFile object
617 if elf is not None:
618 phdrs = elf.run_objdump("-tw", d)
619 syms = re.finditer(ptrn, phdrs)
620 usedapis = {sym.group('notag') for sym in syms}
621 if usedapis:
622 elfpath = package_qa_clean_path(path, d, packagename)
623 # Remove any .debug dir, heuristic that probably works
624 # At this point, any symbol information is stripped into the debug
625 # package, so that is the only place we will find them.
626 elfpath = elfpath.replace('.debug/', '')
627            allowed = "32bit-time" in (d.getVar('INSANE_SKIP') or '').split()
628 if not allowed:
629                msgformat = elfpath + " uses 32-bit api '%s'"
630 for sym in usedapis:
631 oe.qa.add_message(messages, '32bit-time', msgformat % sym)
632 oe.qa.add_message(
633 messages, '32bit-time',
634                    'Suppress with INSANE_SKIP = "32bit-time"'
635                )
636
637# Check license variables
638do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
639python populate_lic_qa_checksum() {
640 """
641 Check for changes in the license files.
642 """
643
644 lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
645 lic = d.getVar('LICENSE')
646 pn = d.getVar('PN')
647
648 if lic == "CLOSED":
649 return
650
651 if not lic_files and d.getVar('SRC_URI'):
652 oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
653
654 srcdir = d.getVar('S')
655 corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
656 for url in lic_files.split():
657 try:
658 (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
659 except bb.fetch.MalformedUrl:
660 oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
661 continue
662 srclicfile = os.path.join(srcdir, path)
663 if not os.path.isfile(srclicfile):
664 oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
665 continue
666
667 if (srclicfile == corebase_licensefile):
668 bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")
669
670 recipemd5 = parm.get('md5', '')
671 beginline, endline = 0, 0
672 if 'beginline' in parm:
673 beginline = int(parm['beginline'])
674 if 'endline' in parm:
675 endline = int(parm['endline'])
676
677 if (not beginline) and (not endline):
678 md5chksum = bb.utils.md5_file(srclicfile)
679 with open(srclicfile, 'r', errors='replace') as f:
680 license = f.read().splitlines()
681 else:
682 with open(srclicfile, 'rb') as f:
683 import hashlib
684 lineno = 0
685 license = []
686            try:
687 m = hashlib.new('MD5', usedforsecurity=False)
688 except TypeError:
689 m = hashlib.new('MD5')
690            for line in f:
691 lineno += 1
692 if (lineno >= beginline):
693 if ((lineno <= endline) or not endline):
694 m.update(line)
695 license.append(line.decode('utf-8', errors='replace').rstrip())
696 else:
697 break
698 md5chksum = m.hexdigest()
699 if recipemd5 == md5chksum:
700 bb.note (pn + ": md5 checksum matched for ", url)
701 else:
702 if recipemd5:
703 msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
704 msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
705 max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
706 if not license or license[-1] != '':
707 # Ensure that our license text ends with a line break
708 # (will be added with join() below).
709 license.append('')
710 remove = len(license) - max_lines
711 if remove > 0:
712 start = max_lines // 2
713 end = start + remove - 1
714 del license[start:end]
715 license.insert(start, '...')
716 msg = msg + "\n" + pn + ": Here is the selected license text:" + \
717 "\n" + \
718 "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
719 "\n" + "\n".join(license) + \
720 "{:^^70}".format(" endline=%d " % endline if endline else "")
721 if beginline:
722 if endline:
723 srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
724 else:
725 srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
726 elif endline:
727 srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
728 else:
729 srcfiledesc = srclicfile
730 msg = msg + "\n" + pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic)
731
732 else:
733 msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
734 msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
735 oe.qa.handle_error("license-checksum", msg, d)
736
737 oe.qa.exit_if_errors(d)
738}
739
740def qa_check_staged(path,d):
741 """
742 Check staged la and pc files for common problems like references to the work
743 directory.
744
745 As this is run after every stage we should be able to find the one
746 responsible for the errors easily even if we look at every .pc and .la file.
747 """
748
749 tmpdir = d.getVar('TMPDIR')
750 workdir = os.path.join(tmpdir, "work")
751 recipesysroot = d.getVar("RECIPE_SYSROOT")
752
753 if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
754 pkgconfigcheck = workdir
755 else:
756 pkgconfigcheck = tmpdir
757
758 skip = (d.getVar('INSANE_SKIP') or "").split()
759 skip_la = False
760 if 'la' in skip:
761 bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
762 skip_la = True
763
764 skip_pkgconfig = False
765 if 'pkgconfig' in skip:
766 bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
767 skip_pkgconfig = True
768
769 skip_shebang_size = False
770 if 'shebang-size' in skip:
771 bb.note("Recipe %s skipping qa checkking: shebang-size" % d.getVar('PN'))
772 skip_shebang_size = True
773
774 # find all .la and .pc files
775 # read the content
776 # and check for stuff that looks wrong
777 for root, dirs, files in os.walk(path):
778 for file in files:
779 path = os.path.join(root,file)
780 if file.endswith(".la") and not skip_la:
781 with open(path) as f:
782 file_content = f.read()
783 file_content = file_content.replace(recipesysroot, "")
784 if workdir in file_content:
785 error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
786 oe.qa.handle_error("la", error_msg, d)
787 elif file.endswith(".pc") and not skip_pkgconfig:
788 with open(path) as f:
789 file_content = f.read()
790 file_content = file_content.replace(recipesysroot, "")
791 if pkgconfigcheck in file_content:
792 error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
793 oe.qa.handle_error("pkgconfig", error_msg, d)
794
795 if not skip_shebang_size:
796 errors = {}
797 package_qa_check_shebang_size(path, "", d, None, errors)
798 for e in errors:
799 oe.qa.handle_error(e, errors[e], d)
800
801
802# Run all package-wide warnfuncs and errorfuncs
803def package_qa_package(warnfuncs, errorfuncs, package, d):
804 warnings = {}
805 errors = {}
806
807 for func in warnfuncs:
808 func(package, d, warnings)
809 for func in errorfuncs:
810 func(package, d, errors)
811
812 for w in warnings:
813 oe.qa.handle_error(w, warnings[w], d)
814 for e in errors:
815 oe.qa.handle_error(e, errors[e], d)
816
817 return len(errors) == 0
818
819# Run all recipe-wide warnfuncs and errorfuncs
820def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
821 warnings = {}
822 errors = {}
823
824 for func in warnfuncs:
825 func(pn, d, warnings)
826 for func in errorfuncs:
827 func(pn, d, errors)
828
829 for w in warnings:
830 oe.qa.handle_error(w, warnings[w], d)
831 for e in errors:
832 oe.qa.handle_error(e, errors[e], d)
833
834 return len(errors) == 0
835
836def prepopulate_objdump_p(elf, d):
837 output = elf.run_objdump("-p", d)
838 return (elf.name, output)
839
840# Walk over all files in a directory and call func
841def package_qa_walk(warnfuncs, errorfuncs, package, d):
842 #if this will throw an exception, then fix the dict above
843 target_os = d.getVar('HOST_OS')
844 target_arch = d.getVar('HOST_ARCH')
845
846 warnings = {}
847 errors = {}
848 elves = {}
849 for path in pkgfiles[package]:
850 elf = None
851 if os.path.isfile(path):
852 elf = oe.qa.ELFFile(path)
853 try:
854 elf.open()
855 elf.close()
856 except oe.qa.NotELFFileError:
857 elf = None
858 if elf:
859 elves[path] = elf
860
861 results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,))
862 for item in results:
863 elves[item[0]].set_objdump("-p", item[1])
864
865 for path in pkgfiles[package]:
866 if path in elves:
867 elves[path].open()
868 for func in warnfuncs:
869 func(path, package, d, elves.get(path), warnings)
870 for func in errorfuncs:
871 func(path, package, d, elves.get(path), errors)
872 if path in elves:
873 elves[path].close()
874
875 for w in warnings:
876 oe.qa.handle_error(w, warnings[w], d)
877 for e in errors:
878 oe.qa.handle_error(e, errors[e], d)
879
880def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
881 # Don't do this check for kernel/module recipes, there aren't too many debug/development
882 # packages and you can get false positives e.g. on kernel-module-lirc-dev
883 if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
884 return
885
886 if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
887 localdata = bb.data.createCopy(d)
888 localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)
889
890 # Now check the RDEPENDS
891 rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
892
893 # Now do the sanity check!!!
894 if "build-deps" not in skip:
895 for rdepend in rdepends:
896 if "-dbg" in rdepend and "debug-deps" not in skip:
897 error_msg = "%s rdepends on %s" % (pkg,rdepend)
898 oe.qa.handle_error("debug-deps", error_msg, d)
899 if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
900 error_msg = "%s rdepends on %s" % (pkg, rdepend)
901 oe.qa.handle_error("dev-deps", error_msg, d)
902 if rdepend not in packages:
903 rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
904 if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
905 continue
906 if not rdep_data or not 'PN' in rdep_data:
907 pkgdata_dir = d.getVar("PKGDATA_DIR")
908 try:
909 possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
910 except OSError:
911 possibles = []
912 for p in possibles:
913 rdep_data = oe.packagedata.read_subpkgdata(p, d)
914 if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
915 break
916 if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
917 continue
918 if rdep_data and 'PN' in rdep_data:
919 error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
920 else:
921 error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
922 oe.qa.handle_error("build-deps", error_msg, d)
923
924 if "file-rdeps" not in skip:
925 ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
926 if bb.data.inherits_class('nativesdk', d):
927 ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
928 # For Saving the FILERDEPENDS
929 filerdepends = {}
930 rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
931 for key in rdep_data:
932 if key.startswith("FILERDEPENDS:"):
933 for subkey in bb.utils.explode_deps(rdep_data[key]):
934 if subkey not in ignored_file_rdeps and \
935 not subkey.startswith('perl('):
936                        # We already know it starts with FILERDEPENDS:
937 filerdepends[subkey] = key[13:]
938
939 if filerdepends:
940 done = rdepends[:]
941 # Add the rprovides of itself
942 if pkg not in done:
943 done.insert(0, pkg)
944
945            # python itself is not a package, but python-core provides it, so
946 # skip checking /usr/bin/python if python is in the rdeps, in
947 # case there is a RDEPENDS:pkg = "python" in the recipe.
948 for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
949 if py in done:
950 filerdepends.pop("/usr/bin/python",None)
951 done.remove(py)
952 for rdep in done:
953 # The file dependencies may contain package names, e.g.,
954 # perl
955 filerdepends.pop(rdep,None)
956
957 # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
958 rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
959 for key in rdep_data:
960 if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
961 for subkey in bb.utils.explode_deps(rdep_data[key]):
962 filerdepends.pop(subkey,None)
963 # Add the files list to the rprovides
964 if key.startswith("FILES_INFO:"):
965 # Use eval() to make it as a dict
966 for subkey in eval(rdep_data[key]):
967 filerdepends.pop(subkey,None)
968 if not filerdepends:
969 # Break if all the file rdepends are met
970 break
971 if filerdepends:
972 for key in filerdepends:
973 error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \
974 (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
975 oe.qa.handle_error("file-rdeps", error_msg, d)
976package_qa_check_rdepends[vardepsexclude] = "OVERRIDES"
977
978def package_qa_check_deps(pkg, pkgdest, d):
979
980 localdata = bb.data.createCopy(d)
981 localdata.setVar('OVERRIDES', pkg)
982
983 def check_valid_deps(var):
984 try:
985 rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
986 except ValueError as e:
987 bb.fatal("%s:%s: %s" % (var, pkg, e))
988 for dep in rvar:
989 for v in rvar[dep]:
990 if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
991 error_msg = "%s:%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
992 oe.qa.handle_error("dep-cmp", error_msg, d)
993
994 check_valid_deps('RDEPENDS')
995 check_valid_deps('RRECOMMENDS')
996 check_valid_deps('RSUGGESTS')
997 check_valid_deps('RPROVIDES')
998 check_valid_deps('RREPLACES')
999 check_valid_deps('RCONFLICTS')
1000
1001QAPKGTEST[usrmerge] = "package_qa_check_usrmerge"
1002def package_qa_check_usrmerge(pkg, d, messages):
1003
1004 pkgdest = d.getVar('PKGDEST')
1005 pkg_dir = pkgdest + os.sep + pkg + os.sep
1006 merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
1007 for f in merged_dirs:
1008 if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f):
1009 msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f)
1010 oe.qa.add_message(messages, "usrmerge", msg)
1011 return False
1012 return True
1013
1014QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod"
1015def package_qa_check_perllocalpod(pkg, d, messages):
1016 """
1017    Check that the recipe didn't ship a perllocal.pod file, which shouldn't be
1018 installed in a distribution package. cpan.bbclass sets NO_PERLLOCAL=1 to
1019 handle this for most recipes.
1020 """
1021 import glob
1022 pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
1023 podpath = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")
1024
1025 matches = glob.glob(podpath)
1026 if matches:
1027 matches = [package_qa_clean_path(path, d, pkg) for path in matches]
1028 msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches))
1029 oe.qa.add_message(messages, "perllocalpod", msg)
1030
1031QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
1032def package_qa_check_expanded_d(package, d, messages):
1033 """
1034 Check for the expanded D (${D}) value in pkg_* and FILES
1035 variables, warn the user to use it correctly.
1036 """
1037 sane = True
1038 expanded_d = d.getVar('D')
1039
1040 for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
1041 bbvar = d.getVar(var + ":" + package) or ""
1042 if expanded_d in bbvar:
1043 if var == 'FILES':
1044 oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
1045 sane = False
1046 else:
1047 oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
1048 sane = False
1049 return sane
1050
1051QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics"
1052def package_qa_check_unlisted_pkg_lics(package, d, messages):
1053 """
1054 Check that all licenses for a package are among the licenses for the recipe.
1055 """
1056 pkg_lics = d.getVar('LICENSE:' + package)
1057 if not pkg_lics:
1058 return True
1059
1060 recipe_lics_set = oe.license.list_licenses(d.getVar('LICENSE'))
1061 package_lics = oe.license.list_licenses(pkg_lics)
1062 unlisted = package_lics - recipe_lics_set
1063 if unlisted:
1064 oe.qa.add_message(messages, "unlisted-pkg-lics",
1065 "LICENSE:%s includes licenses (%s) that are not "
1066 "listed in LICENSE" % (package, ' '.join(unlisted)))
1067 return False
1068 obsolete = set(oe.license.obsolete_license_list()) & package_lics - recipe_lics_set
1069 if obsolete:
1070 oe.qa.add_message(messages, "obsolete-license",
1071 "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)))
1072 return False
1073 return True
1074
1075QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs"
1076def package_qa_check_empty_dirs(pkg, d, messages):
1077 """
1078 Check for the existence of files in directories that are expected to be
1079 empty.
1080 """
1081
1082 pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
1083 for dir in (d.getVar('QA_EMPTY_DIRS') or "").split():
1084 empty_dir = oe.path.join(pkgd, dir)
1085 if os.path.exists(empty_dir) and os.listdir(empty_dir):
1086 recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dir) or
1087 "but it is expected to be empty")
1088 msg = "%s installs files in %s, %s" % (pkg, dir, recommendation)
1089 oe.qa.add_message(messages, "empty-dirs", msg)
1090
1091def package_qa_check_encoding(keys, encode, d):
1092 def check_encoding(key, enc):
1093 sane = True
1094 value = d.getVar(key)
1095 if value:
1096 try:
1097 s = value.encode(enc)
1098 except UnicodeDecodeError as e:
1099 error_msg = "%s has non %s characters" % (key,enc)
1100 sane = False
1101 oe.qa.handle_error("invalid-chars", error_msg, d)
1102 return sane
1103
1104 for key in keys:
1105 sane = check_encoding(key, encode)
1106 if not sane:
1107 break
1108
1109HOST_USER_UID := "${@os.getuid()}"
1110HOST_USER_GID := "${@os.getgid()}"
1111
1112QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user"
1113def package_qa_check_host_user(path, name, d, elf, messages):
1114 """Check for paths outside of /home which are owned by the user running bitbake."""
1115
1116 if not os.path.lexists(path):
1117 return
1118
1119 dest = d.getVar('PKGDEST')
1120 pn = d.getVar('PN')
1121 home = os.path.join(dest, name, 'home')
1122 if path == home or path.startswith(home + os.sep):
1123 return
1124
1125 try:
1126 stat = os.lstat(path)
1127 except OSError as exc:
1128 import errno
1129 if exc.errno != errno.ENOENT:
1130 raise
1131 else:
1132 check_uid = int(d.getVar('HOST_USER_UID'))
1133 if stat.st_uid == check_uid:
1134 oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
1135 return False
1136
1137 check_gid = int(d.getVar('HOST_USER_GID'))
1138 if stat.st_gid == check_gid:
1139 oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
1140 return False
1141 return True
1142
1143QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check"
1144def package_qa_check_unhandled_features_check(pn, d, messages):
1145 if not bb.data.inherits_class('features_check', d):
1146 var_set = False
1147 for kind in ['DISTRO', 'MACHINE', 'COMBINED']:
1148 for var in ['ANY_OF_' + kind + '_FEATURES', 'REQUIRED_' + kind + '_FEATURES', 'CONFLICT_' + kind + '_FEATURES']:
1149 if d.getVar(var) is not None or d.hasOverrides(var):
1150 var_set = True
1151 if var_set:
1152 oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
1153
1154QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives"
1155def package_qa_check_missing_update_alternatives(pn, d, messages):
1156    # Look at all packages and find out if any of them sets the ALTERNATIVE variable
1157    # without inheriting the update-alternatives class
1158 for pkg in (d.getVar('PACKAGES') or '').split():
1159 if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d):
1160 oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
1161
1162# The PACKAGE FUNC to scan each package
1163python do_package_qa () {
1164 import subprocess
1165 import oe.packagedata
1166
1167 bb.note("DO PACKAGE QA")
1168
1169 main_lic = d.getVar('LICENSE')
1170
1171 # Check for obsolete license references in main LICENSE (packages are checked below for any changes)
1172 main_licenses = oe.license.list_licenses(d.getVar('LICENSE'))
1173 obsolete = set(oe.license.obsolete_license_list()) & main_licenses
1174 if obsolete:
1175 oe.qa.handle_error("obsolete-license", "Recipe LICENSE includes obsolete licenses %s" % ' '.join(obsolete), d)
1176
1177 bb.build.exec_func("read_subpackage_metadata", d)
1178
1179 # Check non UTF-8 characters on recipe's metadata
1180 package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
1181
1182 logdir = d.getVar('T')
1183 pn = d.getVar('PN')
1184
1185 # Scan the packages...
1186 pkgdest = d.getVar('PKGDEST')
1187 packages = set((d.getVar('PACKAGES') or '').split())
1188
1189 global pkgfiles
1190 pkgfiles = {}
1191 for pkg in packages:
1192 pkgfiles[pkg] = []
1193 pkgdir = os.path.join(pkgdest, pkg)
1194 for walkroot, dirs, files in os.walk(pkgdir):
1195 # Don't walk into top-level CONTROL or DEBIAN directories as these
1196 # are temporary directories created by do_package.
1197 if walkroot == pkgdir:
1198 for control in ("CONTROL", "DEBIAN"):
1199 if control in dirs:
1200 dirs.remove(control)
1201 for file in files:
1202 pkgfiles[pkg].append(os.path.join(walkroot, file))
1203
1204    # if there are no packages, there is nothing to scan
1205 if not packages:
1206 return
1207
1208 import re
1209 # The package name matches the [a-z0-9.+-]+ regular expression
1210 pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")
1211
1212 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
1213 taskdeps = set()
1214 for dep in taskdepdata:
1215 taskdeps.add(taskdepdata[dep][0])
1216
1217 def parse_test_matrix(matrix_name):
1218 testmatrix = d.getVarFlags(matrix_name) or {}
1219 g = globals()
1220 warnchecks = []
1221 for w in (d.getVar("WARN_QA") or "").split():
1222 if w in skip:
1223 continue
1224 if w in testmatrix and testmatrix[w] in g:
1225 warnchecks.append(g[testmatrix[w]])
1226
1227 errorchecks = []
1228 for e in (d.getVar("ERROR_QA") or "").split():
1229 if e in skip:
1230 continue
1231 if e in testmatrix and testmatrix[e] in g:
1232 errorchecks.append(g[testmatrix[e]])
1233 return warnchecks, errorchecks
1234
1235 for package in packages:
1236 skip = set((d.getVar('INSANE_SKIP') or "").split() +
1237 (d.getVar('INSANE_SKIP:' + package) or "").split())
1238 if skip:
1239 bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
1240
1241 bb.note("Checking Package: %s" % package)
1242 # Check package name
1243 if not pkgname_pattern.match(package):
1244 oe.qa.handle_error("pkgname",
1245 "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
1246
1247 warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
1248 package_qa_walk(warn_checks, error_checks, package, d)
1249
1250 warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
1251 package_qa_package(warn_checks, error_checks, package, d)
1252
1253 package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
1254 package_qa_check_deps(package, pkgdest, d)
1255
1256 warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
1257 package_qa_recipe(warn_checks, error_checks, pn, d)
1258
1259 if 'libdir' in d.getVar("ALL_QA").split():
1260 package_qa_check_libdir(d)
1261
1262 oe.qa.exit_if_errors(d)
1263}
1264
1265# binutils is used for most checks, so it needs to be set as a dependency.
1266# POPULATESYSROOTDEPS is defined in staging class.
1267do_package_qa[depends] += "${POPULATESYSROOTDEPS}"
1268do_package_qa[vardeps] = "${@bb.utils.contains('ERROR_QA', 'empty-dirs', 'QA_EMPTY_DIRS', '', d)}"
1269do_package_qa[vardepsexclude] = "BB_TASKDEPDATA"
1270do_package_qa[rdeptask] = "do_packagedata"
1271addtask do_package_qa after do_packagedata do_package before do_build
1272
1273# Add the package specific INSANE_SKIPs to the sstate dependencies
1274python() {
1275 pkgs = (d.getVar('PACKAGES') or '').split()
1276 for pkg in pkgs:
1277 d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg))
1278}
1279
1280SSTATETASKS += "do_package_qa"
1281do_package_qa[sstate-inputdirs] = ""
1282do_package_qa[sstate-outputdirs] = ""
1283python do_package_qa_setscene () {
1284 sstate_setscene(d)
1285}
1286addtask do_package_qa_setscene
1287
1288python do_qa_sysroot() {
1289 bb.note("QA checking do_populate_sysroot")
1290 sysroot_destdir = d.expand('${SYSROOT_DESTDIR}')
1291 for sysroot_dir in d.expand('${SYSROOT_DIRS}').split():
1292 qa_check_staged(sysroot_destdir + sysroot_dir, d)
1293 oe.qa.exit_with_message_if_errors("do_populate_sysroot for this recipe installed files with QA issues", d)
1294}
1295do_populate_sysroot[postfuncs] += "do_qa_sysroot"
1296
1297python do_qa_patch() {
1298 import subprocess
1299
1300 ###########################################################################
1301 # Check patch.log for fuzz warnings
1302 #
1303 # Further information on why we check for patch fuzz warnings:
1304 # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
1305 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
1306 ###########################################################################
1307
1308 logdir = d.getVar('T')
1309 patchlog = os.path.join(logdir,"log.do_patch")
1310
1311 if os.path.exists(patchlog):
1312 fuzzheader = '--- Patch fuzz start ---'
1313 fuzzfooter = '--- Patch fuzz end ---'
1314 statement = "grep -e '%s' %s > /dev/null" % (fuzzheader, patchlog)
1315 if subprocess.call(statement, shell=True) == 0:
1316 msg = "Fuzz detected:\n\n"
1317 fuzzmsg = ""
1318 inFuzzInfo = False
1319 f = open(patchlog, "r")
1320 for line in f:
1321 if fuzzheader in line:
1322 inFuzzInfo = True
1323 fuzzmsg = ""
1324 elif fuzzfooter in line:
1325 fuzzmsg = fuzzmsg.replace('\n\n', '\n')
1326 msg += fuzzmsg
1327 msg += "\n"
1328 inFuzzInfo = False
1329 elif inFuzzInfo and not 'Now at patch' in line:
1330 fuzzmsg += line
1331 f.close()
1332 msg += "The context lines in the patches can be updated with devtool:\n"
1333 msg += "\n"
1334 msg += " devtool modify %s\n" % d.getVar('PN')
1335 msg += " devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
1336 msg += "Don't forget to review changes done by devtool!\n"
1337        msg += "\nPatch log indicates that patches do not apply cleanly."
1338        oe.qa.handle_error("patch-fuzz", msg, d)
1339
1340 # Check if the patch contains a correctly formatted and spelled Upstream-Status
1341 import re
1342 from oe import patch
1343
1344 coremeta_path = os.path.join(d.getVar('COREBASE'), 'meta', '')
1345 for url in patch.src_patches(d):
1346        (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)
1347
1348        msg = oe.qa.check_upstream_status(fullpath)
1349 if msg:
1350            oe.qa.handle_error("patch-status", msg, d)
1351
1352 oe.qa.exit_if_errors(d)
1353}
1354
1355python do_qa_configure() {
1356 import subprocess
1357
1358 ###########################################################################
1359 # Check config.log for cross compile issues
1360 ###########################################################################
1361
1362 configs = []
1363 workdir = d.getVar('WORKDIR')
1364
1365 skip = (d.getVar('INSANE_SKIP') or "").split()
1366 skip_configure_unsafe = False
1367 if 'configure-unsafe' in skip:
1368 bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
1369 skip_configure_unsafe = True
1370
1371 if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
1372 bb.note("Checking autotools environment for common misconfiguration")
1373 for root, dirs, files in os.walk(workdir):
1374 statement = "grep -q -F -e 'is unsafe for cross-compilation' %s" % \
1375 os.path.join(root,"config.log")
1376 if "config.log" in files:
1377 if subprocess.call(statement, shell=True) == 0:
1378 error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
1379Rerun configure task after fixing this."""
1380 oe.qa.handle_error("configure-unsafe", error_msg, d)
1381
1382 if "configure.ac" in files:
1383 configs.append(os.path.join(root,"configure.ac"))
1384 if "configure.in" in files:
1385 configs.append(os.path.join(root, "configure.in"))
1386
1387 ###########################################################################
1388 # Check gettext configuration and dependencies are correct
1389 ###########################################################################
1390
1391 skip_configure_gettext = False
1392 if 'configure-gettext' in skip:
1393 bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
1394 skip_configure_gettext = True
1395
1396 cnf = d.getVar('EXTRA_OECONF') or ""
1397 if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
1398 "--disable-nls" in cnf or skip_configure_gettext):
1399 ml = d.getVar("MLPREFIX") or ""
1400 if bb.data.inherits_class('cross-canadian', d):
1401 gt = "nativesdk-gettext"
1402 else:
1403 gt = "gettext-native"
1404 deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
1405 if gt not in deps:
1406 for config in configs:
1407 gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
1408 if subprocess.call(gnu, shell=True) == 0:
1409 error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
1410 oe.qa.handle_error("configure-gettext", error_msg, d)
1411
1412 ###########################################################################
1413    # Check for unrecognised configure options (against an ignore list)
1414 ###########################################################################
1415 if bb.data.inherits_class("autotools", d):
1416 bb.note("Checking configure output for unrecognised options")
1417 try:
1418 if bb.data.inherits_class("autotools", d):
1419 flag = "WARNING: unrecognized options:"
1420 log = os.path.join(d.getVar('B'), 'config.log')
1421 output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
1422 options = set()
1423 for line in output.splitlines():
1424 options |= set(line.partition(flag)[2].split())
1425 ignore_opts = set(d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE").split())
1426 options -= ignore_opts
1427 if options:
1428 pn = d.getVar('PN')
1429 error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
1430 oe.qa.handle_error("unknown-configure-option", error_msg, d)
1431 except subprocess.CalledProcessError:
1432 pass
1433
1434 # Check invalid PACKAGECONFIG
1435 pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
1436 if pkgconfig:
1437 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
1438 for pconfig in pkgconfig:
1439 if pconfig not in pkgconfigflags:
1440 pn = d.getVar('PN')
1441 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
1442 oe.qa.handle_error("invalid-packageconfig", error_msg, d)
1443
1444 oe.qa.exit_if_errors(d)
1445}
1446
1447def unpack_check_src_uri(pn, d):
1448 import re
1449
1450 skip = (d.getVar('INSANE_SKIP') or "").split()
1451 if 'src-uri-bad' in skip:
1452 bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
1453 return
1454
1455 if "${PN}" in d.getVar("SRC_URI", False):
1456 oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
1457
1458 for url in d.getVar("SRC_URI").split():
1459 # Search for github and gitlab URLs that pull unstable archives (comment for future greppers)
1460        if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url) or "//codeload.github.com/" in url:
1461            oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
1462
1463python do_qa_unpack() {
1464 src_uri = d.getVar('SRC_URI')
1465 s_dir = d.getVar('S')
1466 if src_uri and not os.path.exists(s_dir):
1467 bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
1468
1469 unpack_check_src_uri(d.getVar('PN'), d)
1470}
1471
1472# Check for patch fuzz
1473do_patch[postfuncs] += "do_qa_patch "
1474
1475# Check for broken config.log files and for packages requiring gettext that
1476# don't have it in DEPENDS.
1477#addtask qa_configure after do_configure before do_compile
1478do_configure[postfuncs] += "do_qa_configure "
1479
1480# Check whether S exists.
1481do_unpack[postfuncs] += "do_qa_unpack"
1482
1483python () {
1484 import re
1485
1486 tests = d.getVar('ALL_QA').split()
1487 if "desktop" in tests:
1488 d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
1489
1490 ###########################################################################
1491 # Check various variables
1492 ###########################################################################
1493
1494 # Checking ${FILESEXTRAPATHS}
1495 extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
1496 if '__default' not in extrapaths.split(":"):
1497 msg = "FILESEXTRAPATHS-variable, must always use :prepend (or :append)\n"
1498 msg += "type of assignment, and don't forget the colon.\n"
1499 msg += "Please assign it with the format of:\n"
1500 msg += " FILESEXTRAPATHS:append := \":${THISDIR}/Your_Files_Path\" or\n"
1501 msg += " FILESEXTRAPATHS:prepend := \"${THISDIR}/Your_Files_Path:\"\n"
1502 msg += "in your bbappend file\n\n"
1503 msg += "Your incorrect assignment is:\n"
1504 msg += "%s\n" % extrapaths
1505 bb.warn(msg)
1506
1507 overrides = d.getVar('OVERRIDES').split(':')
1508 pn = d.getVar('PN')
1509 if pn in overrides:
1510 msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
1511 oe.qa.handle_error("pn-overrides", msg, d)
1512 prog = re.compile(r'[A-Z]')
1513 if prog.search(pn):
1514 oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)
1515
1516 # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder
1517 # why it doesn't work.
1518 if (d.getVar(d.expand('DEPENDS:${PN}'))):
1519 oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)
1520
1521 issues = []
1522 if (d.getVar('PACKAGES') or "").split():
1523 for dep in (d.getVar('QADEPENDS') or "").split():
1524 d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
1525 for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
1526 if d.getVar(var, False):
1527 issues.append(var)
1528
1529 fakeroot_tests = d.getVar('FAKEROOT_QA').split()
1530 if set(tests) & set(fakeroot_tests):
1531 d.setVarFlag('do_package_qa', 'fakeroot', '1')
1532 d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
1533 else:
1534 d.setVarFlag('do_package_qa', 'rdeptask', '')
1535 for i in issues:
1536 oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
1537
1538 if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
1539 for native_class in ['native', 'nativesdk']:
1540 if bb.data.inherits_class(native_class, d):
1541
1542 inherited_classes = d.getVar('__inherit_cache', False) or []
1543 needle = "/" + native_class
1544
1545 bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
1546                # BBCLASSEXTEND items are always added at the end
1547 skip_classes = bbclassextend
1548 if bb.data.inherits_class('native', d) or 'native' in bbclassextend:
1549 # native also inherits nopackages and relocatable bbclasses
1550 skip_classes.extend(['nopackages', 'relocatable'])
1551
1552 broken_order = []
1553 for class_item in reversed(inherited_classes):
1554 if needle not in class_item:
1555 for extend_item in skip_classes:
1556 if '/%s.bbclass' % extend_item in class_item:
1557 break
1558 else:
1559 pn = d.getVar('PN')
1560 broken_order.append(os.path.basename(class_item))
1561 else:
1562 break
1563 if broken_order:
1564 oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
1565 "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)
1566
1567 oe.qa.exit_if_errors(d)
1568}