| Patrick Williams | 92b42cb | 2022-09-03 06:53:57 -0500 | [diff] [blame] | 1 | # | 
|  | 2 | # Copyright OpenEmbedded Contributors | 
|  | 3 | # | 
|  | 4 | # SPDX-License-Identifier: MIT | 
|  | 5 | # | 
|  | 6 |  | 
|  | 7 | # BB Class inspired by ebuild.sh | 
|  | 8 | # | 
|  | 9 | # This class will test files after installation for certain | 
|  | 10 | # security issues and other kind of issues. | 
|  | 11 | # | 
|  | 12 | # Checks we do: | 
|  | 13 | #  -Check the ownership and permissions | 
|  | 14 | #  -Check the RUNTIME path for the $TMPDIR | 
|  | 15 | #  -Check if .la files wrongly point to workdir | 
|  | 16 | #  -Check if .pc files wrongly point to workdir | 
|  | 17 | #  -Check if packages contains .debug directories or .so files | 
|  | 18 | #   where they should be in -dev or -dbg | 
|  | 19 | #  -Check if config.log contains traces to broken autoconf tests | 
|  | 20 | #  -Check invalid characters (non-utf8) on some package metadata | 
|  | 21 | #  -Ensure that binaries in base_[bindir|sbindir|libdir] do not link | 
|  | 22 | #   into exec_prefix | 
|  | 23 | #  -Check that scripts in base_[bindir|sbindir|libdir] do not reference | 
|  | 24 | #   files under exec_prefix | 
|  | 25 | #  -Check if the package name is upper case | 
|  | 26 |  | 
# Elect whether a given type of error is a warning or error, they may
# have been set by other files.
# Checks listed in WARN_QA only emit a warning; the build continues.
WARN_QA ?= " libdir xorg-driver-abi buildpaths \
            textrel incompatible-license files-invalid \
            infodir build-deps src-uri-bad symlink-to-sysroot multilib \
            invalid-packageconfig host-user-contaminated uppercase-pn \
            mime mime-xdg unlisted-pkg-lics unhandled-features-check \
            missing-update-alternatives native-last missing-ptest \
            license-exists license-no-generic license-syntax license-format \
            license-incompatible license-file-missing obsolete-license \
            "
# Checks listed in ERROR_QA fail the do_package_qa task and stop the build.
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
            perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
            split-strip packages-list pkgv-undefined var-undefined \
            version-going-backwards expanded-d invalid-chars \
            license-checksum dev-elf file-rdeps configure-unsafe \
            configure-gettext perllocalpod shebang-size \
            already-stripped installed-vs-shipped ldflags compile-host-path \
            install-host-path pn-overrides unknown-configure-option \
            useless-rpaths rpaths staticdev empty-dirs \
            patch-fuzz \
            "
# Add usrmerge QA check based on distro feature
ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}"
# patch-status is only enforced for recipes in the core layer.
ERROR_QA:append:layer-core = " patch-status"
| Patrick Williams | 92b42cb | 2022-09-03 06:53:57 -0500 | [diff] [blame] | 52 |  | 
# host-user-contaminated needs to see real on-disk ownership, hence fakeroot.
FAKEROOT_QA = "host-user-contaminated"
FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \
enabled tests are listed here, the do_package_qa task will run under fakeroot."

# Union of all known QA check names (warnings plus errors).
ALL_QA = "${WARN_QA} ${ERROR_QA}"

# Configure options commonly passed globally that individual configure
# scripts may not understand; not reported by unknown-configure-option.
UNKNOWN_CONFIGURE_OPT_IGNORE ?= "--enable-nls --disable-nls --disable-silent-rules --disable-dependency-tracking --with-libtool-sysroot --disable-static"
|  | 60 |  | 
# This is a list of directories that are expected to be empty.
# These are typically mount points or runtime-populated locations, so a
# package installing content into them is almost certainly a packaging bug.
QA_EMPTY_DIRS ?= " \
    /dev/pts \
    /media \
    /proc \
    /run \
    /tmp \
    ${localstatedir}/run \
    ${localstatedir}/volatile \
"
# It is possible to specify why a directory is expected to be empty by defining
# QA_EMPTY_DIRS_RECOMMENDATION:<path>, which will then be included in the error
# message if the directory is not empty. If it is not specified for a directory,
# then "but it is expected to be empty" will be used.
|  | 75 |  | 
def package_qa_clean_path(path, d, pkg=None):
    """
    Strip redundant build-tree prefixes from *path* for display.

    If *pkg* is given, the PKGDEST/<pkg> prefix is removed; in all cases
    TMPDIR is collapsed to "/" and doubled slashes are reduced.
    """
    if pkg:
        pkgdest_prefix = os.path.join(d.getVar("PKGDEST"), pkg)
        path = path.replace(pkgdest_prefix, "/")
    tmpdir = d.getVar("TMPDIR")
    return path.replace(tmpdir, "/").replace("//", "/")
|  | 84 |  | 
|  | 85 | QAPATHTEST[shebang-size] = "package_qa_check_shebang_size" | 
def package_qa_check_shebang_size(path, name, d, elf, messages):
    """
    Flag scripts whose shebang line exceeds the kernel's 128-byte limit.
    """
    import stat
    # Symlinks, fifos and ELF binaries cannot have an over-long shebang.
    if os.path.islink(path) or stat.S_ISFIFO(os.stat(path).st_mode) or elf:
        return

    # Read just past the limit so an over-long line is detectable.
    try:
        with open(path, 'rb') as script:
            first_line = script.readline(130)
    except IOError:
        return

    if not first_line.startswith(b'#!'):
        # No shebang, so not a script.
        return

    try:
        first_line = first_line.decode("utf-8")
    except UnicodeDecodeError:
        # If it is not a text file, it is not a script.
        return

    if len(first_line) > 129:
        oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
|  | 108 |  | 
|  | 109 | QAPATHTEST[libexec] = "package_qa_check_libexec" | 
def package_qa_check_libexec(path, name, d, elf, messages):
    """
    Warn when files live under a 'libexec' directory while libexecdir
    is not the default /usr/libexec (they should use ${libexecdir}).
    """
    libexec = d.getVar('libexecdir')

    # Skip the case where the default is explicitly /usr/libexec.
    if libexec == "/usr/libexec":
        return True

    if 'libexec' not in path.split(os.path.sep):
        return True

    oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
    return False
|  | 122 |  | 
|  | 123 | QAPATHTEST[rpaths] = "package_qa_check_rpath" | 
def package_qa_check_rpath(file, name, d, elf, messages):
    """
    Flag ELF binaries whose RPATH points into the build tree
    (BASE_WORKDIR or the target staging area).
    """
    import re

    if not elf or os.path.islink(file):
        return

    # RPATHs referencing these locations will dangle on the target.
    bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]

    rpath_re = re.compile(r"\s+RPATH\s+(.*)")
    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        for bad in bad_dirs:
            if bad in rpath:
                oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
|  | 147 |  | 
|  | 148 | QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths" | 
def package_qa_check_useless_rpaths(file, name, d, elf, messages):
    """
    Report RPATH entries that merely duplicate the default dynamic
    linker search path (libdir / base_libdir) — harmless but redundant.
    """
    import re

    if not elf or os.path.islink(file):
        return

    def rpath_eq(a, b):
        return os.path.normpath(a) == os.path.normpath(b)

    libdir = d.getVar("libdir")
    base_libdir = d.getVar("base_libdir")

    rpath_re = re.compile(r"\s+RPATH\s+(.*)")
    for line in elf.run_objdump("-p", d).split("\n"):
        match = rpath_re.match(line)
        if not match:
            continue
        rpath = match.group(1)
        if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
            # The dynamic linker searches both these places anyway.  There is no point in
            # looking there again.
            oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
|  | 177 |  | 
|  | 178 | QAPATHTEST[dev-so] = "package_qa_check_dev" | 
def package_qa_check_dev(path, name, d, elf, messages):
    """
    Flag .so symlinks shipped in packages that are not -dev/-dbg/-ptest
    or nativesdk- variants; such symlinks belong in the -dev package.
    """
    exempt = (name.endswith(("-dev", "-dbg", "-ptest"))
              or name.startswith("nativesdk-"))
    if exempt or not path.endswith(".so") or not os.path.islink(path):
        return
    oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
             (name, package_qa_clean_path(path, d, name)))
|  | 187 |  | 
|  | 188 | QAPATHTEST[dev-elf] = "package_qa_check_dev_elf" | 
def package_qa_check_dev_elf(path, name, d, elf, messages):
    """
    Ensure -dev packages carry only .so symlinks, not real ELF shared
    libraries.  The ELF check matters because some recipes install
    link-time .so files that are actually linker scripts, which are fine.
    """
    if not name.endswith("-dev"):
        return
    if not path.endswith(".so") or os.path.islink(path) or not elf:
        return
    oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
             (name, package_qa_clean_path(path, d, name)))
|  | 198 |  | 
|  | 199 | QAPATHTEST[staticdev] = "package_qa_check_staticdev" | 
def package_qa_check_staticdev(path, name, d, elf, messages):
    """
    Flag static .a libraries outside of -staticdev packages.

    Exceptions: -pic packages may contain static libraries, *_nonshared.a
    files belong with their -dev packages, and static debug data under
    /usr/lib/debug-static/ or /.debug-static/ is skipped.
    """
    if name.endswith(("-pic", "-staticdev", "-ptest")):
        return
    if not path.endswith(".a") or path.endswith("_nonshared.a"):
        return
    if '/usr/lib/debug-static/' in path or '/.debug-static/' in path:
        return
    oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
             (name, package_qa_clean_path(path,d, name)))
|  | 211 |  | 
|  | 212 | QAPATHTEST[mime] = "package_qa_check_mime" | 
def package_qa_check_mime(path, name, d, elf, messages):
    """
    Warn when a package ships mime type XML under
    ${datadir}/mime/packages without inheriting mime.bbclass.
    """
    mime_dir = d.getVar("datadir") + "/mime/packages"
    if mime_dir not in path or not path.endswith('.xml'):
        return
    if bb.data.inherits_class("mime", d):
        return
    oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
            (name, package_qa_clean_path(path,d)))
|  | 222 |  | 
|  | 223 | QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg" | 
def package_qa_check_mime_xdg(path, name, d, elf, messages):
    """
    Check if package installs a desktop file containing MimeType and
    therefore requires mime-xdg.bbclass so that
    /usr/share/applications/mimeinfo.cache gets created.
    """
    # Only look at .desktop files under ${datadir}/applications in
    # packages that do not already inherit mime-xdg.
    if d.getVar("datadir") + "/applications" not in path or not path.endswith('.desktop') or bb.data.inherits_class("mime-xdg", d):
        return

    mime_type_found = False
    try:
        with open(path, 'r') as f:
            for line in f.read().split('\n'):
                if 'MimeType' in line:
                    mime_type_found = True
                    break
    # Was a bare except; narrowed to the failures this is meant to catch:
    # unreadable/dangling files and non-UTF-8 content.
    except (OSError, UnicodeDecodeError):
        # At least libreoffice installs symlinks with absolute paths that are dangling here.
        # We could implement some magic but for few (one) recipes it is not worth the effort so just warn:
        wstr = "%s cannot open %s - is it a symlink with absolute path?\n" % (name, package_qa_clean_path(path,d))
        wstr += "Please check if (linked) file contains key 'MimeType'.\n"
        pkgname = name
        if name == d.getVar('PN'):
            pkgname = '${PN}'
        # Fixed typo: the advice previously said 'inhert mime-xdg', which is
        # not a valid directive if copied into a recipe.
        wstr += "If yes: add \'inherit mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
        oe.qa.add_message(messages, "mime-xdg", wstr)
    if mime_type_found:
        oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s path '%s'" % \
                (name, package_qa_clean_path(path,d)))
|  | 251 |  | 
def package_qa_check_libdir(d):
    """
    Check for wrong library installation paths. For instance, catch
    recipes installing /lib/bar.so when ${base_libdir}="lib32" or
    installing in /usr/lib64 when ${libdir}="/usr/lib"
    """
    import re

    pkgdest = d.getVar('PKGDEST')
    # Trailing separators make the substring checks below match whole
    # path components rather than prefixes of longer names.
    base_libdir = d.getVar("base_libdir") + os.sep
    libdir = d.getVar("libdir") + os.sep
    libexecdir = d.getVar("libexecdir") + os.sep
    exec_prefix = d.getVar("exec_prefix") + os.sep

    # Collected error strings; reported in one batch at the end.
    messages = []

    # The re's are purposely fuzzy, as some there are some .so.x.y.z files
    # that don't follow the standard naming convention. It checks later
    # that they are actual ELF files
    lib_re = re.compile(r"^/lib.+\.so(\..+)?$")
    exec_re = re.compile(r"^%s.*/lib.+\.so(\..+)?$" % exec_prefix)

    for root, dirs, files in os.walk(pkgdest):
        if root == pkgdest:
            # Skip subdirectories for any packages with libdir in INSANE_SKIP
            skippackages = []
            for package in dirs:
                if 'libdir' in (d.getVar('INSANE_SKIP:' + package) or "").split():
                    bb.note("Package %s skipping libdir QA test" % (package))
                    skippackages.append(package)
                elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
                    # -dbg packages mirror the real file layout under a debug
                    # directory, which would otherwise trip the checks below.
                    bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
                    skippackages.append(package)
            for package in skippackages:
                # Pruning dirs in place stops os.walk descending into them.
                dirs.remove(package)
        for file in files:
            full_path = os.path.join(root, file)
            rel_path = os.path.relpath(full_path, pkgdest)
            if os.sep in rel_path:
                # First component of rel_path is the package name, the
                # remainder is the file's path inside that package.
                package, rel_path = rel_path.split(os.sep, 1)
                rel_path = os.sep + rel_path
                if lib_re.match(rel_path):
                    if base_libdir not in rel_path:
                        # make sure it's an actual ELF file
                        elf = oe.qa.ELFFile(full_path)
                        try:
                            elf.open()
                            messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                        except (oe.qa.NotELFFileError):
                            pass
                if exec_re.match(rel_path):
                    if libdir not in rel_path and libexecdir not in rel_path:
                        # make sure it's an actual ELF file
                        elf = oe.qa.ELFFile(full_path)
                        try:
                            elf.open()
                            messages.append("%s: found library in wrong location: %s" % (package, rel_path))
                        except (oe.qa.NotELFFileError):
                            pass

    if messages:
        oe.qa.handle_error("libdir", "\n".join(messages), d)
|  | 314 |  | 
|  | 315 | QAPATHTEST[debug-files] = "package_qa_check_dbg" | 
def package_qa_check_dbg(path, name, d, elf, messages):
    """
    Flag .debug directories shipped outside of -dbg (or -ptest) packages.
    """
    if "-dbg" in name or "-ptest" in name:
        return
    if '.debug' not in path.split(os.path.sep):
        return
    oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
             (name, package_qa_clean_path(path,d)))
|  | 325 |  | 
|  | 326 | QAPATHTEST[arch] = "package_qa_check_arch" | 
def package_qa_check_arch(path,name,d, elf, messages):
    """
    Check if archs are compatible: compare the packaged ELF file's
    machine, ABI size and endianness against the recipe's target.
    """
    import re, oe.elf

    # Non-ELF files carry no architecture information.
    if not elf:
        return

    target_os   = d.getVar('HOST_OS')
    target_arch = d.getVar('HOST_ARCH')
    provides = d.getVar('PROVIDES')
    bpn = d.getVar('BPN')

    # allarch recipes must not package any architecture-specific binaries.
    if target_arch == "allarch":
        pn = d.getVar('PN')
        oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
        return

    # FIXME: Cross package confuse this check, so just skip them
    for s in ['cross', 'nativesdk', 'cross-canadian']:
        if bb.data.inherits_class(s, d):
            return

    # avoid following links to /usr/bin (e.g. on udev builds)
    # we will check the files pointed to anyway...
    if os.path.islink(path):
        return

    #if this will throw an exception, then fix the dict above
    (machine, osabi, abiversion, littleendian, bits) \
        = oe.elf.machine_dict(d)[target_os][target_arch]

    # Check the architecture and endiannes of the binary
    # is_32: kernel/module recipes on ILP32-style targets legitimately mix
    # 32-bit userspace objects with a 64-bit kernel, so tolerate them.
    is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
            (target_os == "linux-gnux32" or target_os == "linux-muslx32" or \
            target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
    # BPF objects are architecture-neutral bytecode; skip all three checks.
    is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
    if not ((machine == elf.machine()) or is_32 or is_bpf):
        oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
                (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
    elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
        oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
                (elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
    elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
        oe.qa.add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
                (elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
|  | 374 |  | 
|  | 375 | QAPATHTEST[desktop] = "package_qa_check_desktop" | 
def package_qa_check_desktop(path, name, d, elf, messages):
    """
    Run all desktop files through the native desktop-file-validate tool.
    """
    if not path.endswith(".desktop"):
        return
    validator = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'), 'desktop-file-validate')
    # desktop-file-validate is silent on success; any output is an issue.
    for line in os.popen("%s %s" % (validator, path)):
        oe.qa.add_message(messages, "desktop", "Desktop file issue: " + line.strip())
|  | 386 |  | 
|  | 387 | QAPATHTEST[textrel] = "package_qa_textrel" | 
def package_qa_textrel(path, name, d, elf, messages):
    """
    Report ELF binaries carrying text relocations (a TEXTREL dynamic tag).
    """
    import re

    if not elf or os.path.islink(path):
        return

    textrel_re = re.compile(r"\s+TEXTREL\s+")
    phdrs = elf.run_objdump("-p", d)
    has_textrel = any(textrel_re.match(line) for line in phdrs.split("\n"))

    if has_textrel:
        clean = package_qa_clean_path(path, d, name)
        oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, clean))
|  | 412 |  | 
|  | 413 | QAPATHTEST[ldflags] = "package_qa_hash_style" | 
def package_qa_hash_style(path, name, d, elf, messages):
    """
    When LDFLAGS request --hash-style=gnu/both, verify the binary really
    got a GNU_HASH section.  A binary with symbols but no GNU_HASH
    usually means the recipe dropped LDFLAGS during linking.
    """
    if not elf or os.path.islink(path):
        return

    ldflags = d.getVar('LDFLAGS')
    if "--hash-style=gnu" not in ldflags and "--hash-style=both" not in ldflags:
        return

    sane = False
    has_syms = False

    # If this binary has symbols, we expect it to have GNU_HASH too.
    for line in elf.run_objdump("-p", d).split("\n"):
        if "SYMTAB" in line:
            has_syms = True
        if "GNU_HASH" in line or "MIPS_XHASH" in line:
            sane = True
        # MIPS on musl has no GNU hash support, so do not warn there.
        if ("[mips32]" in line or "[mips64]" in line) and d.getVar('TCLIBC') == "musl":
            sane = True

    if has_syms and not sane:
        clean = package_qa_clean_path(path, d, name)
        oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (clean, name))
|  | 447 |  | 
|  | 448 |  | 
|  | 449 | QAPATHTEST[buildpaths] = "package_qa_check_buildpaths" | 
def package_qa_check_buildpaths(path, name, d, elf, messages):
    """
    Check for build paths inside target files and error if paths are not
    explicitly ignored.
    """
    import stat

    # Only regular files are scanned; skip symlinks and device/fifo/socket
    # special files.
    mode = os.lstat(path).st_mode
    is_special = (stat.S_ISLNK(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode)
                  or stat.S_ISCHR(mode) or stat.S_ISSOCK(mode))
    if is_special:
        return

    tmpdir = bytes(d.getVar('TMPDIR'), encoding="utf-8")
    with open(path, 'rb') as f:
        if tmpdir in f.read():
            trimmed = path.replace(os.path.join(d.getVar("PKGDEST"), name), "")
            oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
|  | 468 |  | 
|  | 469 |  | 
|  | 470 | QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi" | 
def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
    """
    Check that all packages containing Xorg drivers declare an
    xorg-abi-* runtime dependency on the server ABI they target.
    """
    # Skip dev, dbg or nativesdk packages.
    if name.endswith("-dev") or name.endswith("-dbg") or name.startswith("nativesdk-"):
        return

    driverdir = d.expand("${libdir}/xorg/modules/drivers/")
    if driverdir not in path or not path.endswith(".so"):
        return

    mlprefix = d.getVar('MLPREFIX') or ''
    abi_prefix = "%sxorg-abi-" % mlprefix
    rdeps = bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or "")
    if any(rdep.startswith(abi_prefix) for rdep in rdeps):
        return
    oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
|  | 487 |  | 
|  | 488 | QAPATHTEST[infodir] = "package_qa_check_infodir" | 
|  | 489 | def package_qa_check_infodir(path, name, d, elf, messages): | 
|  | 490 | """ | 
|  | 491 | Check that /usr/share/info/dir isn't shipped in a particular package | 
|  | 492 | """ | 
|  | 493 | infodir = d.expand("${infodir}/dir") | 
|  | 494 |  | 
|  | 495 | if infodir in path: | 
|  | 496 | oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.") | 
|  | 497 |  | 
|  | 498 | QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot" | 
def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
    """
    Reject absolute symlinks whose target points into TMPDIR (the build
    sysroot) — such links dangle on the target system.
    """
    if not os.path.islink(path):
        return
    target = os.readlink(path)
    if not os.path.isabs(target):
        return
    if target.startswith(d.getVar('TMPDIR')):
        trimmed = path.replace(os.path.join(d.getVar("PKGDEST"), name), "")
        oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
|  | 510 |  | 
| Andrew Geissler | 517393d | 2023-01-13 08:55:19 -0600 | [diff] [blame] | 511 | QAPATHTEST[32bit-time] = "check_32bit_symbols" | 
def check_32bit_symbols(path, packagename, d, elf, messages):
    """
    Check that ELF files do not use any 32 bit time APIs from glibc.

    Scans the dynamic symbol table (objdump -tw) for undefined references
    to glibc functions that take or return 32-bit time_t values.
    """
    import re
    # This list is manually constructed by searching the image folder of the
    # glibc recipe for __USE_TIME_BITS64.  There is no good way to do this
    # automatically.
    api32 = {
        # /usr/include/time.h
        "clock_getres", "clock_gettime", "clock_nanosleep", "clock_settime",
        "ctime", "ctime_r", "difftime", "gmtime", "gmtime_r", "localtime",
        "localtime_r", "mktime", "nanosleep", "time", "timegm", "timelocal",
        "timer_gettime", "timer_settime", "timespec_get", "timespec_getres",
        # /usr/include/bits/time.h
        "clock_adjtime",
        # /usr/include/signal.h
        "sigtimedwait",
        # /usr/include/sys/time.h
        "futimes", "futimesat", "getitimer", "gettimeofday", "lutimes",
        "setitimer", "settimeofday", "utimes",
        # /usr/include/sys/timex.h
        "adjtimex", "ntp_adjtime", "ntp_gettime", "ntp_gettimex",
        # /usr/include/sys/wait.h
        "wait3", "wait4",
        # /usr/include/sys/stat.h
        "fstat", "fstat64", "fstatat", "fstatat64", "futimens", "lstat",
        "lstat64", "stat", "stat64", "utimensat",
        # /usr/include/sys/poll.h
        "ppoll",
        # /usr/include/sys/resource.h
        "getrusage",
        # /usr/include/sys/ioctl.h
        "ioctl",
        # /usr/include/sys/select.h
        "select", "pselect",
        # /usr/include/sys/prctl.h
        "prctl",
        # /usr/include/sys/epoll.h
        "epoll_pwait2",
        # /usr/include/sys/timerfd.h
        "timerfd_gettime", "timerfd_settime",
        # /usr/include/sys/socket.h
        "getsockopt", "recvmmsg", "recvmsg", "sendmmsg", "sendmsg",
        "setsockopt",
        # /usr/include/sys/msg.h
        "msgctl",
        # /usr/include/sys/sem.h
        "semctl", "semtimedop",
        # /usr/include/sys/shm.h
        "shmctl",
        # /usr/include/pthread.h
        "pthread_clockjoin_np", "pthread_cond_clockwait",
        "pthread_cond_timedwait", "pthread_mutex_clocklock",
        "pthread_mutex_timedlock", "pthread_rwlock_clockrdlock",
        "pthread_rwlock_clockwrlock", "pthread_rwlock_timedrdlock",
        "pthread_rwlock_timedwrlock", "pthread_timedjoin_np",
        # /usr/include/semaphore.h
        "sem_clockwait", "sem_timedwait",
        # /usr/include/threads.h
        "cnd_timedwait", "mtx_timedlock", "thrd_sleep",
        # /usr/include/aio.h
        "aio_cancel", "aio_error", "aio_read", "aio_return", "aio_suspend",
        "aio_write", "lio_listio",
        # /usr/include/mqueue.h
        "mq_timedreceive", "mq_timedsend",
        # /usr/include/glob.h
        "glob", "glob64", "globfree", "globfree64",
        # /usr/include/sched.h
        "sched_rr_get_interval",
        # /usr/include/fcntl.h
        "fcntl", "fcntl64",
        # /usr/include/utime.h
        "utime",
        # /usr/include/ftw.h
        "ftw", "ftw64", "nftw", "nftw64",
        # /usr/include/fts.h
        "fts64_children", "fts64_close", "fts64_open", "fts64_read",
        "fts64_set", "fts_children", "fts_close", "fts_open", "fts_read",
        "fts_set",
        # /usr/include/netdb.h
        "gai_suspend",
    }

    # Matches one objdump -tw symbol table row for an undefined (*UND*)
    # symbol whose name is one of the api32 entries with a GLIBC_x.y
    # version tag; the bare name is captured as 'notag'.
    ptrn = re.compile(
        r'''
        (?P<value>[\da-fA-F]+) \s+
        (?P<flags>[lgu! ][w ][C ][W ][Ii ][dD ]F) \s+
        (?P<section>\*UND\*) \s+
        (?P<alignment>(?P<size>[\da-fA-F]+)) \s+
        (?P<symbol>
        ''' +
        r'(?P<notag>' + r'|'.join(sorted(api32)) + r')' +
        r'''
        (@+(?P<tag>GLIBC_\d+\.\d+\S*)))
        ''', re.VERBOSE
    )

    # elf is a oe.qa.ELFFile object
    if elf is not None:
        phdrs = elf.run_objdump("-tw", d)
        syms = re.finditer(ptrn, phdrs)
        usedapis = {sym.group('notag') for sym in syms}
        if usedapis:
            elfpath = package_qa_clean_path(path, d, packagename)
            # Remove any .debug dir, heuristic that probably works
            # At this point, any symbol information is stripped into the debug
            # package, so that is the only place we will find them.
            elfpath = elfpath.replace('.debug/', '')
            allowed = "32bit-time" in (d.getVar('INSANE_SKIP') or '').split()
            if not allowed:
                msgformat = elfpath + " uses 32-bit api '%s'"
                for sym in usedapis:
                    oe.qa.add_message(messages, '32bit-time', msgformat % sym)
                oe.qa.add_message(
                    messages, '32bit-time',
                    'Suppress with INSANE_SKIP = "32bit-time"'
                )
|  | 630 |  | 
| Patrick Williams | 92b42cb | 2022-09-03 06:53:57 -0500 | [diff] [blame] | 631 | # Check license variables | 
|  | 632 | do_populate_lic[postfuncs] += "populate_lic_qa_checksum" | 
python populate_lic_qa_checksum() {
    """
    Check for changes in the license files.

    Verifies every entry in LIC_FILES_CHKSUM against the actual md5 of the
    (optionally line-windowed) license file, reporting 'license-checksum'
    QA errors on mismatch or on missing/invalid entries.
    """

    lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
    lic = d.getVar('LICENSE')
    pn = d.getVar('PN')

    # CLOSED-licensed recipes are exempt from license file tracking.
    if lic == "CLOSED":
        return

    # A recipe that fetches sources must declare license file checksums.
    if not lic_files and d.getVar('SRC_URI'):
        oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)

    srcdir = d.getVar('S')
    corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
    for url in lic_files.split():
        try:
            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        except bb.fetch.MalformedUrl:
            oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
            continue
        srclicfile = os.path.join(srcdir, path)
        if not os.path.isfile(srclicfile):
            oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
            continue

        if (srclicfile == corebase_licensefile):
            bb.warn("${COREBASE}/LICENSE is not a valid license file, please use '${COMMON_LICENSE_DIR}/MIT' for a MIT License file in LIC_FILES_CHKSUM. This will become an error in the future")

        recipemd5 = parm.get('md5', '')
        # Optional beginline/endline URL parameters restrict the checksummed
        # region to a line range within the license file.
        beginline, endline = 0, 0
        if 'beginline' in parm:
            beginline = int(parm['beginline'])
        if 'endline' in parm:
            endline = int(parm['endline'])

        if (not beginline) and (not endline):
            # Whole file: hash it directly and keep its text for reporting.
            md5chksum = bb.utils.md5_file(srclicfile)
            with open(srclicfile, 'r', errors='replace') as f:
                license = f.read().splitlines()
        else:
            # Partial file: hash only the selected line range.
            with open(srclicfile, 'rb') as f:
                import hashlib
                lineno = 0
                license = []
                # usedforsecurity is not accepted by every hashlib build;
                # fall back for interpreters that reject the keyword.
                try:
                    m = hashlib.new('MD5', usedforsecurity=False)
                except TypeError:
                    m = hashlib.new('MD5')
                for line in f:
                    lineno += 1
                    if (lineno >= beginline):
                        if ((lineno <= endline) or not endline):
                            m.update(line)
                            license.append(line.decode('utf-8', errors='replace').rstrip())
                        else:
                            break
                md5chksum = m.hexdigest()
        if recipemd5 == md5chksum:
            bb.note (pn + ": md5 checksum matched for ", url)
        else:
            if recipemd5:
                msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url
                msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum
                # Trim overly long license texts from the middle so the
                # error message stays readable.
                max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20)
                if not license or license[-1] != '':
                    # Ensure that our license text ends with a line break
                    # (will be added with join() below).
                    license.append('')
                remove = len(license) - max_lines
                if remove > 0:
                    start = max_lines // 2
                    end = start + remove - 1
                    del license[start:end]
                    license.insert(start, '...')
                msg = msg + "\n" + pn + ": Here is the selected license text:" + \
                        "\n" + \
                        "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \
                        "\n" + "\n".join(license) + \
                        "{:^^70}".format(" endline=%d " % endline if endline else "")
                if beginline:
                    if endline:
                        srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline)
                    else:
                        srcfiledesc = "%s (beginning on line %d)" % (srclicfile, beginline)
                elif endline:
                    srcfiledesc = "%s (ending on line %d)" % (srclicfile, endline)
                else:
                    srcfiledesc = srclicfile
                msg = msg + "\n" + pn + ": Check if the license information has changed in %s to verify that the LICENSE value \"%s\" remains valid" % (srcfiledesc, lic)

            else:
                msg = pn + ": LIC_FILES_CHKSUM is not specified for " +  url
                msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
            oe.qa.handle_error("license-checksum", msg, d)

    oe.qa.exit_if_errors(d)
}
|  | 733 |  | 
def qa_check_staged(path,d):
    """
    Check staged la and pc files for common problems like references to the work
    directory.

    As this is run after every stage we should be able to find the one
    responsible for the errors easily even if we look at every .pc and .la file.

    Arguments:
        path -- root of the staged sysroot directory tree to scan
        d -- the bitbake datastore

    Issues are reported via oe.qa.handle_error() under the 'la' and
    'pkgconfig' test names; shebang lengths are also checked per file.
    """

    tmpdir = d.getVar('TMPDIR')
    workdir = os.path.join(tmpdir, "work")
    recipesysroot = d.getVar("RECIPE_SYSROOT")

    # For native/cross recipes the files are used on the build host, so
    # TMPDIR references are acceptable; only workdir references are not.
    if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
        pkgconfigcheck = workdir
    else:
        pkgconfigcheck = tmpdir

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_la = False
    if 'la' in skip:
        bb.note("Recipe %s skipping qa checking: la" % d.getVar('PN'))
        skip_la = True

    skip_pkgconfig = False
    if 'pkgconfig' in skip:
        bb.note("Recipe %s skipping qa checking: pkgconfig" % d.getVar('PN'))
        skip_pkgconfig = True

    skip_shebang_size = False
    if 'shebang-size' in skip:
        # Fixed message typo ("checkking" -> "checking").
        bb.note("Recipe %s skipping qa checking: shebang-size" % d.getVar('PN'))
        skip_shebang_size = True

    # find all .la and .pc files
    # read the content
    # and check for stuff that looks wrong
    for root, dirs, files in os.walk(path):
        for file in files:
            # Use a distinct name instead of shadowing the 'path' parameter.
            filepath = os.path.join(root, file)
            if file.endswith(".la") and not skip_la:
                with open(filepath) as f:
                    file_content = f.read()
                    # References into the recipe sysroot are expected; strip
                    # them before testing for workdir contamination.
                    file_content = file_content.replace(recipesysroot, "")
                    if workdir in file_content:
                        error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
                        oe.qa.handle_error("la", error_msg, d)
            elif file.endswith(".pc") and not skip_pkgconfig:
                with open(filepath) as f:
                    file_content = f.read()
                    file_content = file_content.replace(recipesysroot, "")
                    if pkgconfigcheck in file_content:
                        error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
                        oe.qa.handle_error("pkgconfig", error_msg, d)

            if not skip_shebang_size:
                errors = {}
                package_qa_check_shebang_size(filepath, "", d, None, errors)
                for e in errors:
                    oe.qa.handle_error(e, errors[e], d)
|  | 794 |  | 
|  | 795 |  | 
# Run all package-wide warnfuncs and errorfuncs
def package_qa_package(warnfuncs, errorfuncs, package, d):
    """Run every package-wide QA check against *package*.

    Checks in warnfuncs record warning-level messages, checks in errorfuncs
    record error-level ones; all collected messages are forwarded to
    oe.qa.handle_error(). Returns True when no error-level messages were
    produced.
    """
    collected_warnings = {}
    collected_errors = {}

    for check in warnfuncs:
        check(package, d, collected_warnings)
    for check in errorfuncs:
        check(package, d, collected_errors)

    for name, msg in collected_warnings.items():
        oe.qa.handle_error(name, msg, d)
    for name, msg in collected_errors.items():
        oe.qa.handle_error(name, msg, d)

    return not collected_errors
|  | 812 |  | 
# Run all recipe-wide warnfuncs and errorfuncs
def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
    """Run every recipe-wide QA check against recipe *pn*.

    Collected warning- and error-level messages are routed through
    oe.qa.handle_error(). Returns True when no error-level messages were
    produced.
    """
    collected_warnings = {}
    collected_errors = {}

    for check in warnfuncs:
        check(pn, d, collected_warnings)
    for check in errorfuncs:
        check(pn, d, collected_errors)

    for name, msg in collected_warnings.items():
        oe.qa.handle_error(name, msg, d)
    for name, msg in collected_errors.items():
        oe.qa.handle_error(name, msg, d)

    return not collected_errors
|  | 829 |  | 
def prepopulate_objdump_p(elf, d):
    """Run 'objdump -p' on *elf* and return (name, output) so the caller
    can cache the output on the matching ELFFile object."""
    return elf.name, elf.run_objdump("-p", d)
|  | 833 |  | 
# Walk over all files in a directory and call func
def package_qa_walk(warnfuncs, errorfuncs, package, d):
    """Run the per-path QA checks over every file shipped in *package*.

    ELF files are detected up front and their 'objdump -p' output is
    pre-computed in parallel so the individual checks can consume it
    cheaply. Collected messages go through oe.qa.handle_error().
    """
    #if this will throw an exception, then fix the dict above
    target_os   = d.getVar('HOST_OS')
    target_arch = d.getVar('HOST_ARCH')

    warnings = {}
    errors = {}

    # Identify the ELF files in the package up front.
    elves = {}
    for path in pkgfiles[package]:
        if not os.path.isfile(path):
            continue
        candidate = oe.qa.ELFFile(path)
        try:
            candidate.open()
            candidate.close()
        except oe.qa.NotELFFileError:
            continue
        elves[path] = candidate

    # Pre-populate the objdump -p output in parallel; the returned name
    # matches the path key used in elves.
    results = oe.utils.multiprocess_launch(prepopulate_objdump_p, elves.values(), d, extraargs=(d,))
    for name, output in results:
        elves[name].set_objdump("-p", output)

    for path in pkgfiles[package]:
        elf = elves.get(path)
        if elf:
            elf.open()
        for func in warnfuncs:
            func(path, package, d, elf, warnings)
        for func in errorfuncs:
            func(path, package, d, elf, errors)
        if elf:
            elf.close()

    for name in warnings:
        oe.qa.handle_error(name, warnings[name], d)
    for name in errors:
        oe.qa.handle_error(name, errors[name], d)
|  | 873 |  | 
def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
    """
    Sanity-check the runtime dependencies of package *pkg*.

    Reports (via oe.qa.handle_error):
      - debug-deps: a non -dbg package rdepends on a -dbg package
      - dev-deps:   a non -dev/-staticdev package rdepends on a -dev package
      - build-deps: an rdepend whose providing recipe is not among this
                    task's build-time dependencies
      - file-rdeps: a per-file dependency (FILERDEPENDS) that no rdepend
                    provides

    Arguments:
        pkg -- package name being checked
        pkgdest -- PKGDEST directory (currently unused in this function)
        skip -- set of QA test names from INSANE_SKIP
        taskdeps -- set of recipe names this task depends on at build time
        packages -- set of all packages produced by this recipe
        d -- the bitbake datastore
    """
    # Don't do this check for kernel/module recipes, there aren't too many debug/development
    # packages and you can get false positives e.g. on kernel-module-lirc-dev
    if bb.data.inherits_class("kernel", d) or bb.data.inherits_class("module-base", d):
        return

    if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg:
        # Evaluate RDEPENDS with the package-specific overrides applied.
        localdata = bb.data.createCopy(d)
        localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg)

        # Now check the RDEPENDS
        rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")

        # Now do the sanity check!!!
        if "build-deps" not in skip:
            for rdepend in rdepends:
                if "-dbg" in rdepend and "debug-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg,rdepend)
                    oe.qa.handle_error("debug-deps", error_msg, d)
                if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
                    error_msg = "%s rdepends on %s" % (pkg, rdepend)
                    oe.qa.handle_error("dev-deps", error_msg, d)
                if rdepend not in packages:
                    # The rdepend is not produced by this recipe; verify that
                    # some build dependency's recipe provides it.
                    rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if not rdep_data or not 'PN' in rdep_data:
                        # No direct pkgdata for this name; try packages that
                        # RPROVIDE it instead.
                        pkgdata_dir = d.getVar("PKGDATA_DIR")
                        try:
                            possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
                        except OSError:
                            possibles = []
                        for p in possibles:
                            rdep_data = oe.packagedata.read_subpkgdata(p, d)
                            if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                                break
                    if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
                        continue
                    if rdep_data and 'PN' in rdep_data:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
                    else:
                        error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
                    oe.qa.handle_error("build-deps", error_msg, d)

        if "file-rdeps" not in skip:
            ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
            if bb.data.inherits_class('nativesdk', d):
                ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl'])
            # For Saving the FILERDEPENDS
            filerdepends = {}
            rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
            for key in rdep_data:
                if key.startswith("FILERDEPENDS:"):
                    for subkey in bb.utils.explode_deps(rdep_data[key]):
                        if subkey not in ignored_file_rdeps and \
                                not subkey.startswith('perl('):
                            # We already know it starts with FILERDEPENDS_
                            filerdepends[subkey] = key[13:]

            if filerdepends:
                done = rdepends[:]
                # Add the rprovides of itself
                if pkg not in done:
                    done.insert(0, pkg)

                # The python is not a package, but python-core provides it, so
                # skip checking /usr/bin/python if python is in the rdeps, in
                # case there is a RDEPENDS:pkg = "python" in the recipe.
                for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                    if py in done:
                        filerdepends.pop("/usr/bin/python",None)
                        done.remove(py)
                for rdep in done:
                    # The file dependencies may contain package names, e.g.,
                    # perl
                    filerdepends.pop(rdep,None)

                    # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
                    rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
                    for key in rdep_data:
                        if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
                            for subkey in bb.utils.explode_deps(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                        # Add the files list to the rprovides
                        if key.startswith("FILES_INFO:"):
                            # Use eval() to make it as a dict
                            # NOTE(review): eval() here consumes locally
                            # generated pkgdata, not external input.
                            for subkey in eval(rdep_data[key]):
                                filerdepends.pop(subkey,None)
                    if not filerdepends:
                        # Break if all the file rdepends are met
                        break
            if filerdepends:
                for key in filerdepends:
                    error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \
                            (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
                    oe.qa.handle_error("file-rdeps", error_msg, d)
|  | 970 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" | 
|  | 971 |  | 
def package_qa_check_deps(pkg, pkgdest, d):
    """Validate the version comparison syntax used in the runtime
    dependency variables (RDEPENDS, RRECOMMENDS, ...) of *pkg*.

    Invalid comparison operators are reported as 'dep-cmp' QA errors;
    unparseable values abort the build via bb.fatal().
    """

    localdata = bb.data.createCopy(d)
    localdata.setVar('OVERRIDES', pkg)

    def check_valid_deps(var):
        try:
            entries = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
        except ValueError as e:
            bb.fatal("%s:%s: %s" % (var, pkg, e))
        for dep, comparisons in entries.items():
            for v in comparisons:
                if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
                    error_msg = "%s:%s is invalid: %s (%s)   only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
                    oe.qa.handle_error("dep-cmp", error_msg, d)

    for var in ('RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS',
                'RPROVIDES', 'RREPLACES', 'RCONFLICTS'):
        check_valid_deps(var)
|  | 994 |  | 
|  | 995 | QAPKGTEST[usrmerge] = "package_qa_check_usrmerge" | 
def package_qa_check_usrmerge(pkg, d, messages):
    """Verify that *pkg* does not install real top-level /bin, /sbin, /lib
    (or multilib variant) directories; with usrmerge these must be symlinks
    into /usr. Returns False (and records a 'usrmerge' message) on the
    first offending directory, True otherwise."""
    pkgdest = d.getVar('PKGDEST')
    pkg_dir = pkgdest + os.sep + pkg + os.sep
    merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
    for entry in merged_dirs:
        candidate = pkg_dir + entry
        if os.path.exists(candidate) and not os.path.islink(candidate):
            msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, entry)
            oe.qa.add_message(messages, "usrmerge", msg)
            return False
    return True
|  | 1007 |  | 
|  | 1008 | QAPKGTEST[perllocalpod] = "package_qa_check_perllocalpod" | 
def package_qa_check_perllocalpod(pkg, d, messages):
    """
    Check that the recipe didn't ship a perlocal.pod file, which shouldn't be
    installed in a distribution package.  cpan.bbclass sets NO_PERLLOCAL=1 to
    handle this for most recipes.
    """
    import glob
    pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
    # perllocal.pod is installed under ${libdir}/perl*/<ver>/<arch>/.
    pattern = oe.path.join(pkgd, d.getVar("libdir"), "perl*", "*", "*", "perllocal.pod")

    hits = glob.glob(pattern)
    if not hits:
        return
    cleaned = [package_qa_clean_path(p, d, pkg) for p in hits]
    msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(cleaned))
    oe.qa.add_message(messages, "perllocalpod", msg)
|  | 1024 |  | 
|  | 1025 | QAPKGTEST[expanded-d] = "package_qa_check_expanded_d" | 
def package_qa_check_expanded_d(package, d, messages):
    """
    Check for the expanded D (${D}) value in pkg_* and FILES
    variables, warn the user to use it correctly.

    Returns True when none of the checked variables embed the expanded
    ${D} path, False otherwise (with 'expanded-d' messages recorded).
    """
    expanded_d = d.getVar('D')
    sane = True

    for var in ('FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
        value = d.getVar(var + ":" + package) or ""
        if expanded_d not in value:
            continue
        sane = False
        if var == 'FILES':
            oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
        else:
            oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
    return sane
|  | 1044 |  | 
|  | 1045 | QAPKGTEST[unlisted-pkg-lics] = "package_qa_check_unlisted_pkg_lics" | 
def package_qa_check_unlisted_pkg_lics(package, d, messages):
    """
    Check that all licenses for a package are among the licenses for the recipe.
    """
    pkg_lics = d.getVar('LICENSE:' + package)
    if not pkg_lics:
        return True

    recipe_lics = oe.license.list_licenses(d.getVar('LICENSE'))
    pkg_lic_set = oe.license.list_licenses(pkg_lics)

    unlisted = pkg_lic_set - recipe_lics
    if unlisted:
        oe.qa.add_message(messages, "unlisted-pkg-lics",
                          "LICENSE:%s includes licenses (%s) that are not "
                          "listed in LICENSE" % (package, ' '.join(unlisted)))
        return False

    # Only package licenses outside the recipe-level set are considered.
    obsolete = set(oe.license.obsolete_license_list()) & pkg_lic_set - recipe_lics
    if obsolete:
        oe.qa.add_message(messages, "obsolete-license",
                          "LICENSE:%s includes obsolete licenses %s" % (package, ' '.join(obsolete)))
        return False
    return True
|  | 1068 |  | 
|  | 1069 | QAPKGTEST[empty-dirs] = "package_qa_check_empty_dirs" | 
def package_qa_check_empty_dirs(pkg, d, messages):
    """
    Check for the existence of files in directories that are expected to be
    empty.

    Directories to check come from QA_EMPTY_DIRS; an optional per-directory
    QA_EMPTY_DIRS_RECOMMENDATION:<dir> customises the reported advice.
    """
    pkgd = oe.path.join(d.getVar('PKGDEST'), pkg)
    for dirname in (d.getVar('QA_EMPTY_DIRS') or "").split():
        empty_dir = oe.path.join(pkgd, dirname)
        if not (os.path.exists(empty_dir) and os.listdir(empty_dir)):
            continue
        recommendation = (d.getVar('QA_EMPTY_DIRS_RECOMMENDATION:' + dirname) or
                          "but it is expected to be empty")
        msg = "%s installs files in %s, %s" % (pkg, dirname, recommendation)
        oe.qa.add_message(messages, "empty-dirs", msg)
|  | 1084 |  | 
def package_qa_check_encoding(keys, encode, d):
    """
    Verify that the given datastore variables can be encoded with the given
    encoding (e.g. 'utf-8'), reporting an 'invalid-chars' QA error for the
    first variable that cannot.

    Arguments:
        keys -- iterable of variable names to check
        encode -- target encoding name
        d -- the bitbake datastore
    """
    def check_encoding(key, enc):
        sane = True
        value = d.getVar(key)
        if value:
            try:
                value.encode(enc)
            except UnicodeError:
                # Bug fix: str.encode() raises UnicodeEncodeError, but the
                # previous code caught UnicodeDecodeError (which encode()
                # never raises), so invalid characters were never reported.
                # UnicodeError covers both encode and decode failures.
                error_msg = "%s has non %s characters" % (key, enc)
                sane = False
                oe.qa.handle_error("invalid-chars", error_msg, d)
        return sane

    # Stop at the first problematic variable, matching previous behaviour.
    for key in keys:
        if not check_encoding(key, encode):
            break
|  | 1102 |  | 
|  | 1103 | HOST_USER_UID := "${@os.getuid()}" | 
|  | 1104 | HOST_USER_GID := "${@os.getgid()}" | 
|  | 1105 |  | 
|  | 1106 | QAPATHTEST[host-user-contaminated] = "package_qa_check_host_user" | 
def package_qa_check_host_user(path, name, d, elf, messages):
    """Check for paths outside of /home which are owned by the user running bitbake."""

    if not os.path.lexists(path):
        return

    dest = d.getVar('PKGDEST')
    pn = d.getVar('PN')

    # Anything the package ships under its own /home is expected to carry
    # a user's ownership, so skip that subtree.
    home = os.path.join(dest, name, 'home')
    if path == home or path.startswith(home + os.sep):
        return

    try:
        stat = os.lstat(path)
    except OSError as exc:
        import errno
        # The path may legitimately vanish between lexists() and lstat();
        # anything other than ENOENT is a genuine failure.
        if exc.errno != errno.ENOENT:
            raise
        return True

    check_uid = int(d.getVar('HOST_USER_UID'))
    if stat.st_uid == check_uid:
        oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
        return False

    check_gid = int(d.getVar('HOST_USER_GID'))
    if stat.st_gid == check_gid:
        oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
        return False
    return True
|  | 1136 |  | 
|  | 1137 | QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check" | 
def package_qa_check_unhandled_features_check(pn, d, messages):
    """Flag recipes that set ANY_OF_/REQUIRED_/CONFLICT_ *_FEATURES
    variables without inheriting the features_check class that acts on
    them."""
    if bb.data.inherits_class('features_check', d):
        return
    var_set = False
    # Examine every combination; all variables are checked even after a
    # match, matching previous behaviour.
    for kind in ('DISTRO', 'MACHINE', 'COMBINED'):
        for prefix in ('ANY_OF_', 'REQUIRED_', 'CONFLICT_'):
            var = prefix + kind + '_FEATURES'
            if d.getVar(var) is not None or d.hasOverrides(var):
                var_set = True
    if var_set:
        oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
|  | 1147 |  | 
|  | 1148 | QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives" | 
def package_qa_check_missing_update_alternatives(pn, d, messages):
    """Warn when any package sets ALTERNATIVE:<pkg> while the recipe does
    not inherit update-alternatives, which would make do_rootfs fail
    later."""
    for pkg in (d.getVar('PACKAGES') or '').split():
        if not d.getVar('ALTERNATIVE:%s' % pkg):
            continue
        if bb.data.inherits_class('update-alternatives', d):
            continue
        oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
|  | 1155 |  | 
# The PACKAGE FUNC to scan each package
python do_package_qa () {
    import subprocess
    import oe.packagedata

    bb.note("DO PACKAGE QA")

    # NOTE(review): main_lic appears unused; LICENSE is re-read just below.
    main_lic = d.getVar('LICENSE')

    # Check for obsolete license references in main LICENSE (packages are checked below for any changes)
    main_licenses = oe.license.list_licenses(d.getVar('LICENSE'))
    obsolete = set(oe.license.obsolete_license_list()) & main_licenses
    if obsolete:
        oe.qa.handle_error("obsolete-license", "Recipe LICENSE includes obsolete licenses %s" % ' '.join(obsolete), d)

    bb.build.exec_func("read_subpackage_metadata", d)

    # Check non UTF-8 characters on recipe's metadata
    package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)

    logdir = d.getVar('T')
    pn = d.getVar('PN')

    # Scan the packages...
    pkgdest = d.getVar('PKGDEST')
    packages = set((d.getVar('PACKAGES') or '').split())

    # Build the module-global per-package file list consumed by
    # package_qa_walk().
    global pkgfiles
    pkgfiles = {}
    for pkg in packages:
        pkgfiles[pkg] = []
        pkgdir = os.path.join(pkgdest, pkg)
        for walkroot, dirs, files in os.walk(pkgdir):
            # Don't walk into top-level CONTROL or DEBIAN directories as these
            # are temporary directories created by do_package.
            if walkroot == pkgdir:
                for control in ("CONTROL", "DEBIAN"):
                    if control in dirs:
                        dirs.remove(control)
            for file in files:
                pkgfiles[pkg].append(os.path.join(walkroot, file))

    # no packages should be scanned
    if not packages:
        return

    import re
    # The package name matches the [a-z0-9.+-]+ regular expression
    pkgname_pattern = re.compile(r"^[a-z0-9.+-]+$")

    # Collect the recipe names (PN) of all build-time task dependencies.
    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    taskdeps = set()
    for dep in taskdepdata:
        taskdeps.add(taskdepdata[dep][0])

    def parse_test_matrix(matrix_name):
        # Map WARN_QA/ERROR_QA test names to check functions registered via
        # the QAPATHTEST/QAPKGTEST/QARECIPETEST variable flags.
        # NOTE: 'skip' is read from the enclosing per-package loop below
        # (late binding), so this must only be called from inside that loop.
        testmatrix = d.getVarFlags(matrix_name) or {}
        g = globals()
        warnchecks = []
        for w in (d.getVar("WARN_QA") or "").split():
            if w in skip:
                continue
            if w in testmatrix and testmatrix[w] in g:
                warnchecks.append(g[testmatrix[w]])

        errorchecks = []
        for e in (d.getVar("ERROR_QA") or "").split():
            if e in skip:
                continue
            if e in testmatrix and testmatrix[e] in g:
                errorchecks.append(g[testmatrix[e]])
        return warnchecks, errorchecks

    for package in packages:
        # Combine recipe-wide and per-package skip lists.
        skip = set((d.getVar('INSANE_SKIP') or "").split() +
                   (d.getVar('INSANE_SKIP:' + package) or "").split())
        if skip:
            bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))

        bb.note("Checking Package: %s" % package)
        # Check package name
        if not pkgname_pattern.match(package):
            oe.qa.handle_error("pkgname",
                    "%s doesn't match the [a-z0-9.+-]+ regex" % package, d)

        warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
        package_qa_walk(warn_checks, error_checks, package, d)

        warn_checks, error_checks = parse_test_matrix("QAPKGTEST")
        package_qa_package(warn_checks, error_checks, package, d)

        package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
        package_qa_check_deps(package, pkgdest, d)

        warn_checks, error_checks = parse_test_matrix("QARECIPETEST")
        package_qa_recipe(warn_checks, error_checks, pn, d)

    if 'libdir' in d.getVar("ALL_QA").split():
        package_qa_check_libdir(d)

    oe.qa.exit_if_errors(d)
}
|  | 1258 |  | 
|  | 1259 | # binutils is used for most checks, so need to set as dependency | 
|  | 1260 | # POPULATESYSROOTDEPS is defined in staging class. | 
|  | 1261 | do_package_qa[depends] += "${POPULATESYSROOTDEPS}" | 
|  | 1262 | do_package_qa[vardeps] = "${@bb.utils.contains('ERROR_QA', 'empty-dirs', 'QA_EMPTY_DIRS', '', d)}" | 
|  | 1263 | do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" | 
|  | 1264 | do_package_qa[rdeptask] = "do_packagedata" | 
|  | 1265 | addtask do_package_qa after do_packagedata do_package before do_build | 
|  | 1266 |  | 
|  | 1267 | # Add the package specific INSANE_SKIPs to the sstate dependencies | 
python() {
    # Anonymous function, evaluated at parse time: make the do_package_qa
    # task signature depend on each per-package INSANE_SKIP:<pkg> value, so
    # changing a package's skip list re-runs the QA task.
    pkgs = (d.getVar('PACKAGES') or '').split()
    for pkg in pkgs:
        d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg))
}
|  | 1273 |  | 
|  | 1274 | SSTATETASKS += "do_package_qa" | 
|  | 1275 | do_package_qa[sstate-inputdirs] = "" | 
|  | 1276 | do_package_qa[sstate-outputdirs] = "" | 
python do_package_qa_setscene () {
    # Standard sstate setscene handler for do_package_qa; the task has no
    # output directories to restore (sstate-inputdirs/outputdirs are empty).
    sstate_setscene(d)
}
|  | 1280 | addtask do_package_qa_setscene | 
|  | 1281 |  | 
python do_qa_sysroot() {
    # Run qa_check_staged() over every directory staged into the sysroot,
    # failing do_populate_sysroot if any QA errors were recorded.
    bb.note("QA checking do_populate_sysroot")
    sysroot_destdir = d.expand('${SYSROOT_DESTDIR}')
    for sysroot_dir in d.expand('${SYSROOT_DIRS}').split():
        qa_check_staged(sysroot_destdir + sysroot_dir, d)
    oe.qa.exit_with_message_if_errors("do_populate_sysroot for this recipe installed files with QA issues", d)
}
|  | 1289 | do_populate_sysroot[postfuncs] += "do_qa_sysroot" | 
|  | 1290 |  | 
python do_qa_patch() {
    # QA checks run after do_patch:
    #  - report patches that applied with fuzz (context lines did not match)
    #  - check each patch carries a well-formed Upstream-Status tag

    ###########################################################################
    # Check patch.log for fuzz warnings
    #
    # Further information on why we check for patch fuzz warnings:
    # http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
    # https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
    ###########################################################################

    logdir = d.getVar('T')
    patchlog = os.path.join(logdir, "log.do_patch")

    if os.path.exists(patchlog):
        fuzzheader = '--- Patch fuzz start ---'
        fuzzfooter = '--- Patch fuzz end ---'
        msg = "Fuzz detected:\n\n"
        fuzzmsg = ""
        inFuzzInfo = False
        seen_fuzz = False
        # Parse the log in a single Python pass (the old code shelled out to
        # grep and then re-read the file) and use a context manager so the
        # log file is closed even if an exception is raised mid-parse.
        with open(patchlog, "r") as f:
            for line in f:
                if fuzzheader in line:
                    inFuzzInfo = True
                    seen_fuzz = True
                    fuzzmsg = ""
                elif fuzzfooter in line:
                    fuzzmsg = fuzzmsg.replace('\n\n', '\n')
                    msg += fuzzmsg
                    msg += "\n"
                    inFuzzInfo = False
                elif inFuzzInfo and not 'Now at patch' in line:
                    # Accumulate the quilt output between header and footer,
                    # skipping the uninteresting "Now at patch" progress line.
                    fuzzmsg += line
        if seen_fuzz:
            msg += "The context lines in the patches can be updated with devtool:\n"
            msg += "\n"
            msg += "    devtool modify %s\n" % d.getVar('PN')
            msg += "    devtool finish --force-patch-refresh %s <layer_path>\n\n" % d.getVar('PN')
            msg += "Don't forget to review changes done by devtool!\n"
            msg += "\nPatch log indicates that patches do not apply cleanly."
            oe.qa.handle_error("patch-fuzz", msg, d)

    # Check if the patch contains a correctly formatted and spelled Upstream-Status
    from oe import patch

    for url in patch.src_patches(d):
        (_, _, fullpath, _, _, _) = bb.fetch.decodeurl(url)

        msg = oe.qa.check_upstream_status(fullpath)
        if msg:
            oe.qa.handle_error("patch-status", msg, d)

    oe.qa.exit_if_errors(d)
}
|  | 1348 |  | 
python do_qa_configure() {
    # QA checks run after do_configure:
    #  - config.log must not show host-contamination (configure looked at
    #    host include/library paths while cross-compiling)
    #  - recipes using AM_GNU_GETTEXT must declare a gettext dependency
    #  - configure must not have been passed unrecognised options
    #  - every enabled PACKAGECONFIG item must be declared via varflags
    import subprocess

    ###########################################################################
    # Check config.log for cross compile issues
    ###########################################################################

    configs = []
    workdir = d.getVar('WORKDIR')

    skip = (d.getVar('INSANE_SKIP') or "").split()
    skip_configure_unsafe = False
    if 'configure-unsafe' in skip:
        bb.note("Recipe %s skipping qa checking: configure-unsafe" % d.getVar('PN'))
        skip_configure_unsafe = True

    if bb.data.inherits_class('autotools', d) and not skip_configure_unsafe:
        bb.note("Checking autotools environment for common misconfiguration")
        for root, dirs, files in os.walk(workdir):
            if "config.log" in files:
                # Only build and run the grep when a config.log actually
                # exists in this directory (previously the command string was
                # constructed unconditionally for every directory walked).
                statement = "grep -q -F -e 'is unsafe for cross-compilation' %s" % \
                            os.path.join(root, "config.log")
                if subprocess.call(statement, shell=True) == 0:
                    error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
Rerun configure task after fixing this."""
                    oe.qa.handle_error("configure-unsafe", error_msg, d)

            # Collect configure sources for the gettext check below.
            if "configure.ac" in files:
                configs.append(os.path.join(root, "configure.ac"))
            if "configure.in" in files:
                configs.append(os.path.join(root, "configure.in"))

    ###########################################################################
    # Check gettext configuration and dependencies are correct
    ###########################################################################

    skip_configure_gettext = False
    if 'configure-gettext' in skip:
        bb.note("Recipe %s skipping qa checking: configure-gettext" % d.getVar('PN'))
        skip_configure_gettext = True

    cnf = d.getVar('EXTRA_OECONF') or ""
    if not ("gettext" in d.getVar('P') or "gcc-runtime" in d.getVar('P') or \
            "--disable-nls" in cnf or skip_configure_gettext):
        # cross-canadian recipes need the nativesdk gettext variant.
        if bb.data.inherits_class('cross-canadian', d):
            gt = "nativesdk-gettext"
        else:
            gt = "gettext-native"
        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
        if gt not in deps:
            for config in configs:
                gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
                if subprocess.call(gnu, shell=True) == 0:
                    error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
                    oe.qa.handle_error("configure-gettext", error_msg, d)

    ###########################################################################
    # Check unrecognised configure options (with a white list)
    ###########################################################################
    if bb.data.inherits_class("autotools", d):
        bb.note("Checking configure output for unrecognised options")
        try:
            # (A second, redundant inherits_class('autotools') test that
            # duplicated the guard directly above has been removed.)
            flag = "WARNING: unrecognized options:"
            log = os.path.join(d.getVar('B'), 'config.log')
            output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ').replace('"', '')
            options = set()
            for line in output.splitlines():
                options |= set(line.partition(flag)[2].split())
            # Guard against UNKNOWN_CONFIGURE_OPT_IGNORE being unset, which
            # previously raised AttributeError on None.split().
            ignore_opts = set((d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE") or "").split())
            options -= ignore_opts
            if options:
                pn = d.getVar('PN')
                error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
                oe.qa.handle_error("unknown-configure-option", error_msg, d)
        except subprocess.CalledProcessError:
            # grep exits non-zero when no unrecognised options were logged;
            # that is the normal, successful case.
            pass

    # Check invalid PACKAGECONFIG
    pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
    if pkgconfig:
        pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
        for pconfig in pkgconfig:
            if pconfig not in pkgconfigflags:
                pn = d.getVar('PN')
                error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
                oe.qa.handle_error("invalid-packageconfig", error_msg, d)

    oe.qa.exit_if_errors(d)
}
|  | 1440 |  | 
def unpack_check_src_uri(pn, d):
    # Flag SRC_URI problems: use of ${PN} where ${BPN} is meant, and
    # GitHub/GitLab auto-generated archive URLs whose checksums can change.
    import re

    if 'src-uri-bad' in (d.getVar('INSANE_SKIP') or "").split():
        bb.note("Recipe %s skipping qa checking: src-uri-bad" % d.getVar('PN'))
        return

    if "${PN}" in d.getVar("SRC_URI", False):
        oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)

    # Search for github and gitlab URLs that pull unstable archives (comment for future greppers)
    unstable_archive = re.compile(r"git(hu|la)b\.com/.+/.+/archive/.+")
    for url in d.getVar("SRC_URI").split():
        if unstable_archive.search(url) or "//codeload.github.com/" in url:
            oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
|  | 1456 |  | 
python do_qa_unpack() {
    # After unpack, warn (not error) when the S directory is missing,
    # then run the SRC_URI sanity checks.
    uri = d.getVar('SRC_URI')
    srcdir = d.getVar('S')
    if uri and not os.path.exists(srcdir):
        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), srcdir))

    unpack_check_src_uri(d.getVar('PN'), d)
}
|  | 1465 |  | 
# Check for patch fuzz and Upstream-Status tags after every do_patch.
do_patch[postfuncs] += "do_qa_patch "

# Check broken config.log files, for packages requiring Gettext which
# don't have it in DEPENDS.
#addtask qa_configure after do_configure before do_compile
do_configure[postfuncs] += "do_qa_configure "

# Check does S exist after unpacking, and validate SRC_URI entries.
do_unpack[postfuncs] += "do_qa_unpack"
|  | 1476 |  | 
# Parse-time sanity checks on recipe variables: FILESEXTRAPATHS misuse,
# PN in OVERRIDES or upper-cased, package-specific variables set globally,
# and native/nativesdk not being inherited last.
python () {
    import re

    tests = d.getVar('ALL_QA').split()
    # The desktop check needs desktop-file-validate from desktop-file-utils.
    if "desktop" in tests:
        d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")

    ###########################################################################
    # Check various variables
    ###########################################################################

    # Checking ${FILESEXTRAPATHS}
    # A plain assignment drops the '__default' placeholder from the value,
    # which is how we detect that :prepend/:append was not used.
    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
    if '__default' not in extrapaths.split(":"):
        msg = "FILESEXTRAPATHS-variable, must always use :prepend (or :append)\n"
        msg += "type of assignment, and don't forget the colon.\n"
        msg += "Please assign it with the format of:\n"
        msg += "  FILESEXTRAPATHS:append := \":${THISDIR}/Your_Files_Path\" or\n"
        msg += "  FILESEXTRAPATHS:prepend := \"${THISDIR}/Your_Files_Path:\"\n"
        msg += "in your bbappend file\n\n"
        msg += "Your incorrect assignment is:\n"
        msg += "%s\n" % extrapaths
        bb.warn(msg)

    overrides = d.getVar('OVERRIDES').split(':')
    pn = d.getVar('PN')
    if pn in overrides:
        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
        oe.qa.handle_error("pn-overrides", msg, d)
    prog = re.compile(r'[A-Z]')
    if prog.search(pn):
        oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)

    # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder
    # why it doesn't work.
    if (d.getVar(d.expand('DEPENDS:${PN}'))):
        oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)

    # Variables that must be set per-package (VAR:pkgname), not globally.
    issues = []
    if (d.getVar('PACKAGES') or "").split():
        for dep in (d.getVar('QADEPENDS') or "").split():
            d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
        for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
            # getVar(var, False): check the unexpanded, non-overridden form.
            if d.getVar(var, False):
                issues.append(var)

        # If any enabled QA test needs fakeroot, run do_package_qa under it.
        fakeroot_tests = d.getVar('FAKEROOT_QA').split()
        if set(tests) & set(fakeroot_tests):
            d.setVarFlag('do_package_qa', 'fakeroot', '1')
            d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
    else:
        # No packages produced: drop the do_packagedata rdeptask set above.
        d.setVarFlag('do_package_qa', 'rdeptask', '')
    for i in issues:
        oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)

    # native/nativesdk must be inherited last so their overrides win;
    # walk the inherit cache backwards and collect anything inherited
    # after them that is not an expected BBCLASSEXTEND artifact.
    if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
        for native_class in ['native', 'nativesdk']:
            if bb.data.inherits_class(native_class, d):

                inherited_classes = d.getVar('__inherit_cache', False) or []
                needle = "/" + native_class

                bbclassextend = (d.getVar('BBCLASSEXTEND') or '').split()
                # BBCLASSEXTEND items are always added in the end
                skip_classes = bbclassextend
                if bb.data.inherits_class('native', d) or 'native' in bbclassextend:
                    # native also inherits nopackages and relocatable bbclasses
                    skip_classes.extend(['nopackages', 'relocatable'])

                broken_order = []
                for class_item in reversed(inherited_classes):
                    if needle not in class_item:
                        # for/else: the else runs only when no skip_classes
                        # entry matched, i.e. this class is a real offender.
                        for extend_item in skip_classes:
                            if '/%s.bbclass' % extend_item in class_item:
                                break
                        else:
                            pn = d.getVar('PN')
                            broken_order.append(os.path.basename(class_item))
                    else:
                        # Reached the native/nativesdk class itself; anything
                        # earlier in the cache was inherited before it.
                        break
                if broken_order:
                    oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
                                       "Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)

    oe.qa.exit_if_errors(d)
}