#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import codecs
import os
import json
import bb.compress.zstd
import oe.data
import oe.path

from glob import glob

def packaged(pkg, d):
    return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)

def read_pkgdatafile(fn):
    pkgdata = {}

    def decode(s):
        c = codecs.getdecoder("unicode_escape")
        return c(s)[0]

    if os.access(fn, os.R_OK):
        import re
        with open(fn, 'r') as f:
            lines = f.readlines()
        r = re.compile(r"(^.+?):\s+(.*)")
        for l in lines:
            m = r.match(l)
            if m:
                pkgdata[m.group(1)] = decode(m.group(2))

    return pkgdata

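# Illustrative example for read_pkgdatafile(): a pkgdata file is a series of
# "NAME: value" lines, e.g. (made-up package name and values)
#   PKG:libfoo1: libfoo1
#   PKGSIZE:libfoo1: 4096
#   RDEPENDS:libfoo1: libc6 (>= 2.35)
# which parses into
#   {"PKG:libfoo1": "libfoo1", "PKGSIZE:libfoo1": "4096",
#    "RDEPENDS:libfoo1": "libc6 (>= 2.35)"}
# Values are unicode_escape-decoded, so escaped newlines written by
# emit_pkgdata() are restored.
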
def get_subpkgedata_fn(pkg, d):
    return d.expand('${PKGDATA_DIR}/runtime/%s' % pkg)

def has_subpkgdata(pkg, d):
    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)

def read_subpkgdata(pkg, d):
    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))

def has_pkgdata(pn, d):
    fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return os.access(fn, os.R_OK)

def read_pkgdata(pn, d):
    fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return read_pkgdatafile(fn)

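# Illustrative usage of has_pkgdata()/read_pkgdata() from recipe or class
# Python code ("zlib" is a placeholder recipe name and assumes its pkgdata has
# already been written by do_packagedata):
#   if oe.packagedata.has_pkgdata("zlib", d):
#       packages = oe.packagedata.read_pkgdata("zlib", d).get("PACKAGES", "")
# ${PKGDATA_DIR}/<recipe> lists the recipe's PACKAGES, while per-package
# details live in ${PKGDATA_DIR}/runtime/<pkg> (see emit_pkgdata() below).
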
#
# Collapse FOO:pkg variables into FOO
#
def read_subpkgdata_dict(pkg, d):
    ret = {}
    subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
    for var in subd:
        newvar = var.replace(":" + pkg, "")
        if newvar == var and var + ":" + pkg in subd:
            continue
        ret[newvar] = subd[var]
    return ret

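# Illustrative example for read_subpkgdata_dict() (placeholder names): with
# pkg = "libfoo1", an entry "RDEPENDS:libfoo1" is returned under the plain key
# "RDEPENDS"; an unsuffixed key is skipped when a ":libfoo1" variant of it
# also exists, so the package-specific value wins.
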
def read_subpkgdata_extended(pkg, d):
    fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg)
    try:
        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
        with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
            return json.load(f)
    except FileNotFoundError:
        return None

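# Illustrative shape of the extended data written by emit_pkgdata() and
# returned here (the path and size are made-up placeholders):
#   {"files_info": {"/usr/bin/foo": {"size": 13240, "debugsrc": ...}}}
# "debugsrc" is only present for files with recorded debug source info;
# None is returned when no extended pkgdata exists for the package.
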
def _pkgmap(d):
    """Return a dictionary mapping package to recipe name."""

    pkgdatadir = d.getVar("PKGDATA_DIR")

    pkgmap = {}
    try:
        files = os.listdir(pkgdatadir)
    except OSError:
        bb.warn("No files in %s?" % pkgdatadir)
        files = []

    for pn in [f for f in files if not os.path.isdir(os.path.join(pkgdatadir, f))]:
        try:
            pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn))
        except OSError:
            continue

        packages = pkgdata.get("PACKAGES") or ""
        for pkg in packages.split():
            pkgmap[pkg] = pn

    return pkgmap

def pkgmap(d):
    """Return a dictionary mapping package to recipe name.
    Cache the mapping in the metadata"""

    pkgmap_data = d.getVar("__pkgmap_data", False)
    if pkgmap_data is None:
        pkgmap_data = _pkgmap(d)
        d.setVar("__pkgmap_data", pkgmap_data)

    return pkgmap_data

def recipename(pkg, d):
    """Return the recipe name for the given binary package name."""

    return pkgmap(d).get(pkg)

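# Illustrative usage of recipename() (placeholder names): recipename("libfoo1", d)
# returns the recipe that packaged "libfoo1" (e.g. "foo"), or None if the
# package is not recorded under PKGDATA_DIR. The mapping is built once per
# datastore and cached in the __pkgmap_data variable by pkgmap().
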
def get_package_mapping(pkg, basepkg, d, depversions=None):
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG:%s" % pkg

    if key in data:
        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES:%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg

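# Illustrative example for get_package_mapping() (placeholder names): if
# debian.bbclass renamed "foo" to "libfoo1" (i.e. foo's runtime pkgdata records
# PKG:foo = "libfoo1"), get_package_mapping("foo", basepkg, d) normally returns
# "libfoo1". It returns "foo" unchanged for the allarch case guarded above (to
# avoid undoing write_extra_pkgs), or when depversions == [] and the renamed
# package still RPROVIDES its original name.
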
def get_package_additional_metadata(pkg_type, d):
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        if d.getVar(key, False) is None:
            continue
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(metadata_fields).strip()

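# Illustrative configuration for get_package_additional_metadata() (the field
# below is a made-up placeholder): with
#   PACKAGE_ADD_METADATA_IPK = "Vendor-Info: example"
# set in a recipe or conf file, get_package_additional_metadata("ipk", d)
# returns "Vendor-Info: example". The packaging-backend-specific variable is
# consulted before the generic PACKAGE_ADD_METADATA, and multiple fields, if
# present, are joined with newlines in the result.
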
def runtime_mapping_rename(varname, pkg, d):
    #bb.note("%s before: %s" % (varname, d.getVar(varname)))

    new_depends = {}
    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    for depend, depversions in deps.items():
        new_depend = get_package_mapping(depend, pkg, d, depversions)
        if depend != new_depend:
            bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
        new_depends[new_depend] = deps[depend]

    d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname)))

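# Illustrative behaviour of runtime_mapping_rename() (placeholder names): if
# the named variable contains "foo (>= 1.0)" and foo's runtime pkgdata records
# PKG:foo = "libfoo1", the variable is rewritten to "libfoo1 (>= 1.0)".
# Version constraints are preserved because the dependency dict from
# explode_dep_versions2() is re-keyed and then re-joined with join_deps().
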
def emit_pkgdata(pkgfiles, d):
    def process_postinst_on_target(pkg, mlprefix):
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst:%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst:%s' % pkg, postinst)

    def add_set_e_to_scriptlets(pkg):
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
            d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)

    def write_if_exists(f, pkg, var):
        def encode(s):
            c = codecs.getencoder("unicode_escape")
            return c(s)[0].decode("latin1")

        val = d.getVar('%s:%s' % (var, pkg))
        if val:
            f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar(var)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG:%s: %s" % (ml_pkg, pkg))

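    # The main body below writes the pkgdata tree under PKGDESTWORK:
    #   ${PN}                          - "PACKAGES: ..." for this recipe
    #   runtime/<pkg>                  - per-package variables (PKGDATA_VARS,
    #                                    FILERPROVIDES/FILERDEPENDS data, PKGSIZE)
    #   runtime/<pkg>.packaged         - marker that <pkg> was actually emitted
    #   runtime-reverse/<renamed pkg>  - symlink back to runtime/<pkg>
    #   runtime-rprovides/<prov>/<pkg> - symlink for RPROVIDES lookups
    #   extended/<pkg>.json.zstd       - per-file size/debug-source details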
    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG:%s' % pkg, pkg)

        extended_data = {
            "files_info": {}
        }

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        files_extra = {}
        total_size = 0
        seen = set()
        for f in pkgfiles[pkg]:
            fpath = os.sep + os.path.relpath(f, pkgdestpkg)

            fstat = os.lstat(f)
            files[fpath] = fstat.st_size

            extended_data["files_info"].setdefault(fpath, {})
            extended_data["files_info"][fpath]['size'] = fstat.st_size

            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size

            if fpath in pkgdebugsource:
                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
                del pkgdebugsource[fpath]

        d.setVar('FILES_INFO:' + pkg, json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                val = write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)

            sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))

        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

def mapping_rename_hook(d):
    """
    Rewrite runtime dependency variables to account for package renaming done
    by e.g. debian.bbclass or by manual changes to the PKG variable.
    """
    pkg = d.getVar("PKG")
    oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
    oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
    oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
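
# Illustrative usage of mapping_rename_hook() (the datastore name is a
# placeholder): the packaging backends are expected to call it with a
# per-package datastore whose PKG/RDEPENDS/RRECOMMENDS/RSUGGESTS describe a
# single output package, e.g.
#   oe.packagedata.mapping_rename_hook(localdata)
# after which the three runtime dependency variables refer to the renamed
# package names.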