#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

#
# This class knows how to package up [e]glibc. It's shared since prebuilt binary toolchains
# may need packaging and it's pointless to duplicate this code.
#
# Caller should set GLIBC_INTERNAL_USE_BINARY_LOCALE to one of:
#  "compile" - Use QEMU to generate the binary locale files
#  "precompiled" - The binary locale files are pregenerated and already present
#  "ondevice" - The device will build the locale files upon first boot through the postinst

GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
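
# Illustrative sketch (example values, not defaults): binary locale generation
# is normally driven from a distro configuration or local.conf through
# variables read by this class, along the lines of:
#
#   ENABLE_BINARY_LOCALE_GENERATION = "1"
#   BINARY_LOCALE_ARCHES = "arm.* aarch64.* x86_64"
#   GLIBC_GENERATE_LOCALES = "en_US.UTF-8 de_DE.UTF-8"
#   GLIBC_SPLIT_LC_PACKAGES = "1"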

GLIBC_SPLIT_LC_PACKAGES ?= "0"

python __anonymous () {
    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION")

    pn = d.getVar("PN")
    if pn.endswith("-initial"):
        enabled = False

    if enabled and int(enabled):
        import re

        target_arch = d.getVar("TARGET_ARCH")
        binary_arches = d.getVar("BINARY_LOCALE_ARCHES") or ""
        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or ""

        for regexp in binary_arches.split(" "):
            r = re.compile(regexp)

            if r.match(target_arch):
                depends = d.getVar("DEPENDS")
                if use_cross_localedef == "1":
                    depends = "%s cross-localedef-native" % depends
                else:
                    depends = "%s qemu-native" % depends
                d.setVar("DEPENDS", depends)
                d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
                break
}

# Try to fix compile failures when charsets/locales/locale-code are disabled
PACKAGE_NO_GCONV ?= "0"

OVERRIDES:append = ":${TARGET_ARCH}-${TARGET_OS}"

locale_base_postinst_ontarget() {
	localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s %s
}
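
# Illustrative rendering only: the three %s placeholders above are filled in by
# output_locale_source() below as (locale, encoding, locale), so for e.g. the
# locale "en_US" with charmap "UTF-8" the on-target postinst runs roughly
#   localedef --inputfile=${datadir}/i18n/locales/en_US --charmap=UTF-8 en_US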

locale_base_postrm() {
#!/bin/sh
	localedef --delete-from-archive --inputfile=${datadir}/locales/%s --charmap=%s %s
}

LOCALETREESRC ?= "${PKGD}"

do_prep_locale_tree() {
	treedir=${WORKDIR}/locale-tree
	rm -rf $treedir
	mkdir -p $treedir/${base_bindir} $treedir/${base_libdir} $treedir/${datadir} $treedir/${localedir}
	tar -cf - -C ${LOCALETREESRC}${datadir} -p i18n | tar -xf - -C $treedir/${datadir}
	# Decompress the charmaps to avoid parsing errors
	for i in $treedir/${datadir}/i18n/charmaps/*gz; do
		gunzip $i
	done
	# The extract pattern "./l*.so*" is carefully selected so that it will
	# match ld*.so and lib*.so*, but not any files in the gconv directory
	# (if it exists). This makes sure we only unpack the files we need.
	# This is important in case usrmerge is set in DISTRO_FEATURES, which
	# means ${base_libdir} == ${libdir}.
	tar -cf - -C ${LOCALETREESRC}${base_libdir} -p . | tar -xf - -C $treedir/${base_libdir} --wildcards './l*.so*'
	if [ -f ${STAGING_LIBDIR_NATIVE}/libgcc_s.* ]; then
		tar -cf - -C ${STAGING_LIBDIR_NATIVE} -p libgcc_s.* | tar -xf - -C $treedir/${base_libdir}
	fi
	install -m 0755 ${LOCALETREESRC}${bindir}/localedef $treedir/${base_bindir}
}
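
# Illustrative layout only (assuming the usual ${base_bindir}=/bin,
# ${base_libdir}=/lib, ${datadir}=/usr/share, ${localedir}=/usr/lib/locale),
# do_prep_locale_tree leaves roughly this under ${WORKDIR}/locale-tree:
#
#   bin/localedef                target localedef binary
#   lib/ld*.so*, lib/lib*.so*    loader and libraries needed to run it
#   usr/share/i18n/charmaps/     uncompressed charmaps
#   usr/share/i18n/locales/      locale source definitions
#   usr/lib/locale/              output directory for the compiled locales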

do_collect_bins_from_locale_tree() {
	treedir=${WORKDIR}/locale-tree

	parent=$(dirname ${localedir})
	mkdir -p ${PKGD}/$parent
	tar -cf - -C $treedir/$parent -p $(basename ${localedir}) | tar -xf - -C ${PKGD}$parent

	# Finalize the tree by changing all duplicate files into hard links
	cross-localedef-hardlink -c -v ${WORKDIR}/locale-tree
}

inherit qemu

python package_do_split_gconvs () {
    import re
    if (d.getVar('PACKAGE_NO_GCONV') == '1'):
        bb.note("package requested not splitting gconvs")
        return

    if not d.getVar('PACKAGES'):
        return

    mlprefix = d.getVar("MLPREFIX") or ""

    bpn = d.getVar('BPN')
    libdir = d.getVar('libdir')
    if not libdir:
        bb.error("libdir not defined")
        return
    datadir = d.getVar('datadir')
    if not datadir:
        bb.error("datadir not defined")
        return

    gconv_libdir = oe.path.join(libdir, "gconv")
    charmap_dir = oe.path.join(datadir, "i18n", "charmaps")
    locales_dir = oe.path.join(datadir, "i18n", "locales")
    binary_locales_dir = d.getVar('localedir')

    def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "rb")
        c_re = re.compile(r'^copy "(.*)"')
        i_re = re.compile(r'^include "(\w+)".*')
        for l in f.readlines():
            l = l.decode("latin-1")
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS:%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, gconv_libdir, file_regex=r'^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
        description='gconv module for character set %s', hook=calc_gconv_deps, \
        extra_depends=bpn+'-gconv')

    def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "rb")
        c_re = re.compile(r'^copy "(.*)"')
        i_re = re.compile(r'^include "(\w+)".*')
        for l in f.readlines():
            l = l.decode("latin-1")
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS:%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, charmap_dir, file_regex=r'^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
        description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')

    def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "rb")
        c_re = re.compile(r'^copy "(.*)"')
        i_re = re.compile(r'^include "(\w+)".*')
        for l in f.readlines():
            l = l.decode("latin-1")
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS:%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, locales_dir, file_regex=r'(.*)', output_pattern=bpn+'-localedata-%s', \
        description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
    d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv')

    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE")

    dot_re = re.compile(r"(.*)\.(.*)")

    # Read in supported locales and associated encodings
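    # Each usable line of SUPPORTED holds a locale name and its charset separated
    # by whitespace, e.g. "en_US.UTF-8 UTF-8" (illustrative); lines that do not
    # split into exactly two fields are skipped.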
    supported = {}
    with open(oe.path.join(d.getVar('WORKDIR'), "SUPPORTED")) as f:
        for line in f.readlines():
            try:
                locale, charset = line.rstrip().split()
            except ValueError:
                continue
            supported[locale] = charset

    # GLIBC_GENERATE_LOCALES specifies which locales are to be generated; empty or "all" means all locales
    to_generate = d.getVar('GLIBC_GENERATE_LOCALES')
    if not to_generate or to_generate == 'all':
        to_generate = sorted(supported.keys())
    else:
        to_generate = to_generate.split()
        for locale in to_generate:
            if locale not in supported:
                if '.' in locale:
                    charset = locale.split('.')[1]
                else:
                    charset = 'UTF-8'
                bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
                supported[locale] = charset

    def output_locale_source(name, pkgname, locale, encoding):
        d.setVar('RDEPENDS:%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \
            (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
        d.setVar('pkg_postinst_ontarget:%s' % pkgname, d.getVar('locale_base_postinst_ontarget') \
            % (locale, encoding, locale))
        d.setVar('pkg_postrm:%s' % pkgname, d.getVar('locale_base_postrm') % \
            (locale, encoding, locale))

    def output_locale_binary_rdepends(name, pkgname, locale, encoding):
        dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name))
        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
        if lcsplit and int(lcsplit):
            d.appendVar('PACKAGES', ' ' + dep)
            d.setVar('ALLOW_EMPTY:%s' % dep, '1')
        d.setVar('RDEPENDS:%s' % pkgname, mlprefix + dep)

    commands = {}

    def output_locale_binary(name, pkgname, locale, encoding):
        treedir = oe.path.join(d.getVar("WORKDIR"), "locale-tree")
        ldlibdir = oe.path.join(treedir, d.getVar("base_libdir"))
        path = d.getVar("PATH")
        i18npath = oe.path.join(treedir, datadir, "i18n")
        gconvpath = oe.path.join(treedir, "iconvdata")
        outputpath = oe.path.join(treedir, binary_locales_dir)

        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "0"
        if use_cross_localedef == "1":
            target_arch = d.getVar('TARGET_ARCH')
            locale_arch_options = { \
                "arc":             " --uint32-align=4 --little-endian ", \
                "arceb":           " --uint32-align=4 --big-endian ", \
                "arm":             " --uint32-align=4 --little-endian ", \
                "armeb":           " --uint32-align=4 --big-endian ", \
                "aarch64":         " --uint32-align=4 --little-endian ", \
                "aarch64_be":      " --uint32-align=4 --big-endian ", \
                "sh4":             " --uint32-align=4 --big-endian ", \
                "powerpc":         " --uint32-align=4 --big-endian ", \
                "powerpc64":       " --uint32-align=4 --big-endian ", \
                "powerpc64le":     " --uint32-align=4 --little-endian ", \
                "mips":            " --uint32-align=4 --big-endian ", \
                "mipsisa32r6":     " --uint32-align=4 --big-endian ", \
                "mips64":          " --uint32-align=4 --big-endian ", \
                "mipsisa64r6":     " --uint32-align=4 --big-endian ", \
                "mipsel":          " --uint32-align=4 --little-endian ", \
                "mipsisa32r6el":   " --uint32-align=4 --little-endian ", \
                "mips64el":        " --uint32-align=4 --little-endian ", \
                "mipsisa64r6el":   " --uint32-align=4 --little-endian ", \
                "riscv64":         " --uint32-align=4 --little-endian ", \
                "riscv32":         " --uint32-align=4 --little-endian ", \
                "i586":            " --uint32-align=4 --little-endian ", \
                "i686":            " --uint32-align=4 --little-endian ", \
                "x86_64":          " --uint32-align=4 --little-endian " }

            if target_arch in locale_arch_options:
                localedef_opts = locale_arch_options[target_arch]
            else:
                bb.error("locale_arch_options not found for target_arch=" + target_arch)
                bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")

            localedef_opts += " --force --no-hard-links --no-archive --prefix=%s \
                --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
                % (treedir, treedir, datadir, locale, encoding, outputpath, name)

            cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
                (path, i18npath, gconvpath, localedef_opts)
        else: # fall back to the older, slower QEMU path
            qemu = qemu_target_binary(d)
            localedef_opts = "--force --no-hard-links --no-archive --prefix=%s \
                --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                % (treedir, datadir, locale, encoding, name)

            qemu_options = d.getVar('QEMU_OPTIONS')

            cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                -E LD_LIBRARY_PATH=%s %s %s${base_bindir}/localedef %s" % \
                (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)

        commands["%s/%s" % (outputpath, name)] = cmd
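        # Illustrative only: with cross-localedef, TARGET_ARCH "aarch64", the
        # locale name "en_US", charmap "UTF-8" and the usual ${datadir} and
        # ${localedir} values, the stored command comes out roughly as
        #   PATH="..." I18NPATH="<treedir>/usr/share/i18n" GCONV_PATH="<treedir>/iconvdata" \
        #     cross-localedef --uint32-align=4 --little-endian --force --no-hard-links \
        #     --no-archive --prefix=<treedir> --inputfile=<treedir>/usr/share/i18n/locales/en_US \
        #     --charmap=UTF-8 <treedir>/usr/lib/locale/en_US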

        bb.note("generating locale %s (%s)" % (locale, encoding))

    def output_locale(name, locale, encoding):
        pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name)
        d.setVar('ALLOW_EMPTY:%s' % pkgname, '1')
        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES')))
        rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
        m = re.match(r"(.*)_(.*)", name)
        if m:
            rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
        d.setVar('RPROVIDES:%s' % pkgname, rprovides)

        if use_bin == "compile":
            output_locale_binary_rdepends(name, pkgname, locale, encoding)
            output_locale_binary(name, pkgname, locale, encoding)
        elif use_bin == "precompiled":
            output_locale_binary_rdepends(name, pkgname, locale, encoding)
        else:
            output_locale_source(name, pkgname, locale, encoding)

    if use_bin == "compile":
        bb.note("preparing tree for binary locale generation")
        bb.build.exec_func("do_prep_locale_tree", d)

    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY') or 0)
    utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT') or 0)

    encodings = {}
    for locale in to_generate:
        charset = supported[locale]
        if utf8_only and charset != 'UTF-8':
            continue

        m = dot_re.match(locale)
        if m:
            base = m.group(1)
        else:
            base = locale

        # Non-precompiled locales may be renamed so that the default
        # (non-suffixed) encoding is always UTF-8, i.e., instead of en_US and
        # en_US.UTF-8, we have en_US and en_US.ISO-8859-1. This implicitly
        # contradicts SUPPORTED.
        if use_bin == "precompiled" or not utf8_is_default:
            output_locale(locale, base, charset)
        else:
            if charset == 'UTF-8':
                output_locale(base, base, charset)
            else:
                output_locale('%s.%s' % (base, charset), base, charset)

    def metapkg_hook(file, pkg, pattern, format, basename):
        name = basename.split('/', 1)[0]
        metapkg = legitimize_package_name('%s-binary-localedata-%s' % (mlprefix+bpn, name))
        d.appendVar('RDEPENDS:%s' % metapkg, ' ' + pkg)

    if use_bin == "compile":
        makefile = oe.path.join(d.getVar("WORKDIR"), "locale-tree", "Makefile")
        with open(makefile, "w") as m:
            m.write("all: %s\n\n" % " ".join(commands.keys()))
            total = len(commands)
            for i, (maketarget, makerecipe) in enumerate(commands.items()):
                m.write(maketarget + ":\n")
                m.write("\t@echo 'Progress %d/%d'\n" % (i, total))
                m.write("\t" + makerecipe + "\n\n")
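        # Illustrative shape of the generated Makefile (targets and commands
        # depend on the settings above):
        #
        #   all: <outputpath>/en_US <outputpath>/de_DE ...
        #
        #   <outputpath>/en_US:
        #           @echo 'Progress 0/<total>'
        #           <cross-localedef or qemu localedef command>
        #
        # oe_runmake then builds the locales in parallel (${PARALLEL_MAKE}) and
        # the progress regex below turns the echoed lines into task progress.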
        d.setVar("EXTRA_OEMAKE", "-C %s ${PARALLEL_MAKE}" % (os.path.dirname(makefile)))
        d.setVarFlag("oe_runmake", "progress", r"outof:Progress\s(\d+)/(\d+)")
        bb.note("Executing binary locale generation makefile")
        bb.build.exec_func("oe_runmake", d)
        bb.note("collecting binary locales from locale tree")
        bb.build.exec_func("do_collect_bins_from_locale_tree", d)

    if use_bin in ('compile', 'precompiled'):
        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
        if lcsplit and int(lcsplit):
            do_split_packages(d, binary_locales_dir, file_regex=r'^(.*/LC_\w+)', \
                output_pattern=bpn+'-binary-localedata-%s', \
                description='binary locale definition for %s', recursive=True,
                hook=metapkg_hook, extra_depends='', allow_dirs=True, match_path=True)
        else:
            do_split_packages(d, binary_locales_dir, file_regex=r'(.*)', \
                output_pattern=bpn+'-binary-localedata-%s', \
                description='binary locale definition for %s', extra_depends='', allow_dirs=True)
    else:
        bb.note("generation of binary locales disabled. this may break i18n!")
}

# We want to do this indirection so that we can safely 'return'
# from the called function even though we're prepending
python populate_packages:prepend () {
	bb.build.exec_func('package_do_split_gconvs', d)
}