blob: 2d985d8affd0d85464f038e87653de358c53d6fb [file] [log] [blame]
Patrick Williams92b42cb2022-09-03 06:53:57 -05001#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7#
8# Packaging process
9#
10# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
11# Taking D and splitting it up into the packages listed in PACKAGES, placing the
12# resulting output in PKGDEST.
13#
14# There are the following default steps but PACKAGEFUNCS can be extended:
15#
16# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
17#
18# b) perform_packagecopy - Copy D into PKGD
19#
20# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
21#
22# d) split_and_strip_files - split the files into runtime and debug and strip them.
23# Debug files include debug info split, and associated sources that end up in -dbg packages
24#
25# e) fixup_perms - Fix up permissions in the package before we split it.
26#
27# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
28# Also triggers the binary stripping code to put files in -dbg packages.
29#
30# g) package_do_filedeps - Collect perfile run-time dependency metadata
# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
32# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
33#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
35# dependencies found. Also stores the package name so anyone else using this library
36# knows which package to depend on.
37#
38# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
39#
40# j) read_shlibdeps - Reads the stored shlibs information into the metadata
41#
42# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
43#
44# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
45# packaging steps
46
47inherit packagedata
48inherit chrpath
49inherit package_pkgdata
50inherit insane
51
52PKGD = "${WORKDIR}/package"
53PKGDEST = "${WORKDIR}/packages-split"
54
55LOCALE_SECTION ?= ''
56
57ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
58
59# rpm is used for the per-file dependency identification
60# dwarfsrcfiles is used to determine the list of debug source files
61PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
62
63
64# If your postinstall can execute at rootfs creation time rather than on
65# target but depends on a native/cross tool in order to execute, you need to
66# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
67# in the package dependencies as normal, this is just for native/cross support
68# tools at rootfs build time.
69PACKAGE_WRITE_DEPS ??= ""
70
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes glibc-locale-style <Uxxxx> codepoint escapes, lowercases the
    result and replaces characters that are not valid in package names
    ('_' and '/' become '-'; '@' and ',' become '+').
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Fix: the previous '\\uXXXX' + unicode_escape decode required
            # exactly 4 hex digits, so short forms like <U41> (which the
            # regex below accepts) raised UnicodeDecodeError. int/chr handles
            # any 1-4 digit codepoint.
            return chr(int(cp, 16))

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of package names generated by this invocation.
    """

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # For multilib builds, prefix both the generated package names and any
    # extra dependencies with MLPREFIX unless they already carry it.
    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    # Build the candidate list: immediate directory entries, or all files
    # (as root-relative paths) when recursing.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends == None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    # Sorted iteration keeps package metadata deterministic between builds.
    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # Only plain files match unless symlinks/directories were opted in.
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES:' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            # First file for this package: seed FILES plus any aux patterns.
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES:' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
        # Respect any DESCRIPTION/SUMMARY already set by the recipe.
        if not d.getVar('DESCRIPTION:' + pkg):
            d.setVar('DESCRIPTION:' + pkg, description % on)
        if not d.getVar('SUMMARY:' + pkg):
            d.setVar('SUMMARY:' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst:' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm:' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return list(split_packages)
244
245PACKAGE_DEPENDS += "file-native"
246
python () {
    # Only wire up packaging task dependencies when this recipe actually
    # produces packages (PACKAGES may be set empty to disable packaging).
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        # minidebuginfo injection compresses the symbol data with xz
        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
            deps += ' xz-native:do_populate_sysroot'
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
259
260# Get a list of files from file vars by searching files under current working directory
261# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """Expand FILES-style variable entries into concrete paths.

    Must be run with the package root as the current working directory:
    each entry is normalised to a './'-prefixed relative path and globbed.
    Returns a tuple (files, symlink_paths): files is the expanded list
    (with the immediate children of non-symlink directories appended), and
    symlink_paths records entries that resolved through a directory symlink
    (those entries are truncated to the symlink itself in files).
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # If the glob expanded to something other than the literal
            # entry, use the expansion; otherwise fall through and keep
            # the entry as-is.
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                # Include the directory's immediate children as well
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
302
303# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """Return the configuration files for package pkg.

    Reads CONFFILES:<pkg> (falling back to CONFFILES), expands the entries
    relative to PKGDEST/<pkg> and returns only plain, existing files as
    paths relative to the package root (leading '.' removed).
    """
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # Fix: restore the original working directory even if expansion raises,
    # otherwise a failure here leaves the whole task in the wrong directory.
    try:
        conffiles = d.getVar('CONFFILES:%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Keep only normal files: drop directories, symlinks and anything
        # that does not actually exist in the package.
        conf_list = [f for f in conf_orig_list
                     if not os.path.isdir(f)
                     and not os.path.islink(f)
                     and os.path.exists(f)]

        # Remove the leading './'
        conf_list = [f[1:] for f in conf_list]
    finally:
        os.chdir(cwd)
    return conf_list
335
def checkbuildpath(file, d):
    """Return True when the text of *file* mentions the build TMPDIR path."""
    tmpdir = d.getVar('TMPDIR')
    with open(file) as fh:
        return tmpdir in fh.read()
344
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """Extract the debug source file paths from dwarfsrcfiles output.

    Source entries are the tab-indented lines; paths are normalised and
    de-duplicated (insertion order preserved via dict keys).
    """
    debugfiles = {
        os.path.normpath(line.split()[0]): ""
        for line in dwarfsrcfiles_output.splitlines()
        if line.startswith("\t")
    }
    return debugfiles.keys()
353
def source_info(file, d, fatal=True):
    """Run dwarfsrcfiles on *file* and return its DWARF source file list.

    Exit code 255 means the file could not be fully parsed, which is
    tolerated; any other non-zero exit is fatal (or only noted when
    fatal=False). Returns a list of source paths (possibly empty).
    """
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        # Keep the partial output; it may still contain usable entries.
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)

    return list(debugsources)
375
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # dvar is PKGD; dv is a configuration dict (keys used here: "libdir",
    # "dir", "append", "srcdir") describing where debug files are placed.
    #
    # return a mapping of files:debugsources

    import stat
    import subprocess

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Stripping a signed kernel module would invalidate its signature, so
    # leave those untouched.
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Make sure the file is read/writable for objcopy; original mode is
    # restored at the end.
    # NOTE(review): "or os.access(file, os.R_OK)" is true for any readable
    # file, so the chmod effectively always happens — "not os.access(file,
    # os.R_OK)" looks like the intent; confirm before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debug sections into the separate .debug file...
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
425
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike splitdebuginfo() above, there is no way to split a static library
    # into two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # dvar is PKGD; dv is a configuration dict (keys used here:
    # "staticlibdir", "staticdir", "staticappend", "srcdir").
    #
    # return a mapping of files:debugsources

    import stat
    import shutil  # fix: shutil.copy2 is used below but was never imported here

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    # Make sure the file is read/writable; original mode restored at the end.
    # NOTE(review): same quirk as splitdebuginfo() — "or os.access(file,
    # os.R_OK)" makes this nearly always true; confirm intent before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
467
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
    #
    # Requires the corresponding split debug file (see splitdebuginfo) to
    # already exist; silently skips the binary otherwise.

    import subprocess

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        fields = line.split()
        if len(fields) < 8:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[7]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # 'A' in the flags column marks an allocated (loaded) section
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Start from a clean slate in case of a previous partial run
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # -S strips debug sections; we additionally drop the unneeded sections
    # found above and keep only the selected symbols.
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
544
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # sources is the aggregate list of source paths collected by
    # source_info(); files are copied under PKGD/<debugsrcdir> so they can
    # be packaged into the -dbg package.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Collect -fdebug-prefix-map substitutions from CFLAGS so recorded
        # (remapped) source paths can be translated back to real locations.
        # Sysroot mappings are skipped: those files are not ours to package.
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Create the target directory tree, remembering which components did
        # not previously exist so empty ones can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            # NOTE(review): "LC_ALL=C ;" assigns an unexported shell variable,
            # so it likely has no effect on sort — confirm intent.
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
629
630#
631# Package data handling routines
632#
633
def get_package_mapping (pkg, basepkg, d, depversions=None):
    """Map a dependency package name to its final (possibly renamed) name.

    Looks up PKG:<pkg> in the pkgdata for pkg and returns the recorded
    rename, or pkg unchanged when no rename applies or when following the
    rename would be wrong (allarch variants, or the renamed package still
    rprovides its original name for an unversioned dependency).
    """
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG:%s" % pkg

    if key in data:
        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        # depversions == [] means an unversioned dependency on pkg
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES:%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg
658
def get_package_additional_metadata (pkg_type, d):
    """Return extra package metadata for the given backend type.

    Checks PACKAGE_ADD_METADATA_<TYPE> (e.g. _RPM, _IPK, _DEB) first, then
    the generic PACKAGE_ADD_METADATA. Entries are split on the variable's
    "separator" flag (default '\\n') and joined into one newline-separated
    string. Returns None when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        # getVar(..., False): look at the unexpanded value to detect "unset"
        if d.getVar(key, False) is None:
            continue
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(metadata_fields).strip()
669
def runtime_mapping_rename (varname, pkg, d):
    """Rewrite every dependency name in d[varname] (e.g. RDEPENDS:<pkg>)
    through get_package_mapping(), so renamed packages are referenced by
    their final packaged names."""
    dep_map = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    remapped = {}
    for orig_name, versions in dep_map.items():
        mapped_name = get_package_mapping(orig_name, pkg, d, versions)
        if mapped_name != orig_name:
            bb.note("package name mapping done: %s -> %s" % (orig_name, mapped_name))
        remapped[mapped_name] = versions
    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))
684
685#
686# Used by do_packagedata (and possibly other routines post do_package)
687#
688
689PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
690PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
691package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
692package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
python package_get_auto_pr() {
    # Obtain the automatic PR (package revision) value for this recipe,
    # either from a lockdown-exported file or from the PR service, and
    # resolve the AUTOINC placeholder in PKGV.
    import oe.prservice

    # Find this recipe's do_package task unihash from the task dependency
    # data; it is used as the checksum key for the PR service.
    def get_do_package_hash(pn):
        if d.getVar("BB_RUNTASK") != "do_package":
            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
            for dep in taskdepdata:
                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
                    return taskdepdata[dep][6]
        return None

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        d.setVar("PRSERV_PV_AUTOINC", "0")
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = get_do_package_hash(pn)

    # If do_package isn't in the dependencies, we can't get the checksum...
    if not checksum:
        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
        #for dep in taskdepdata:
        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
        return

    if d.getVar('PRSERV_LOCKDOWN'):
        # Offline mode: PR values must come from a previously exported file
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            # Resolve the AUTOINC placeholder via the PR service first
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PRSERV_PV_AUTOINC", str(value))

            auto_pr = conn.getPR(version, pkgarch, checksum)
            conn.close()
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
755
756#
757# Package functions suitable for inclusion in PACKAGEFUNCS
758#
759
python package_convert_pr_autoinc() {
    # Convert the AUTOINC placeholder in PKGV to a PRSERV_PV_AUTOINC
    # reference, then neutralise PRSERV_PV_AUTOINC/EXTENDPRAUTO with
    # placeholder tokens that are substituted at package-write time.
    pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))

    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
}
771
772LOCALEBASEPN ??= "${PN}"
773
python package_do_split_locales() {
    # Split installed locale data (${datadir}/locale/<l>) into one
    # ${LOCALEBASEPN}-locale-<l> package per locale, updating PACKAGES and
    # the per-package FILES/RRECOMMENDS/RPROVIDES/SUMMARY/DESCRIPTION.
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all -locale package is superseded by the per-locale ones
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        # Locale directory names may contain characters that are invalid in
        # package names (e.g. '@'), so sanitise them first.
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
}
828
python perform_packagecopy () {
    # Copy the installed tree D into PKGD, which all later packaging steps
    # operate on, leaving D untouched.
    import subprocess
    import shutil

    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
846perform_packagecopy[cleandirs] = "${PKGD}"
847perform_packagecopy[dirs] = "${PKGD}"
848
849# We generate a master list of directories to process, we start by
850# seeding this list with reasonable defaults, then load from
851# the fs-perms.txt files
python fixup_perms () {
    import pwd, grp

    # Apply the ownership/permission policy described by the fs-perms.txt
    # configuration files (FILESYSTEM_PERMS_TABLES, resolved via BBPATH)
    # to the tree in ${PKGD} before it is split into packages. Standard
    # target directories default to 0755 root:root.

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        # "-" (or empty) means "leave the mode alone"
        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                # symbolic user name: resolve against the host passwd db
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                # symbolic group name: resolve against the host group db
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str



    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    # Later entries override earlier ones; a path can only be either a
    # perms entry or a link entry, so adding one removes the other.
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        # Only convert real, existing directories (not symlinks) to links
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Now apply the mode/ownership entries; with walk == 'true' the
    # fmode/fuid/fgid columns are applied to files found underneath.
    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
1088
def package_debug_vars(d):
    """Return the path/name fragments that control debug-info splitting.

    The returned dict contains:
      append/staticappend -- suffix appended to a (static) debug file name
      dir/staticdir       -- per-directory subdir holding (static) debug files
      libdir/staticlibdir -- absolute tree prefix for (static) debug files
      srcdir              -- where referenced sources are copied ('' disables)

    The layout is selected by PACKAGE_DEBUG_SPLIT_STYLE; anything other
    than the recognised styles falls back to the original OE-core
    ".debug" style (identical to 'debug-with-srcpkg' at this level --
    the srcpkg difference is handled in populate_packages).
    """
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')
    if style == 'debug-file-directory':
        # Single debug-file-directory style debug info under /usr/lib/debug
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    else:
        # Original OE-core, a.k.a. ".debug", style debug info; the
        # 'debug-without-src' variant merely omits /usr/src/debug.
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "" if style == 'debug-without-src' else "/usr/src/debug",
        }

    return debug_vars
1136
python split_and_strip_files () {
    import stat, errno
    import subprocess

    # Find the ELF binaries in ${PKGD}, split their debug information
    # out into the layout chosen by PACKAGE_DEBUG_SPLIT_STYLE (see
    # package_debug_vars), copy the referenced sources, then strip the
    # binaries. Hardlink groups are tracked so each inode is only
    # split/stripped once and the debug files mirror the hardlinks.

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check its an executable: any execute bit set, or a
                # shared object / node module in a lib dir, or a kernel
                # image/module (vmlinux*, *.ko)
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Classify the symlink targets and the regular files as ELF (or
        # not) in parallel worker processes.
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            # (elf_file is a bitmask: bit 1 = is ELF, bit 2 = already stripped)
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    # Later hardlink of an already-seen inode: relink it
                    # to the first copy and record it for later fixup.
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                # break hardlink
                bb.utils.break_hardlinks(file)
                elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        # Record the per-file debug source lists for emit_pkgdata
        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                # NOTE(review): 16 appears to tag static archives for
                # oe.package.runstrip -- confirm against its implementation
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
}
1360
1361python populate_packages () {
1362 import glob, re
1363
1364 workdir = d.getVar('WORKDIR')
1365 outdir = d.getVar('DEPLOY_DIR')
1366 dvar = d.getVar('PKGD')
1367 packages = d.getVar('PACKAGES').split()
1368 pn = d.getVar('PN')
1369
1370 bb.utils.mkdirhier(outdir)
1371 os.chdir(dvar)
1372
1373 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1374
1375 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1376
1377 # If debug-with-srcpkg mode is enabled then add the source package if it
1378 # doesn't exist and add the source file contents to the source package.
1379 if split_source_package:
1380 src_package_name = ('%s-src' % d.getVar('PN'))
1381 if not src_package_name in packages:
1382 packages.append(src_package_name)
1383 d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1384
1385 # Sanity check PACKAGES for duplicates
1386 # Sanity should be moved to sanity.bbclass once we have the infrastructure
1387 package_dict = {}
1388
1389 for i, pkg in enumerate(packages):
1390 if pkg in package_dict:
1391 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1392 oe.qa.handle_error("packages-list", msg, d)
1393 # Ensure the source package gets the chance to pick up the source files
1394 # before the debug package by ordering it first in PACKAGES. Whether it
1395 # actually picks up any source files is controlled by
1396 # PACKAGE_DEBUG_SPLIT_STYLE.
1397 elif pkg.endswith("-src"):
1398 package_dict[pkg] = (10, i)
1399 elif autodebug and pkg.endswith("-dbg"):
1400 package_dict[pkg] = (30, i)
1401 else:
1402 package_dict[pkg] = (50, i)
1403 packages = sorted(package_dict.keys(), key=package_dict.get)
1404 d.setVar('PACKAGES', ' '.join(packages))
1405 pkgdest = d.getVar('PKGDEST')
1406
1407 seen = []
1408
1409 # os.mkdir masks the permissions with umask so we have to unset it first
1410 oldumask = os.umask(0)
1411
1412 debug = []
1413 for root, dirs, files in cpath.walk(dvar):
1414 dir = root[len(dvar):]
1415 if not dir:
1416 dir = os.sep
1417 for f in (files + dirs):
1418 path = "." + os.path.join(dir, f)
1419 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1420 debug.append(path)
1421
1422 for pkg in packages:
1423 root = os.path.join(pkgdest, pkg)
1424 bb.utils.mkdirhier(root)
1425
1426 filesvar = d.getVar('FILES:%s' % pkg) or ""
1427 if "//" in filesvar:
1428 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1429 oe.qa.handle_error("files-invalid", msg, d)
1430 filesvar.replace("//", "/")
1431
1432 origfiles = filesvar.split()
1433 files, symlink_paths = files_from_filevars(origfiles)
1434
1435 if autodebug and pkg.endswith("-dbg"):
1436 files.extend(debug)
1437
1438 for file in files:
1439 if (not cpath.islink(file)) and (not cpath.exists(file)):
1440 continue
1441 if file in seen:
1442 continue
1443 seen.append(file)
1444
1445 def mkdir(src, dest, p):
1446 src = os.path.join(src, p)
1447 dest = os.path.join(dest, p)
1448 fstat = cpath.stat(src)
1449 os.mkdir(dest)
1450 os.chmod(dest, fstat.st_mode)
1451 os.chown(dest, fstat.st_uid, fstat.st_gid)
1452 if p not in seen:
1453 seen.append(p)
1454 cpath.updatecache(dest)
1455
1456 def mkdir_recurse(src, dest, paths):
1457 if cpath.exists(dest + '/' + paths):
1458 return
1459 while paths.startswith("./"):
1460 paths = paths[2:]
1461 p = "."
1462 for c in paths.split("/"):
1463 p = os.path.join(p, c)
1464 if not cpath.exists(os.path.join(dest, p)):
1465 mkdir(src, dest, p)
1466
1467 if cpath.isdir(file) and not cpath.islink(file):
1468 mkdir_recurse(dvar, root, file)
1469 continue
1470
1471 mkdir_recurse(dvar, root, os.path.dirname(file))
1472 fpath = os.path.join(root,file)
1473 if not cpath.islink(file):
1474 os.link(file, fpath)
1475 continue
1476 ret = bb.utils.copyfile(file, fpath)
1477 if ret is False or ret == 0:
1478 bb.fatal("File population failed")
1479
1480 # Check if symlink paths exist
1481 for file in symlink_paths:
1482 if not os.path.exists(os.path.join(root,file)):
1483 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1484 "parent directory structure does not exist. One of "
1485 "its parent directories is a symlink whose target "
1486 "directory is not included in the package." %
1487 (file, pkg))
1488
1489 os.umask(oldumask)
1490 os.chdir(workdir)
1491
1492 # Handle excluding packages with incompatible licenses
1493 package_list = []
1494 for pkg in packages:
1495 licenses = d.getVar('_exclude_incompatible-' + pkg)
1496 if licenses:
1497 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
1498 oe.qa.handle_error("incompatible-license", msg, d)
1499 else:
1500 package_list.append(pkg)
1501 d.setVar('PACKAGES', ' '.join(package_list))
1502
1503 unshipped = []
1504 for root, dirs, files in cpath.walk(dvar):
1505 dir = root[len(dvar):]
1506 if not dir:
1507 dir = os.sep
1508 for f in (files + dirs):
1509 path = os.path.join(dir, f)
1510 if ('.' + path) not in seen:
1511 unshipped.append(path)
1512
1513 if unshipped != []:
1514 msg = pn + ": Files/directories were installed but not shipped in any package:"
1515 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1516 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1517 else:
1518 for f in unshipped:
1519 msg = msg + "\n " + f
1520 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1521 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1522 oe.qa.handle_error("installed-vs-shipped", msg, d)
1523}
1524populate_packages[dirs] = "${D}"
1525
python package_fixsymlinks () {
    import errno
    # Scan every package's tree for dangling symlinks. If the link
    # target is shipped by another package of this recipe, add a runtime
    # dependency (RDEPENDS) on that package; otherwise just note it.
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # dangling_links[pkg]: normalized link targets that do not exist in pkg
    # pkg_files[pkg]: all paths (relative to the package root) pkg ships
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # No self-dependency needed if the target is in the
                    # same package
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS
    # (unversioned), preserving any existing entries.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1569
1570
python package_package_name_hook() {
    """
    A package_name_hook function can be used to rewrite the package names by
    changing PKG. For an example, see debian.bbclass.

    This default implementation deliberately does nothing; it exists as
    the overridable hook exported below via EXPORT_FUNCTIONS.
    """
    pass
}
1578
1579EXPORT_FUNCTIONS package_name_hook
1580
1581
1582PKGDESTWORK = "${WORKDIR}/pkgdata"
1583
1584PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
1585
python emit_pkgdata() {
    from glob import glob
    import json
    import bb.compress.zstd

    # Save the packaging metadata into ${PKGDESTWORK} (pkgdata): the
    # PACKAGES list, a per-package runtime/<pkg> file with the variables
    # named in PKGDATA_VARS plus per-file dependency data and PKGSIZE, a
    # zstd-compressed extended/<pkg>.json.zstd with per-file size and
    # debug-source info, and symlink farms for reverse and rprovides
    # lookups. Postinst scriptlets are also massaged here.

    # Fold pkg_postinst_ontarget into pkg_postinst, prefixed with a
    # fragment that defers execution to first boot when run in an image
    # build ($D set).
    def process_postinst_on_target(pkg, mlprefix):
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst:%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst:%s' % pkg, postinst)

    # Insert "set -e" after the shebang (or at the top) of each
    # scriptlet so failures are not silently ignored.
    def add_set_e_to_scriptlets(pkg):
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
                d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)

    # Write "VAR:pkg: value" (falling back to the unsuffixed VAR) with
    # control characters escaped so the file stays line-oriented.
    def write_if_exists(f, pkg, var):
        def encode(str):
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(str)[0].decode("latin1")

        val = d.getVar('%s:%s' % (var, pkg))
        if val:
            f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG:%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    # Per-file debug source lists recorded by split_and_strip_files
    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG:%s' % pkg, pkg)

        extended_data = {
            "files_info": {}
        }

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        # NOTE(review): files_extra appears unused in this function
        files_extra = {}
        total_size = 0
        # Track inodes so hardlinked files only count once towards PKGSIZE
        seen = set()
        for f in pkgfiles[pkg]:
            fpath = os.sep + os.path.relpath(f, pkgdestpkg)

            fstat = os.lstat(f)
            files[fpath] = fstat.st_size

            extended_data["files_info"].setdefault(fpath, {})
            extended_data["files_info"][fpath]['size'] = fstat.st_size

            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size

            if fpath in pkgdebugsource:
                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
                del pkgdebugsource[fpath]

        d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                val = write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)

            sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))

        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        # Only mark the package as "packaged" if it ships files or is
        # explicitly allowed to be empty
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

}
1764emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
1765emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
1766
ldconfig_postinst_fragment() {
# Refresh the runtime linker cache, but only when running on the target
# itself: $D is set when installing into an image root from the build
# host, where the target's ldconfig cannot be executed.
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1772
1773RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
1774
1775# Collect perfile run-time dependency metadata
1776# Output:
1777# FILERPROVIDESFLIST:pkg - list of all files w/ deps
1778# FILERPROVIDES:filepath:pkg - per file dep
1779#
1780# FILERDEPENDSFLIST:pkg - list of all files w/ deps
1781# FILERDEPENDS:filepath:pkg - per file dep
1782
python package_do_filedeps() {
    # Collect per-file runtime provides/requires metadata by running RPMDEPS
    # over each package's files (in parallel batches) and store the results
    # in the FILERPROVIDES*/FILERDEPENDS* datastore variables.
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    rpmdeps = d.getVar('RPMDEPS')

    def in_batches(filelist, size):
        # Yield successive fixed-size slices so each worker gets a bounded
        # amount of work.
        for start in range(0, len(filelist), size):
            yield filelist[start:start + size]

    # Package types that never want file-level dependency scanning.
    skip_suffixes = ('-dbg', '-doc', '-src')
    skip_infixes = ('-locale-', '-localedata-', '-gconv-', '-charmap-')

    jobs = []
    for pkg in d.getVar('PACKAGES').split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        if pkg.endswith(skip_suffixes) or pkg.startswith('kernel-module-') \
                or any(infix in pkg for infix in skip_infixes):
            continue
        for batch in in_batches(pkgfiles[pkg], 100):
            jobs.append((pkg, batch, rpmdeps, pkgdest))

    results = oe.utils.multiprocess_launch(oe.package.filedeprunner, jobs, d)

    provides_files = {}
    requires_files = {}

    for pkg, provides, requires in results:
        prov_list = provides_files.setdefault(pkg, [])
        req_list = requires_files.setdefault(pkg, [])

        for fname in sorted(provides):
            prov_list.append(fname)
            d.appendVar("FILERPROVIDES:" + fname + ":" + pkg,
                        " " + " ".join(provides[fname]))

        for fname in sorted(requires):
            req_list.append(fname)
            d.appendVar("FILERDEPENDS:" + fname + ":" + pkg,
                        " " + " ".join(requires[fname]))

    for pkg, flist in requires_files.items():
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(flist)))
    for pkg, flist in provides_files.items():
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(flist)))
}
1831
# SHLIBSDIRS: where shlib provider data published by other recipes is read
# from. SHLIBSWORKDIR: where this recipe writes its own provider lists.
SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1834
python package_do_shlibs() {
    # Detect the shared libraries each package provides (SONAMEs) and needs
    # (NEEDED entries), publish provider data under SHLIBSWORKDIR and write a
    # <pkg>.shlibdeps file in PKGDEST naming the packages that satisfy each
    # dependency. Also adds an ldconfig postinst fragment where required.
    #
    # Fixes versus the previous revision:
    #  - pipes.quote -> shlex.quote (the pipes module is deprecated and was
    #    removed in Python 3.13; pipes.quote was an alias for shlex.quote)
    #  - darwin_so decodes otool output before splitting (communicate()
    #    returns bytes; splitting on a str raised TypeError)
    #  - the per-loop "import fnmatch" statements are hoisted to the top
    import itertools
    import re, shlex
    import fnmatch
    import subprocess

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # Honour EXCLUDE_PACKAGES_FROM_SHLIBS for packages that should not take
    # part in shlib processing at all.
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        # Parse "objdump -p" output for one ELF file and return
        # (needs_ldconfig, needed, sonames, renames) for aggregation by the
        # caller (runs via oe.utils.multiprocess_launch).
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # NOTE(review): 'needed' holds (name, file, rpath) tuples, so
                # this membership test against the bare name never matches;
                # deduplication effectively comes from the set itself.
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Mach-O equivalent of linux_so, using otool to discover rpaths and
        # linked libraries. Uses 'pkg' from the enclosing loop.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                # communicate() returns bytes; decode before splitting on a
                # str separator (previously raised TypeError).
                for l in out.decode().split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.decode().split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        # PE/COFF equivalent: every shipped .dll counts as a provider;
        # "objdump -p" reveals the "DLL Name:" entries that .dll/.exe
        # files consume. Uses 'pkg' from the enclosing loop.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: record what each package provides.
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            # NOTE(review): 'PV_' + pkg looks like a leftover from the old
            # underscore override syntax -- confirm whether this should be
            # 'PV:' + pkg.
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            bb.utils.rename(old, new)
            pkgfiles[pkg].remove(old)

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # Libraries declared via ASSUME_SHLIBS are treated as externally provided.
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's NEEDED entries against the providers.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                # Prefer the binary's own RPATH entries, then registered
                # provider dirs, then the default library search path.
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
}
2122
python package_do_pkgconfig () {
    # Track which packages ship pkg-config (.pc) modules and which packages
    # require them: provider lists go to SHLIBSWORKDIR as <pkg>.pclist, and
    # per-package dependencies are written to PKGDEST/<pkg>.pcdeps.
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if not m:
                continue
            # Scratch datastore so variable references inside the .pc file
            # can be expanded while parsing it.
            pd = bb.data.init()
            name = m.group(1)
            pkgconfig_provided[pkg].append(os.path.basename(name))
            if not os.access(file, os.R_OK):
                continue
            with open(file, 'r') as f:
                lines = f.readlines()
            for l in lines:
                m = var_re.match(l)
                if m:
                    pd.setVar(m.group(1), pd.expand(m.group(2)))
                    continue
                m = field_re.match(l)
                if m and m.group(1) == 'Requires':
                    pkgconfig_needed[pkg] += pd.expand(m.group(2)).replace(',', ' ').split()

    for pkg in packages.split():
        if not pkgconfig_provided[pkg]:
            continue
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        with open(pkgs_file, 'w') as f:
            for p in sorted(pkgconfig_provided[pkg]):
                f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if not m:
                continue
            pkg = m.group(1)
            with open(os.path.join(dir, file)) as fd:
                pkgconfig_provided[pkg] = [l.rstrip() for l in fd.readlines()]

    for pkg in packages.split():
        deps = []
        for module in pkgconfig_needed[pkg]:
            found = False
            for provider, modules in pkgconfig_provided.items():
                if module in modules:
                    found = True
                    if provider != pkg and provider not in deps:
                        deps.append(provider)
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % module)
        if deps:
            deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
}
2204
def read_libdep_files(d):
    """Read the per-package library dependency files produced by earlier
    packaging steps (<pkg>.shlibdeps, <pkg>.pcdeps and <pkg>.clilibdeps
    under PKGDEST) and return a dict mapping each package name to a dict of
    {dependency: [version constraints]}.  Missing files are skipped."""
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: the rstrip() result was previously discarded
                    # (no-op statement); strip the trailing newline before
                    # parsing the dependency line.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        # First extension providing a dependency wins.
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
2222
python read_shlibdeps () {
    # Merge the autodetected library dependencies (from read_libdep_files)
    # into each package's RDEPENDS, keeping any existing entries and version
    # constraints intact.
    pkglibdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES').split():
        varname = 'RDEPENDS:' + pkg
        rdepends = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
        for dep in sorted(pkglibdeps[pkg]):
            # Add the dep if it's not already there, or if no comparison is set
            constraints = rdepends.setdefault(dep, [])
            for ver in pkglibdeps[pkg][dep]:
                if ver not in constraints:
                    constraints.append(ver)
        d.setVar(varname, bb.utils.join_deps(rdepends, commasep=False))
}
2238
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # For each build-time dependency, RRECOMMEND the matching modified
        # package (e.g. B -> B-dev). Used for the -dev chain.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            # Build-only and virtual targets have no runtime counterpart.
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg suffix so the modifier applies to
            # the base package name.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Runtime-dependency variant of pkg_adddeprrecs. NOTE(review): the
        # two helpers differ only in the skip condition and could be merged.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Order-preserving append without duplicates.
        if dep not in list:
            list.append(dep)

    # Flattened build-time dependencies of the recipe.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of the runtime dependencies of all packages.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages that carry it, remembering the base
    # package name and the getname function that reconstructs the variant.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): pkg[:-len(prefix)] strips from the *end*; for
                # a prefix the base is presumably pkg[len(prefix):] -- confirm
                # (latent because DEPCHAIN_PRE is normally empty).
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # -dbg packages recommend the detected library packages' -dbg
        # variants unless DEPCHAIN_DBGDEFAULTDEPS or packagegroup applies.
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            # Single variant of this modifier: chain from everyone's
            # RDEPENDS; multiple variants: only from the parent package's.
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
2360
2361# Since bitbake can't determine which variables are accessed during package
2362# iteration, we need to list them here:
2363PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
2364
def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    """Expand the variable names listed in *pkgvars* (default PACKAGEVARS)
    into the full set of per-package variants used for task checksums."""
    packagenames = (d.getVar("PACKAGES") or "").split()
    varnames = (d.getVar(pkgvars) or "").split()

    # The plain variable names first, then the per-package overrides.
    ret = list(varnames)
    for p in packagenames:
        ret.extend(v + ":" + p for v in varnames)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes.
        ret.append('_exclude_incompatible-%s' % p)
    return " ".join(ret)
2379
2380PACKAGE_PREPROCESS_FUNCS ?= ""
2381# Functions for setting up PKGD
2382PACKAGEBUILDPKGD ?= " \
2383 package_prepare_pkgdata \
2384 perform_packagecopy \
2385 ${PACKAGE_PREPROCESS_FUNCS} \
2386 split_and_strip_files \
2387 fixup_perms \
2388 "
2389# Functions which split PKGD up into separate packages
2390PACKAGESPLITFUNCS ?= " \
2391 package_do_split_locales \
2392 populate_packages"
2393# Functions which process metadata based on split packages
2394PACKAGEFUNCS += " \
2395 package_fixsymlinks \
2396 package_name_hook \
2397 package_do_filedeps \
2398 package_do_shlibs \
2399 package_do_pkgconfig \
2400 read_shlibdeps \
2401 package_depchains \
2402 emit_pkgdata"
2403
python do_package () {
    # Top-level packaging task: sanity-check the environment, prepare PKGD
    # from D (PACKAGEBUILDPKGD), split PKGD into per-package trees under
    # PKGDEST (PACKAGESPLITFUNCS), then run the metadata-generating
    # PACKAGEFUNCS over the result.
    #
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "4"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        oe.qa.handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_convert_pr_autoinc", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh path cache -- presumably to drop stat results cached before the
    # PACKAGEBUILDPKGD functions above modified PKGD (TODO confirm).
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    # Fail the task now if any of the packaging QA checks recorded errors.
    oe.qa.exit_if_errors(d)
}
2487
2488do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
2489do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
2490addtask package after do_install
2491
2492SSTATETASKS += "do_package"
2493do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
2494do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
2495do_package_setscene[dirs] = "${STAGING_DIR}"
2496
python do_package_setscene () {
    # Install do_package output from shared state instead of running the task.
    sstate_setscene(d)
}
2500addtask do_package_setscene
2501
2502# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
2503# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
    # Resolve the automatic PR value before the pkgdata is copied and
    # translated below.
    bb.build.exec_func("package_get_auto_pr", d)

    # Work on a private hardlinked copy of PKGDESTWORK: that directory can be
    # cleaned by both do_package_setscene and do_packagedata_setscene, so
    # operating on it directly would race.
    src = d.expand("${PKGDESTWORK}")
    dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
    oe.path.copyhardlinktree(src, dest)

    bb.build.exec_func("packagedata_translate_pr_autoinc", d)
}
2513do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
2514
# Translate the EXTENDPRAUTO and AUTOINC to the final values
packagedata_translate_pr_autoinc() {
    # Substitute the @PRSERV_PV_AUTOINC@ / @EXTENDPRAUTO@ placeholders in the
    # copied pkgdata files, in place, with their final expanded values.
    find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
        sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
            -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
}
2521
2522addtask packagedata before do_build after do_package
2523
2524SSTATETASKS += "do_packagedata"
2525do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
2526do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
2527do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
2528
python do_packagedata_setscene () {
    # Install do_packagedata output from shared state instead of running it.
    sstate_setscene(d)
}
2532addtask do_packagedata_setscene
2533
2534#
2535# Helper functions for the package writing classes
2536#
2537
def mapping_rename_hook(d):
    """
    Rewrite the runtime dependency variables to account for package renaming
    in things like debian.bbclass or manual PKG variable name changes.
    """
    pkg = d.getVar("PKG")
    for depvar in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(depvar, pkg, d)