blob: 7a0a428b304cd33e160a15e689a827f647a9b04b [file] [log] [blame]
Patrick Williams92b42cb2022-09-03 06:53:57 -05001#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7#
8# Packaging process
9#
10# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
11# Taking D and splitting it up into the packages listed in PACKAGES, placing the
12# resulting output in PKGDEST.
13#
14# There are the following default steps but PACKAGEFUNCS can be extended:
15#
16# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
17#
18# b) perform_packagecopy - Copy D into PKGD
19#
20# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
21#
22# d) split_and_strip_files - split the files into runtime and debug and strip them.
23# Debug files include debug info split, and associated sources that end up in -dbg packages
24#
25# e) fixup_perms - Fix up permissions in the package before we split it.
26#
27# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
28# Also triggers the binary stripping code to put files in -dbg packages.
29#
30# g) package_do_filedeps - Collect perfile run-time dependency metadata
# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
32# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
33#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
35# dependencies found. Also stores the package name so anyone else using this library
36# knows which package to depend on.
37#
38# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
39#
40# j) read_shlibdeps - Reads the stored shlibs information into the metadata
41#
42# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
43#
44# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
45# packaging steps
46
47inherit packagedata
48inherit chrpath
49inherit package_pkgdata
50inherit insane
51
52PKGD = "${WORKDIR}/package"
53PKGDEST = "${WORKDIR}/packages-split"
54
55LOCALE_SECTION ?= ''
56
57ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
58
59# rpm is used for the per-file dependency identification
60# dwarfsrcfiles is used to determine the list of debug source files
61PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
62
63
64# If your postinstall can execute at rootfs creation time rather than on
65# target but depends on a native/cross tool in order to execute, you need to
66# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
67# in the package dependencies as normal, this is just for native/cross support
68# tools at rootfs build time.
69PACKAGE_WRITE_DEPS ??= ""
70
def legitimize_package_name(s):
    """
    Return *s* transformed into a valid package name.

    Two fixups are applied: glibc-style <UXXXX> codepoint escapes are
    decoded into the characters they represent, and characters that are
    not permitted in package names are mapped to allowed substitutes.
    """
    import re

    def decode_codepoint(match):
        hexval = match.group(1)
        if hexval:
            return ('\\u%s' % hexval).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', decode_codepoint, s)

    # Remaining package name validity fixes: lowercase, then map the
    # disallowed characters to their conventional replacements.
    result = s.lower()
    for bad, good in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        result = result.replace(bad, good)
    return result
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    """

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # For multilib builds, make sure generated package names and any extra
    # dependencies carry the multilib prefix (e.g. "lib32-").
    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    # Scripts are wrapped in a shebang so the package backends can install
    # them directly as executable maintainer scripts.
    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    # Collect candidate paths (relative to root) to match against file_regex.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # None (the default) means "depend on the main package"; '' disables this.
    if extra_depends == None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # Only regular files by default; links/dirs only when explicitly allowed.
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES:' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES:' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES:' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends)
        # Don't clobber values a recipe already set explicitly.
        if not d.getVar('DESCRIPTION:' + pkg):
            d.setVar('DESCRIPTION:' + pkg, description % on)
        if not d.getVar('SUMMARY:' + pkg):
            d.setVar('SUMMARY:' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst:' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm:' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return list(split_packages)
244
245PACKAGE_DEPENDS += "file-native"
246
python () {
    # Only add packaging task dependencies when this recipe actually
    # produces packages (PACKAGES may be set empty to disable packaging).
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        # xz is needed at do_package time to compress minidebuginfo data
        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
            deps += ' xz-native:do_populate_sysroot'
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
259
260# Get a list of files from file vars by searching files under current working directory
261# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Resolve FILES-style variable entries into concrete paths.

    Entries are interpreted relative to the current working directory
    (callers chdir into the package root first). Glob patterns are
    expanded, directory contents are added, and files reached through a
    directory symlink are truncated to the symlink itself (with a
    warning). Returns (files, symlink_paths) where symlink_paths are the
    original paths that were truncated.
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to the './relative' form expected below.
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # Keep the literal entry if globbing was a no-op, otherwise use
            # the expanded matches.
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        # For real directories, also pull in their immediate contents.
        # NOTE: appending to 'files' while iterating it means those new
        # entries are processed by later iterations of this loop as well.
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
302
303# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the list of configuration files for package *pkg*.

    CONFFILES:<pkg> is used when set, falling back to CONFFILES. The
    entries are resolved to real files under PKGDEST/<pkg> via
    files_from_filevars(), with directories, symlinks and non-existent
    entries filtered out. Returned paths have the leading '.' stripped
    so they are rooted at the package root.
    """
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    # files_from_filevars() resolves paths relative to the cwd, so we must
    # run inside the package root.
    os.chdir(root)
    try:
        conffiles = d.getVar('CONFFILES:%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            conf_list.append(f)

        # Remove the leading '.'
        conf_list = [f[1:] for f in conf_list]
    finally:
        # Fix: always restore the original working directory, even when a
        # lookup above raises, so callers are not left inside PKGDEST.
        os.chdir(cwd)
    return conf_list
335
def checkbuildpath(file, d):
    """
    Return True if *file*'s contents mention TMPDIR, i.e. a build-tree
    path has leaked into the packaged file.
    """
    tmpdir = d.getVar('TMPDIR')
    with open(file) as fh:
        return tmpdir in fh.read()
344
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """
    Extract the source file paths from dwarfsrcfiles output.

    Source entries are the tab-indented lines; the first whitespace
    separated field of each is a path, which is normalised. A dict is
    used to deduplicate while preserving first-seen order, and its key
    view is returned.
    """
    sources = {os.path.normpath(entry.split()[0]): ""
               for entry in dwarfsrcfiles_output.splitlines()
               if entry.startswith("\t")}
    return sources.keys()
353
def source_info(file, d, fatal=True):
    """
    Return the list of debug source files referenced by *file*.

    Runs dwarfsrcfiles on the binary and parses its output. Non-zero
    exit codes other than 255 are reported; with fatal=True (default)
    the build is aborted, otherwise only a note is logged and whatever
    output was produced is still parsed.
    """
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        # Keep the partial output; it may still contain usable entries.
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)

    return list(debugsources)
375
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # dv holds the debug-split configuration; the keys used here are
    # "libdir", "dir", "append" (debug file placement) and "srcdir"
    # (whether to collect debug source information).
    #
    # return a mapping of files:debugsources

    import stat
    import subprocess

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Stripping a signed kernel module would invalidate its signature, so
    # leave such modules untouched.
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Make sure the file is writable for objcopy; the original mode is
    # restored at the end.
    # NOTE(review): this condition is true whenever the file is readable, so
    # the chmod runs for nearly every file; presumably
    # "not os.access(file, os.R_OK)" was intended — confirm before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
425
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike the function above, there is no way to split a static library
    # two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # dv holds the debug-split configuration; the keys used here are
    # "staticlibdir", "staticdir", "staticappend" (debug copy placement)
    # and "srcdir" (whether to collect debug source information).
    #
    # return a mapping of files:debugsources

    import stat
    # Fix: shutil.copy2() is called below but shutil was never imported in
    # this function (bbclass python defs only get function-scope imports).
    import shutil

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    # Make sure the file is readable/writable; the original mode is restored
    # at the end.
    # NOTE(review): this condition is true whenever the file is readable —
    # same suspected inversion as in splitdebuginfo(); confirm upstream
    # intent before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
467
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html

    import subprocess

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    # Locate the split debuginfo file produced earlier for this binary.
    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading " [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # 'A' (alloc) absent means the section is not loaded at runtime.
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove stale outputs from a previous run before regenerating them.
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
559
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # debugsrcdir: target path (e.g. /usr/src/debug) the sources are copied
    # under (inside PKGD); sources: list of source paths from dwarfsrcfiles.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Map each -fdebug-prefix-map=<old>=<new> from CFLAGS, skipping the
        # sysroot remappings (those sources are not ours to package).
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Create debugsrcdir under PKGD, remembering which components we had
        # to create so empty ones can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
644
645#
646# Package data handling routines
647#
648
def get_package_mapping (pkg, basepkg, d, depversions=None):
    """
    Map *pkg* to its final (possibly renamed) package name.

    Reads the per-package pkgdata for *pkg* and, if a PKG:<pkg> rename
    was recorded, returns the new name; otherwise returns *pkg*
    unchanged. depversions is the version-constraint list for this
    dependency as produced by bb.utils.explode_dep_versions2().
    """
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG:%s" % pkg

    if key in data:
        if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
            bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES:%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg
673
def get_package_additional_metadata(pkg_type, d):
    """
    Return extra control-file metadata for the given backend.

    The backend-specific PACKAGE_ADD_METADATA_<TYPE> variable takes
    precedence over the generic PACKAGE_ADD_METADATA. Fields in the
    winning variable are split (default separator "\\n"), stripped and
    joined with newlines. Returns None if neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        if d.getVar(key, False) is None:
            continue
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        stripped = (field.strip() for field in oe.data.typed_value(key, d))
        return "\n".join(stripped).strip()
684
def runtime_mapping_rename(varname, pkg, d):
    """
    Rewrite the dependency variable *varname* so that any dependency
    which was renamed during packaging is referred to by its new name.
    Version constraints attached to each dependency are preserved.
    """
    remapped = {}
    current = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    for dep, versions in current.items():
        mapped = get_package_mapping(dep, pkg, d, versions)
        if mapped != dep:
            bb.note("package name mapping done: %s -> %s" % (dep, mapped))
        remapped[mapped] = versions

    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))
699
700#
701# Used by do_packagedata (and possibly other routines post do_package)
702#
703
704PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}"
705PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}"
706package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
707package_get_auto_pr[vardeps] += "PRSERV_ACTIVE"
python package_get_auto_pr() {
    # Obtain the automatic PR (package revision) value, either from a
    # lockdown export file or from a PR service, and handle the AUTOINC
    # placeholder in PKGV. Results land in PRAUTO / PRSERV_PV_AUTOINC.
    import oe.prservice

    # Find the do_package task unihash for this recipe from the task
    # dependency data; it is used as the checksum key for the PR service.
    def get_do_package_hash(pn):
        if d.getVar("BB_RUNTASK") != "do_package":
            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
            for dep in taskdepdata:
                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
                    return taskdepdata[dep][6]
        return None

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        d.setVar("PRSERV_PV_AUTOINC", "0")
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = get_do_package_hash(pn)

    # If do_package isn't in the dependencies, we can't get the checksum...
    if not checksum:
        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
        #for dep in taskdepdata:
        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
        return

    # Lockdown mode: PR values come from a previously exported file, not
    # from a live PR service.
    if d.getVar('PRSERV_LOCKDOWN'):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            # Resolve the AUTOINC placeholder via the PR service, keyed on
            # the source revision.
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PRSERV_PV_AUTOINC", str(value))

            auto_pr = conn.getPR(version, pkgarch, checksum)
            conn.close()
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
770
771#
772# Package functions suitable for inclusion in PACKAGEFUNCS
773#
774
python package_convert_pr_autoinc() {
    # Replace the AUTOINC marker in PKGV with the PRSERV_PV_AUTOINC
    # reference, and turn the PR-service variables into @...@ placeholders
    # that are substituted later when the real values are known.
    pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))

    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
}
786
787LOCALEBASEPN ??= "${PN}"
788
python package_do_split_locales() {
    # Split the files under ${datadir}/locale out of the main package into
    # one <LOCALEBASEPN>-locale-<lang> package per locale, setting FILES,
    # SUMMARY, DESCRIPTION, RPROVIDES and RRECOMMENDS for each.
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all -locale package is superseded by the per-locale ones.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
}
843
python perform_packagecopy () {
    # Copy the do_install output (D) into the packaging work area (PKGD),
    # which all subsequent PACKAGEFUNCS operate on.
    import subprocess
    import shutil

    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
861perform_packagecopy[cleandirs] = "${PKGD}"
862perform_packagecopy[dirs] = "${PKGD}"
863
864# We generate a master list of directories to process, we start by
865# seeding this list with reasonable defaults, then load from
866# the fs-perms.txt files
python fixup_perms () {
    # Apply the ownership/permission policy described by the
    # FILESYSTEM_PERMS_TABLES (fs-perms.txt) files to the tree staged
    # in PKGD, after first defaulting the standard bitbake.conf
    # directories to 0755 root:root.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        # (renamed from 'str', which shadowed the builtin)
        perms_files = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                # reuse the already-resolved path instead of running
                # the BBPATH search a second time
                perms_files += " %s" % confpath
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return perms_files



    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        # a link entry overrides any earlier perms entry
                        # for the same path, and vice versa below
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
1103
def package_debug_vars(d):
    """
    Return the path/suffix settings that control where split-out debug
    information is placed, selected by PACKAGE_DEBUG_SPLIT_STYLE.

    The returned dict provides: "append"/"staticappend" (filename
    suffixes for debug files), "dir"/"staticdir" (per-directory debug
    subdirectory names), "libdir"/"staticlibdir" (prefix directories)
    and "srcdir" (where debug sources go; empty disables source copy).
    """
    # Read the style once instead of re-querying the datastore per branch.
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')
    if style == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    elif style == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "",
        }
    else:
        # Original OE-core, a.k.a. ".debug", style debug info (the default).
        # 'debug-with-srcpkg' uses the identical layout — it only changes
        # how populate_packages ships the sources — so the previously
        # duplicated branches are merged here.
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }

    return debug_vars
1151
python split_and_strip_files () {
    # Identify the ELF binaries in PKGD, split their debug information
    # (and optionally sources) out for the -dbg packages, then strip the
    # runtime copies, honouring the INHIBIT_PACKAGE_* controls.
    import stat, errno
    import subprocess

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    # Use e.errno rather than unpacking e.args, which is
                    # fragile for OSError subclasses
                    if e.errno != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check its an executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                # break hardlink
                bb.utils.break_hardlinks(file)
                elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                # Use e.errno rather than unpacking e.args, which is
                # fragile for OSError subclasses
                if e.errno != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
}
1375
python populate_packages () {
    # Split the files staged in PKGD into per-package trees under
    # PKGDEST/<pkgname>, driven by each package's FILES variable, then
    # flag anything installed but not shipped by any package.
    import glob, re

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if not src_package_name in packages:
            packages.append(src_package_name)
        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            oe.qa.handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    # Paths already assigned to some package. A set keeps the frequent
    # membership tests O(1); only membership is ever used, so this is
    # behaviorally identical to the previous list.
    seen = set()

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            # Actually apply the fix: str.replace() returns a new string
            # and its result was previously discarded, so the '//' was
            # never removed despite the message above.
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.add(file)

            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.add(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            # Regular files are hardlinked into place; symlinks are copied
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                        "parent directory structure does not exist. One of "
                        "its parent directories is a symlink whose target "
                        "directory is not included in the package." %
                        (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    for pkg in packages:
        licenses = d.getVar('_exclude_incompatible-' + pkg)
        if licenses:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)
}
populate_packages[dirs] = "${D}"
1540
1541python package_fixsymlinks () {
1542 import errno
1543 pkgdest = d.getVar('PKGDEST')
1544 packages = d.getVar("PACKAGES", False).split()
1545
1546 dangling_links = {}
1547 pkg_files = {}
1548 for pkg in packages:
1549 dangling_links[pkg] = []
1550 pkg_files[pkg] = []
1551 inst_root = os.path.join(pkgdest, pkg)
1552 for path in pkgfiles[pkg]:
1553 rpath = path[len(inst_root):]
1554 pkg_files[pkg].append(rpath)
1555 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1556 if not cpath.lexists(rtarget):
1557 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1558
1559 newrdepends = {}
1560 for pkg in dangling_links:
1561 for l in dangling_links[pkg]:
1562 found = False
1563 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1564 for p in packages:
1565 if l in pkg_files[p]:
1566 found = True
1567 bb.debug(1, "target found in %s" % p)
1568 if p == pkg:
1569 break
1570 if pkg not in newrdepends:
1571 newrdepends[pkg] = []
1572 newrdepends[pkg].append(p)
1573 break
1574 if found == False:
1575 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1576
1577 for pkg in newrdepends:
1578 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
1579 for p in newrdepends[pkg]:
1580 if p not in rdepends:
1581 rdepends[p] = []
1582 d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1583}
1584
1585
python package_package_name_hook() {
    """
    Default (no-op) package_name_hook. A package_name_hook
    implementation may rewrite output package names by changing the
    per-package PKG variables; debian.bbclass supplies a real one.
    """
    pass
}
1593
# Expose package_name_hook so inheriting classes can override it via
# BitBake's EXPORT_FUNCTIONS mechanism.
EXPORT_FUNCTIONS package_name_hook


# Per-recipe working area where packaging metadata is emitted (used as
# the output directory by emit_pkgdata below).
PKGDESTWORK = "${WORKDIR}/pkgdata"

# Variables written out per package into the pkgdata runtime files by
# emit_pkgdata.
PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
1600
python emit_pkgdata() {
    # Save the per-recipe and per-package metadata (PKGDATA_VARS values,
    # file lists and sizes, scriptlets, provider symlinks) into
    # PKGDESTWORK for use by later packaging steps.
    from glob import glob
    import json
    import bb.compress.zstd

    # Prepend a fragment to pkg_postinst so that any
    # pkg_postinst_ontarget content is deferred to first boot when run
    # during image construction ($D set).
    def process_postinst_on_target(pkg, mlprefix):
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst:%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst:%s' % pkg, postinst)

    # Insert "set -e" after any shebang so scriptlet failures propagate.
    def add_set_e_to_scriptlets(pkg):
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
            d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)

    # Write "VAR:pkg: value" (preferred) or "VAR: value" if only the
    # unsuffixed variable is set; returns the value written, if any.
    def write_if_exists(f, pkg, var):
        def encode(text):
            # (parameter renamed from 'str', which shadowed the builtin)
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(text)[0].decode("latin1")

        val = d.getVar('%s:%s' % (var, pkg))
        if val:
            f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG:%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG:%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG:%s' % pkg, pkg)

        extended_data = {
            "files_info": {}
        }

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        # Hardlinked files share an inode; count each inode's size once
        seen = set()
        for f in pkgfiles[pkg]:
            fpath = os.sep + os.path.relpath(f, pkgdestpkg)

            fstat = os.lstat(f)
            files[fpath] = fstat.st_size

            extended_data["files_info"].setdefault(fpath, {})
            extended_data["files_info"][fpath]['size'] = fstat.st_size

            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size

            if fpath in pkgdebugsource:
                extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
                del pkgdebugsource[fpath]

        d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                # called for its side effect of writing the variable;
                # the return value is not needed here
                write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
                write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)

            sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))

        subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
        num_threads = int(d.getVar("BB_NUMBER_THREADS"))
        with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
            json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
1781
# Shell fragment appended to package postinst scriptlets: run ldconfig
# on the target only ($D is empty there); during offline/image
# construction ($D set) it does nothing. Body kept byte-identical since
# this text is injected verbatim into generated scriptlets.
ldconfig_postinst_fragment() {
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1787
# rpmdeps invocation used by package_do_filedeps to extract per-file
# runtime provides/requires information.
RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"

# Collect perfile run-time dependency metadata
# Output:
#  FILERPROVIDESFLIST:pkg - list of all files w/ deps
#  FILERPROVIDES:filepath:pkg - per file dep
#
#  FILERDEPENDSFLIST:pkg - list of all files w/ deps
#  FILERDEPENDS:filepath:pkg - per file dep
1797
python package_do_filedeps() {
    # Run rpmdeps over each package's files (in parallel, in chunks of
    # 100) and record per-file provides/depends in the
    # FILER{PROVIDES,DEPENDS}[...] variables.
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    CHUNK = 100

    work = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        # Skip package types whose contents carry no useful per-file deps
        if pkg.endswith(('-dbg', '-doc', '-src')) or pkg.startswith('kernel-module-'):
            continue
        if any(infix in pkg for infix in ('-locale-', '-localedata-', '-gconv-', '-charmap-')):
            continue
        flist = pkgfiles[pkg]
        for start in range(0, len(flist), CHUNK):
            work.append((pkg, flist[start:start + CHUNK], rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, work, d)

    provides_files = {}
    requires_files = {}

    for pkg, provides, requires in processed:
        prov_list = provides_files.setdefault(pkg, [])
        req_list = requires_files.setdefault(pkg, [])

        for file in sorted(provides):
            prov_list.append(file)
            d.appendVar("FILERPROVIDES:" + file + ":" + pkg, " " + " ".join(provides[file]))

        for file in sorted(requires):
            req_list.append(file)
            d.appendVar("FILERDEPENDS:" + file + ":" + pkg, " " + " ".join(requires[file]))

    for pkg, flist in requires_files.items():
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(flist)))
    for pkg, flist in provides_files.items():
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(flist)))
}
1846
# Where shlib provider data from dependencies is read from ...
SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
# ... and where this recipe writes its own per-package shlib provider lists
SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1849
python package_do_shlibs() {
    # Record which shared libraries each package provides (written to
    # SHLIBSWORKDIR/<pkg>.list) and compute run-time dependencies from the
    # libraries each package's binaries need (written to
    # PKGDEST/<pkg>.shlibdeps, consumed later by read_shlibdeps).  Handles
    # ELF, Darwin and mingw binaries, honours PRIVATE_LIBS/ASSUME_SHLIBS,
    # and appends an ldconfig postinst when libraries land in ${baselib}.
    import itertools
    # shlex.quote replaces pipes.quote: the pipes module is deprecated
    # since Python 3.11 and removed in 3.13
    import re, shlex, fnmatch
    import subprocess

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        # Parse 'objdump -p' output for one ELF file.  Returns
        # (needs_ldconfig, needed sonames with RPATH context, provided
        # sonames, pending symlink-snap renames).
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Record provides/needs for a Mach-O library using otool.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                # communicate() yields bytes; decode before line-splitting
                for l in out.decode("utf-8").split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.decode("utf-8").split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        # Record provides/needs for PE binaries using objdump.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            # This lookup used the pre-honister 'PV_<pkg>' override spelling,
            # which can never be set by post-conversion metadata
            pkgver = d.getVar('PV:' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            bb.utils.rename(old, new)
            pkgfiles[pkg].remove(old)

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                # Prefer RPATH entries, then all known provider dirs, then
                # the standard library search path
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
}
2137
python package_do_pkgconfig () {
    # Track which packages provide and require which pkg-config (.pc)
    # modules.  Provided modules are written to SHLIBSWORKDIR/<pkg>.pclist,
    # and inter-package requirements to PKGDEST/<pkg>.pcdeps (consumed later
    # by read_shlibdeps).
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if not m:
                continue
            # Fresh datastore per .pc file so its variable assignments can
            # be expanded within later fields of the same file
            pd = bb.data.init()
            pkgconfig_provided[pkg].append(os.path.basename(m.group(1)))
            if not os.access(file, os.R_OK):
                continue
            with open(file) as f:
                for line in f:
                    vm = var_re.match(line)
                    if vm:
                        pd.setVar(vm.group(1), pd.expand(vm.group(2)))
                        continue
                    fm = field_re.match(line)
                    if fm and fm.group(1) == 'Requires':
                        pkgconfig_needed[pkg] += pd.expand(fm.group(2)).replace(',', ' ').split()

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg]:
            with open(pkgs_file, 'w') as f:
                for provided in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % provided)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if not m:
                continue
            pkg = m.group(1)
            with open(os.path.join(dir, file)) as fd:
                pkgconfig_provided[pkg] = [line.rstrip() for line in fd.readlines()]

    for pkg in packages.split():
        deps = []
        for module in pkgconfig_needed[pkg]:
            found = False
            for provider in pkgconfig_provided.keys():
                if module in pkgconfig_provided[provider]:
                    if provider != pkg and provider not in deps:
                        deps.append(provider)
                    found = True
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % module)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
}
2219
def read_libdep_files(d):
    """Read the per-package library dependency files from PKGDEST.

    Collects ``<pkg>.shlibdeps``, ``<pkg>.pcdeps`` and ``<pkg>.clilibdeps``
    (written by package_do_shlibs/package_do_pkgconfig and friends) for
    every package in PACKAGES.

    Returns a dict mapping each package name to a dict of
    ``dependency -> [version constraints]``.  The first file to mention a
    dependency wins; later duplicates are ignored.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: the original called l.rstrip() without assigning
                    # the result, so the trailing newline was never removed
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
2237
python read_shlibdeps () {
    # Fold the shlib/pkgconfig dependency files gathered by
    # read_libdep_files() into each package's RDEPENDS.
    pkglibdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES').split():
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for dep in sorted(pkglibdeps[pkg]):
            # Ensure the dep is present, then merge any version constraints
            # that are not already recorded
            constraints = rdepends.setdefault(dep, [])
            for version in pkglibdeps[pkg][dep]:
                if version not in constraints:
                    constraints.append(version)
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
2253
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, it will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    # For each build-time dependency in 'depends', add <mapped-dep><suffix>
    # (e.g. B-dev) to RRECOMMENDS:<pkg>.  Only used for the -dev suffix.
    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            # Native/cross tools and virtual providers have no on-target
            # counterpart package to recommend
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg suffix so the mapping below does
            # not build names like B-dev-dev
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Like pkg_adddeprrecs but driven by run-time dependencies; used for
    # every prefix/suffix other than -dev.
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Append dep to list, avoiding duplicates
    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    # All build-time dependencies of the recipe
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of the run-time dependencies of every package
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Group packages by the prefix/postfix that matches them, remembering
    # the base package name and the function that rebuilds the full name
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): pkg[:-len(prefix)] strips from the END for a
                # PREFIX match; pkg[len(prefix):] looks intended — confirm
                # (DEPCHAIN_PRE is rarely set, so this path is little used)
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # -dbg packages normally recommend the -dbg of their library deps;
        # packagegroups (or DEPCHAIN_DBGDEFAULTDEPS) use the generic path
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out flag
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            # One package with this suffix: use the union of all RDEPENDS;
            # several: use only the parent package's RDEPENDS
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
2375
2376# Since bitbake can't determine which variables are accessed during package
2377# iteration, we need to list them here:
2378PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
2379
def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    """Return the space-separated list of variable names whose values feed
    the do_package task signature: every name in the 'pkgvars' variable,
    both bare and with a ':<pkg>' suffix for each package in PACKAGES,
    plus a per-package '_exclude_incompatible-<pkg>' marker.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES") or "").split()
    vars = (d.getVar(pkgvars) or "").split()
    for v in vars:
        ret.append(v)
    for p in pkgs:
        for v in vars:
            ret.append(v + ":" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes.
        # Fix: this append belongs inside the package loop — at function
        # level it only covered the last package and raised NameError for
        # recipes with an empty PACKAGES list.
        ret.append('_exclude_incompatible-%s' % p)
    return " ".join(ret)
2394
2395PACKAGE_PREPROCESS_FUNCS ?= ""
2396# Functions for setting up PKGD
2397PACKAGEBUILDPKGD ?= " \
2398 package_prepare_pkgdata \
2399 perform_packagecopy \
2400 ${PACKAGE_PREPROCESS_FUNCS} \
2401 split_and_strip_files \
2402 fixup_perms \
2403 "
2404# Functions which split PKGD up into separate packages
2405PACKAGESPLITFUNCS ?= " \
2406 package_do_split_locales \
2407 populate_packages"
2408# Functions which process metadata based on split packages
2409PACKAGEFUNCS += " \
2410 package_fixsymlinks \
2411 package_name_hook \
2412 package_do_filedeps \
2413 package_do_shlibs \
2414 package_do_pkgconfig \
2415 read_shlibdeps \
2416 package_depchains \
2417 emit_pkgdata"
2418
python do_package () {
    # Top-level packaging task: sanity-check the environment, pre-expand
    # hot variables, build PKGD from D (PACKAGEBUILDPKGD), split PKGD into
    # per-package trees under PKGDEST (PACKAGESPLITFUNCS), then run the
    # metadata steps in PACKAGEFUNCS (file deps, shlibs, pkgconfig,
    # depchains, pkgdata emission).
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "4"

    # Init cachedpath
    # cpath is module-global so the split/metadata functions share the same
    # stat cache
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        oe.qa.handle_error("var-undefined", msg, d)
        return

    # Convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC} (step (a) above)
    bb.build.exec_func("package_convert_pr_autoinc", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh CachedPath instance — presumably to drop stat results cached
    # before the PKGD setup functions modified the tree; confirm
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    # Fail the task now if any of the steps above recorded QA errors
    oe.qa.exit_if_errors(d)
}
2502
do_package[dirs] = "${SHLIBSWORKDIR} ${D}"
# Re-run do_package whenever any listed packaging function or any
# per-package variable (see gen_packagevar) changes
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# do_package output can be restored from shared state
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
2511
python do_package_setscene () {
    # Restore do_package output from shared state instead of rebuilding
    sstate_setscene(d)
}
2515addtask do_package_setscene
2516
2517# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
2518# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
    # Resolve the PR service auto-increment value before staging pkgdata
    bb.build.exec_func("package_get_auto_pr", d)

    # Hardlink-copy PKGDESTWORK into a private input directory: PKGDESTWORK
    # itself can be cleaned by both do_package_setscene and
    # do_packagedata_setscene, which would otherwise race
    src = d.expand("${PKGDESTWORK}")
    dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
    oe.path.copyhardlinktree(src, dest)

    # Substitute the PR/AUTOINC placeholders in the staged copy
    bb.build.exec_func("packagedata_translate_pr_autoinc", d)
}
2528do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
2529
2530# Translate the EXTENDPRAUTO and AUTOINC to the final values
packagedata_translate_pr_autoinc() {
    # In-place sed over every staged pkgdata file, replacing the
    # @PRSERV_PV_AUTOINC@ and @EXTENDPRAUTO@ placeholders with their
    # final values
    find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
        sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
            -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
}
2536
addtask packagedata before do_build after do_package

# do_packagedata output can be restored from shared state; the translated
# input directory is published into the global PKGDATA_DIR
SSTATETASKS += "do_packagedata"
do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
# pkgdata is tracked per machine
do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
2543
python do_packagedata_setscene () {
    # Restore do_packagedata output from shared state instead of rebuilding
    sstate_setscene(d)
}
2547addtask do_packagedata_setscene
2548
2549#
2550# Helper functions for the package writing classes
2551#
2552
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    pkg = d.getVar("PKG")
    # Remap each runtime dependency variable against the renamed package
    for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(varname, pkg, d)