#
# Packaging process
#
# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
# Taking D and splitting it up into the packages listed in PACKAGES, placing the
# resulting output in PKGDEST.
#
# There are the following default steps but PACKAGEFUNCS can be extended:
#
# a) package_get_auto_pr - get PRAUTO from remote PR service
#
# b) perform_packagecopy - Copy D into PKGD
#
# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
#
# d) split_and_strip_files - split the files into runtime and debug and strip them.
#    Debug files include debug info split, and associated sources that end up in -dbg packages
#
# e) fixup_perms - Fix up permissions in the package before we split it.
#
# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
#    Also triggers the binary stripping code to put files in -dbg packages.
#
# g) package_do_filedeps - Collect perfile run-time dependency metadata
#    The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
#    a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
#    dependencies found. Also stores the package name so anyone else using this library
#    knows which package to depend on.
#
# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
#
# j) read_shlibdeps - Reads the stored shlibs information into the metadata
#
# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
#
# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
#    packaging steps

inherit packagedata
inherit prserv
inherit chrpath

# Need the package_qa_handle_error() in insane.bbclass
inherit insane

# PKGD is the working copy of ${D} that the packaging steps operate on;
# PKGDEST receives the per-package split, one subdirectory per package.
PKGD = "${WORKDIR}/package"
PKGDEST = "${WORKDIR}/packages-split"

# Section assigned to the generated -locale packages (empty by default)
LOCALE_SECTION ?= ''

ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
PACKAGE_DEPENDS += "rpm-native"
57
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes glibc-style <UXXXX> codepoint escapes and rewrites characters
    that are not valid in package names.
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Build a literal '\\uXXXX' sequence and decode it into the real
            # character.  The previous form, ('\u%s' % cp).decode(...), is a
            # syntax error on Python 3 (\u in a literal must be followed by
            # exactly 4 hex digits) and str has no .decode() there.
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
74
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of package names generated by the split.
    """
    # Hoisted out of the per-object loop below where they used to be
    # re-imported on every iteration.
    import re, stat

    dvar = d.getVar('PKGD', True)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # Prefix multilib builds: both the generated package names and any extra
    # dependencies need the MLPREFIX applied.
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends is None:
        extra_depends = d.getVar("PN", True)

    if not summary:
        summary = description

    for o in sorted(objs):
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if isinstance(aux_files_pattern, list):
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if isinstance(aux_files_pattern_verbatim, list):
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION_' + pkg, True):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg, True):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
228
# file(1) is invoked during packaging (e.g. for ELF identification)
PACKAGE_DEPENDS += "file-native"

python () {
    # Anonymous function, runs at parse time: make do_package depend on the
    # sysroot population of every PACKAGE_DEPENDS entry, but only for recipes
    # that actually generate packages.
    if d.getVar('PACKAGES', True) != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
            deps += " %s:do_populate_sysroot" % dep
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
241
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand a list of FILES-style entries (relative to the current working
    directory) into the matching paths.  Entries may contain glob patterns;
    directories in the result are expanded one entry at a time below.
    Returns a list of './'-prefixed paths (symlinks, directories and files).
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a './'-relative path so glob() operates
        # under the current working directory.
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # If the glob expanded to something other than the literal entry
            # itself, use the expansion; otherwise fall through and keep the
            # original (possibly non-existent) entry.
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    # NOTE: 'files' is intentionally extended while being iterated - entries
    # appended here are themselves visited later in the same loop, which is
    # what recursively expands nested directories.  Do not convert this to a
    # comprehension or iterate over a copy.
    for f in files:
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files
268
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the list of configuration files for package pkg.

    Reads CONFFILES_<pkg> (falling back to CONFFILES), expands the entries
    relative to PKGDEST/<pkg> and filters out directories, symlinks and
    non-existent paths, returning target-absolute paths.
    """
    pkgdest = d.getVar('PKGDEST', True)
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # try/finally guarantees the working directory is restored even if the
    # expansion below raises; the original code leaked the chdir on error.
    try:
        conffiles = d.getVar('CONFFILES_%s' % pkg, True)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES', True)
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            conf_list.append(f)

        # Remove the leading '.' (entries are './'-prefixed), leaving the
        # target-absolute path.
        conf_list = [f[1:] for f in conf_list]
    finally:
        os.chdir(cwd)
    return conf_list
301
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources
    #
    # Returns 1 for skipped files (kernel modules), 0 on success; any tool
    # failure is fatal via bb.fatal().

    import stat

    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
    workdir = d.getVar("WORKDIR", True)
    workparentdir = d.getVar("DEBUGSRC_OVERRIDE_PATH", True) or os.path.dirname(os.path.dirname(workdir))

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Make sure the file is both readable and writable before operating on
    # it, remembering the original mode so it can be restored afterwards.
    # (The previous condition "not W_OK or R_OK" had the read test inverted
    # and therefore fired for every readable file.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (debugedit, workparentdir, debugsrcdir, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debug info into the separate debug file
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Restore the original permissions if we loosened them above
    if newmode:
        os.chmod(file, origmode)

    return 0
351
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # Reads ${WORKDIR}/debugsources.list (written by splitdebuginfo via
    # debugedit) and copies the referenced source files under
    # PKGD/<debugsrcdir> using cpio.
    # NOTE(review): relies on a module-level 'cpath' cached-path object being
    # provided by the surrounding class - confirm before reusing elsewhere.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # Remember which directories we create, so empty ones can be removed
        # again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        # Pipeline: unique NUL-separated source list -> drop compiler-internal
        # entries -> keep only paths belonging to this package's workdir ->
        # cpio-copy them into the debug source directory.
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
406
#
# Package data handling routines
#

def get_package_mapping (pkg, basepkg, d):
    """
    Return the runtime name that pkg was renamed to (its PKG_<pkg> entry in
    pkgdata), or pkg itself when no rename was recorded.
    """
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    mapkey = "PKG_%s" % pkg

    # No recorded rename - keep the original name.
    if mapkey not in subdata:
        return pkg

    mapped = subdata[mapkey]
    # Have to avoid undoing the write_extra_pkgs(global_variants...)
    if bb.data.inherits_class('allarch', d) and mapped == basepkg:
        return pkg
    return mapped
424
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra metadata lines for the given backend (rpm/ipk/deb).

    Checks PACKAGE_ADD_METADATA_<PKG_TYPE> first, then the generic
    PACKAGE_ADD_METADATA; returns None when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    candidates = ["%s_%s" % (base_key, pkg_type.upper()), base_key]
    for key in candidates:
        if d.getVar(key, False) is None:
            continue
        # Treat the variable as a list, split on literal '\n' by default.
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        fields = []
        for field in oe.data.typed_value(key, d):
            fields.append(field.strip())
        return "\n".join(fields).strip()
435
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite every dependency in the variable varname through
    get_package_mapping() so renamed packages are referenced by their
    final (PKG_*) names.
    """
    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))

    # Packagegroups are left untouched.
    if bb.data.inherits_class('packagegroup', d):
        return

    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    remapped = {}
    for depend, constraint in deps.items():
        remapped[get_package_mapping(depend, pkg, d)] = constraint

    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
451
#
# Package functions suitable for inclusion in PACKAGEFUNCS
#

python package_get_auto_pr() {
    # Resolve PRAUTO (the automatic PR component) and the AUTOINC part of
    # PKGV, either from a lockdown export file or from the PR service.
    import oe.prservice
    import re

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV", True)

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST', True):
        if 'AUTOINC' in pkgv:
            # No server to allocate increments - pin AUTOINC to 0.
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
        return

    auto_pr = None
    pv = d.getVar("PV", True)
    version = d.getVar("PRAUTOINX", True)
    pkgarch = d.getVar("PACKAGE_ARCH", True)
    checksum = d.getVar("BB_TASKHASH", True)

    # Lockdown mode: values come from a previously exported file, never from
    # a live server.
    if d.getVar('PRSERV_LOCKDOWN', True):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        # Reuse an existing connection if one was stashed in the datastore.
        conn = d.getVar("__PRSERV_CONN", True)
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
            if conn is not None:
                if "AUTOINC" in pkgv:
                    # Ask the server for the AUTOINC value, keyed on SRCREV.
                    srcpv = bb.fetch2.get_srcrev(d)
                    base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                    value = conn.getPR(base_ver, pkgarch, srcpv)
                    d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))

        auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
505
LOCALEBASEPN ??= "${PN}"

python package_do_split_locales() {
    # Split the locale files under ${datadir}/locale into one
    # <LOCALEBASEPN>-locale-<ln> package per locale, appending the new
    # package names to PACKAGES.
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all -locale package is replaced by per-locale packages below.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
562
python perform_packagecopy () {
    # Copy the install output ${D} into ${PKGD}, the tree that all further
    # packaging steps operate on, leaving ${D} untouched.
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"
581
# We generate a master list of directories to process, we start by
# seeding this list with reasonable defaults, then load from
# the fs-perms.txt files
python fixup_perms () {
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        # Parse one fs-perms.txt line into either a link entry (3 fields)
        # or a directory entry (8 fields); anything else is flagged as a
        # perm-config QA error.
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        # "-" or empty means "leave the mode alone"; otherwise octal.
        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # Later tables override earlier/default entries for the
                    # same path.
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Second pass: apply mode/uid/gid to the remaining (non-link) entries,
    # optionally walking the whole subtree when 'walk' is set.
    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
813
814python split_and_strip_files () {
815 import stat, errno
816
817 dvar = d.getVar('PKGD', True)
818 pn = d.getVar('PN', True)
819
820 # We default to '.debug' style
821 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
822 # Single debug-file-directory style debug info
823 debugappend = ".debug"
824 debugdir = ""
825 debuglibdir = "/usr/lib/debug"
826 debugsrcdir = "/usr/src/debug"
827 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
828 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
829 debugappend = ""
830 debugdir = "/.debug"
831 debuglibdir = ""
832 debugsrcdir = ""
833 else:
834 # Original OE-core, a.k.a. ".debug", style debug info
835 debugappend = ""
836 debugdir = "/.debug"
837 debuglibdir = ""
838 debugsrcdir = "/usr/src/debug"
839
840 sourcefile = d.expand("${WORKDIR}/debugsources.list")
841 bb.utils.remove(sourcefile)
842
843 os.chdir(dvar)
844
845 # Return type (bits):
846 # 0 - not elf
847 # 1 - ELF
848 # 2 - stripped
849 # 4 - executable
850 # 8 - shared library
851 # 16 - kernel module
852 def isELF(path):
853 type = 0
854 ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))
855
856 if ret:
857 msg = "split_and_strip_files: 'file %s' failed" % path
858 package_qa_handle_error("split-strip", msg, d)
859 return type
860
861 # Not stripped
862 if "ELF" in result:
863 type |= 1
864 if "not stripped" not in result:
865 type |= 2
866 if "executable" in result:
867 type |= 4
868 if "shared" in result:
869 type |= 8
870 return type
871
872
873 #
874 # First lets figure out all of the files we may have to process ... do this only once!
875 #
876 elffiles = {}
877 symlinks = {}
878 kernmods = []
879 inodes = {}
880 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
881 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
882 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
883 for root, dirs, files in cpath.walk(dvar):
884 for f in files:
885 file = os.path.join(root, f)
886 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
887 kernmods.append(file)
888 continue
889
890 # Skip debug files
891 if debugappend and file.endswith(debugappend):
892 continue
893 if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
894 continue
895
896 try:
897 ltarget = cpath.realpath(file, dvar, False)
898 s = cpath.lstat(ltarget)
899 except OSError as e:
900 (err, strerror) = e.args
901 if err != errno.ENOENT:
902 raise
903 # Skip broken symlinks
904 continue
905 if not s:
906 continue
907 # Check its an excutable
908 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
909 or ((file.startswith(libdir) or file.startswith(baselibdir)) and ".so" in f):
910 # If it's a symlink, and points to an ELF file, we capture the readlink target
911 if cpath.islink(file):
912 target = os.readlink(file)
913 if isELF(ltarget):
914 #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
915 symlinks[file] = target
916 continue
917
918 # It's a file (or hardlink), not a link
919 # ...but is it ELF, and is it already stripped?
920 elf_file = isELF(file)
921 if elf_file & 1:
922 if elf_file & 2:
923 if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
924 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
925 else:
926 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
927 package_qa_handle_error("already-stripped", msg, d)
928 continue
929
930 # At this point we have an unstripped elf file. We need to:
931 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
932 # b) Only strip any hardlinked file once (no races)
933 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
934
935 # Use a reference of device ID and inode number to indentify files
936 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
937 if file_reference in inodes:
938 os.unlink(file)
939 os.link(inodes[file_reference][0], file)
940 inodes[file_reference].append(file)
941 else:
942 inodes[file_reference] = [file]
943 # break hardlink
944 bb.utils.copyfile(file, file)
945 elffiles[file] = elf_file
946 # Modified the file so clear the cache
947 cpath.updatecache(file)
948
949 #
950 # First lets process debug splitting
951 #
952 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
953 for file in elffiles:
954 src = file[len(dvar):]
955 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
956 fpath = dvar + dest
957
958 # Split the file...
959 bb.utils.mkdirhier(os.path.dirname(fpath))
960 #bb.note("Split %s -> %s" % (file, fpath))
961 # Only store off the hard link reference if we successfully split!
962 splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)
963
964 # Hardlink our debug symbols to the other hardlink copies
965 for ref in inodes:
966 if len(inodes[ref]) == 1:
967 continue
968 for file in inodes[ref][1:]:
969 src = file[len(dvar):]
970 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
971 fpath = dvar + dest
972 target = inodes[ref][0][len(dvar):]
973 ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
974 bb.utils.mkdirhier(os.path.dirname(fpath))
975 #bb.note("Link %s -> %s" % (fpath, ftarget))
976 os.link(ftarget, fpath)
977
978 # Create symlinks for all cases we were able to split symbols
979 for file in symlinks:
980 src = file[len(dvar):]
981 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
982 fpath = dvar + dest
983 # Skip it if the target doesn't exist
984 try:
985 s = os.stat(fpath)
986 except OSError as e:
987 (err, strerror) = e.args
988 if err != errno.ENOENT:
989 raise
990 continue
991
992 ltarget = symlinks[file]
993 lpath = os.path.dirname(ltarget)
994 lbase = os.path.basename(ltarget)
995 ftarget = ""
996 if lpath and lpath != ".":
997 ftarget += lpath + debugdir + "/"
998 ftarget += lbase + debugappend
999 if lpath.startswith(".."):
1000 ftarget = os.path.join("..", ftarget)
1001 bb.utils.mkdirhier(os.path.dirname(fpath))
1002 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1003 os.symlink(ftarget, fpath)
1004
1005 # Process the debugsrcdir if requested...
1006 # This copies and places the referenced sources for later debugging...
1007 copydebugsources(debugsrcdir, d)
1008 #
1009 # End of debug splitting
1010 #
1011
1012 #
1013 # Now lets go back over things and strip them
1014 #
1015 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
1016 strip = d.getVar("STRIP", True)
1017 sfiles = []
1018 for file in elffiles:
1019 elf_file = int(elffiles[file])
1020 #bb.note("Strip %s" % file)
1021 sfiles.append((file, elf_file, strip))
1022 for f in kernmods:
1023 sfiles.append((f, 16, strip))
1024
1025 oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)
1026
1027 #
1028 # End of strip
1029 #
1030}
1031
1032python populate_packages () {
1033 import glob, re
1034
1035 workdir = d.getVar('WORKDIR', True)
1036 outdir = d.getVar('DEPLOY_DIR', True)
1037 dvar = d.getVar('PKGD', True)
1038 packages = d.getVar('PACKAGES', True)
1039 pn = d.getVar('PN', True)
1040
1041 bb.utils.mkdirhier(outdir)
1042 os.chdir(dvar)
1043
1044 # Sanity check PACKAGES for duplicates
1045 # Sanity should be moved to sanity.bbclass once we have the infrastucture
1046 package_list = []
1047
1048 for pkg in packages.split():
1049 if pkg in package_list:
1050 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1051 package_qa_handle_error("packages-list", msg, d)
1052 else:
1053 package_list.append(pkg)
1054 d.setVar('PACKAGES', ' '.join(package_list))
1055 pkgdest = d.getVar('PKGDEST', True)
1056
1057 seen = []
1058
1059 # os.mkdir masks the permissions with umask so we have to unset it first
1060 oldumask = os.umask(0)
1061
1062 for pkg in package_list:
1063 root = os.path.join(pkgdest, pkg)
1064 bb.utils.mkdirhier(root)
1065
1066 filesvar = d.getVar('FILES_%s' % pkg, True) or ""
1067 if "//" in filesvar:
1068 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1069 package_qa_handle_error("files-invalid", msg, d)
1070 filesvar.replace("//", "/")
1071
1072 origfiles = filesvar.split()
1073 files = files_from_filevars(origfiles)
1074
1075 for file in files:
1076 if (not cpath.islink(file)) and (not cpath.exists(file)):
1077 continue
1078 if file in seen:
1079 continue
1080 seen.append(file)
1081
1082 def mkdir(src, dest, p):
1083 src = os.path.join(src, p)
1084 dest = os.path.join(dest, p)
1085 fstat = cpath.stat(src)
1086 os.mkdir(dest, fstat.st_mode)
1087 os.chown(dest, fstat.st_uid, fstat.st_gid)
1088 if p not in seen:
1089 seen.append(p)
1090 cpath.updatecache(dest)
1091
1092 def mkdir_recurse(src, dest, paths):
1093 if cpath.exists(dest + '/' + paths):
1094 return
1095 while paths.startswith("./"):
1096 paths = paths[2:]
1097 p = "."
1098 for c in paths.split("/"):
1099 p = os.path.join(p, c)
1100 if not cpath.exists(os.path.join(dest, p)):
1101 mkdir(src, dest, p)
1102
1103 if cpath.isdir(file) and not cpath.islink(file):
1104 mkdir_recurse(dvar, root, file)
1105 continue
1106
1107 mkdir_recurse(dvar, root, os.path.dirname(file))
1108 fpath = os.path.join(root,file)
1109 if not cpath.islink(file):
1110 os.link(file, fpath)
1111 fstat = cpath.stat(file)
1112 os.chmod(fpath, fstat.st_mode)
1113 os.chown(fpath, fstat.st_uid, fstat.st_gid)
1114 continue
1115 ret = bb.utils.copyfile(file, fpath)
1116 if ret is False or ret == 0:
1117 raise bb.build.FuncFailed("File population failed")
1118
1119 os.umask(oldumask)
1120 os.chdir(workdir)
1121
1122 # Handle LICENSE_EXCLUSION
1123 package_list = []
1124 for pkg in packages.split():
1125 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
1126 msg = "%s has an incompatible license. Excluding from packaging." % pkg
1127 package_qa_handle_error("incompatible-license", msg, d)
1128 else:
1129 package_list.append(pkg)
1130 d.setVar('PACKAGES', ' '.join(package_list))
1131
1132 unshipped = []
1133 for root, dirs, files in cpath.walk(dvar):
1134 dir = root[len(dvar):]
1135 if not dir:
1136 dir = os.sep
1137 for f in (files + dirs):
1138 path = os.path.join(dir, f)
1139 if ('.' + path) not in seen:
1140 unshipped.append(path)
1141
1142 if unshipped != []:
1143 msg = pn + ": Files/directories were installed but not shipped in any package:"
1144 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
1145 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1146 else:
1147 for f in unshipped:
1148 msg = msg + "\n " + f
1149 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install."
1150 package_qa_handle_error("installed-vs-shipped", msg, d)
1151}
1152populate_packages[dirs] = "${D}"
1153
# For each package, find symlinks whose targets do not resolve inside
# the same package tree ("dangling" links).  When the target is shipped
# by another package, add a runtime dependency on that package;
# otherwise just log the dangling symlink.
python package_fixsymlinks () {
    import errno
    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar("PACKAGES", False).split()

    # Map each package to its (package-relative) file list and the set
    # of link targets that don't exist within the package
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # Target lives in the same package - nothing to add
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS
    # (unversioned - only file presence is known here)
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1197
1198
python package_package_name_hook() {
    """
    A package_name_hook function can be used to rewrite the package names by
    changing PKG.  For an example, see debian.bbclass.  This default
    implementation (exported via EXPORT_FUNCTIONS package_name_hook below)
    intentionally does nothing.
    """
    pass
}
1206
1207EXPORT_FUNCTIONS package_name_hook
1208
1209
1210PKGDESTWORK = "${WORKDIR}/pkgdata"
1211
# Serialize this recipe's packaging metadata into PKGDESTWORK so later
# steps (do_package_write_*, pkgdata lookups) can consume it:
#   ${PN}                       - "PACKAGES: ..." list
#   runtime/<pkg>               - per-package variables (RDEPENDS, FILES_INFO, ...)
#   runtime/<pkg>.packaged      - marker for non-empty/allowed-empty packages
#   runtime-reverse/<final pkg> - symlink keyed on the renamed PKG value
#   runtime-rprovides/<prov>/   - symlinks keyed on RPROVIDES entries
python emit_pkgdata() {
    from glob import glob
    import json

    def write_if_exists(f, pkg, var):
        # Write "VAR_pkg: value" if the package-specific variable is set,
        # otherwise fall back to plain "VAR: value".  Returns the value
        # written (or None) so callers can reuse it (see RPROVIDES below).
        def encode(str):
            import codecs
            # NOTE(review): "string_escape" is a Python 2-only codec;
            # this would raise LookupError under Python 3 - confirm the
            # expected interpreter for this class.
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # Emit a top-level pkgdata file per multilib variant listing the
        # variant-prefixed package names
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Emit a minimal runtime entry mapping each variant-prefixed
        # package back to its base package name.
        # NOTE(review): no trailing newline is written here, unlike the
        # other pkgdata files - presumably harmless; verify consumers.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # PKG_<pkg> is the final (possibly renamed) package name;
        # default it to the metadata name when unset
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        # Record per-file sizes; feeds FILES_INFO and the PKGSIZE total
        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            total_size += fstat.st_size
            files[os.sep + relpth] = fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        rprov = write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for rprovides lookup
        if rprov:
            for p in rprov.strip().split():
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        # Only mark the package as "packaged" when it ships files or is
        # explicitly allowed to be empty
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
1352emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
1353
# Shell fragment appended to pkg_postinst by package_do_shlibs when a
# package installs libraries into the ldconfig search path.  It only
# runs ldconfig for on-target installs ($D empty) and only when the
# ldconfig binary exists on the target.
ldconfig_postinst_fragment() {
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1359
1360RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1361
# Collect per-file run-time dependency metadata
# Output:
#  FILERPROVIDESFLIST_pkg - list of all files w/ deps
#  FILERPROVIDES_filepath_pkg - per-file dep
#
#  FILERDEPENDSFLIST_pkg - list of all files w/ deps
#  FILERDEPENDS_filepath_pkg - per-file dep
1369
# Collect per-file runtime provides/depends metadata by running the
# rpmdeps helper over each package's files (in parallel, 100 files per
# work unit) and store the results in FILERPROVIDES*/FILERDEPENDS*
# variables for emit_pkgdata to serialize.
python package_do_filedeps() {
    if d.getVar('SKIP_FILEDEPS', True) == '1':
        return

    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)
    rpmdeps = d.getVar('RPMDEPS', True)

    def chunks(files, n):
        # Split 'files' into successive lists of at most n entries
        return [files[i:i+n] for i in range(0, len(files), n)]

    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
            continue
        # Debug/doc/locale/charmap/gconv and kernel-module packages
        # carry no useful per-file dependency data - skip them
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
            continue
        for files in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, files, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)

    provides_files = {}
    requires_files = {}

    # Fold the per-chunk results back into per-package variables
    for result in processed:
        (pkg, provides, requires) = result

        if pkg not in provides_files:
            provides_files[pkg] = []
        if pkg not in requires_files:
            requires_files[pkg] = []

        for file in provides:
            provides_files[pkg].append(file)
            key = "FILERPROVIDES_" + file + "_" + pkg
            d.setVar(key, " ".join(provides[file]))

        for file in requires:
            requires_files[pkg].append(file)
            key = "FILERDEPENDS_" + file + "_" + pkg
            d.setVar(key, " ".join(requires[file]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
}
1418
1419SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
1420SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1421
1422python package_do_shlibs() {
1423 import re, pipes
1424 import subprocess as sub
1425
1426 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
1427 if exclude_shlibs:
1428 bb.note("not generating shlibs")
1429 return
1430
1431 lib_re = re.compile("^.*\.so")
1432 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
1433
1434 packages = d.getVar('PACKAGES', True)
1435 targetos = d.getVar('TARGET_OS', True)
1436
1437 workdir = d.getVar('WORKDIR', True)
1438
1439 ver = d.getVar('PKGV', True)
1440 if not ver:
1441 msg = "PKGV not defined"
1442 package_qa_handle_error("pkgv-undefined", msg, d)
1443 return
1444
1445 pkgdest = d.getVar('PKGDEST', True)
1446
1447 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1448
1449 # Take shared lock since we're only reading, not writing
1450 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1451
1452 def linux_so(file, needed, sonames, renames, pkgver):
1453 needs_ldconfig = False
1454 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1455 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
1456 fd = os.popen(cmd)
1457 lines = fd.readlines()
1458 fd.close()
1459 rpath = []
1460 for l in lines:
1461 m = re.match("\s+RPATH\s+([^\s]*)", l)
1462 if m:
1463 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1464 rpath = map(os.path.normpath, rpaths)
1465 for l in lines:
1466 m = re.match("\s+NEEDED\s+([^\s]*)", l)
1467 if m:
1468 dep = m.group(1)
1469 if dep not in needed[pkg]:
1470 needed[pkg].append((dep, file, rpath))
1471 m = re.match("\s+SONAME\s+([^\s]*)", l)
1472 if m:
1473 this_soname = m.group(1)
1474 prov = (this_soname, ldir, pkgver)
1475 if not prov in sonames:
1476 # if library is private (only used by package) then do not build shlib for it
1477 if not private_libs or this_soname not in private_libs:
1478 sonames.append(prov)
1479 if libdir_re.match(os.path.dirname(file)):
1480 needs_ldconfig = True
1481 if snap_symlinks and (os.path.basename(file) != this_soname):
1482 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1483 return needs_ldconfig
1484
1485 def darwin_so(file, needed, sonames, renames, pkgver):
1486 if not os.path.exists(file):
1487 return
1488 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1489
1490 def get_combinations(base):
1491 #
1492 # Given a base library name, find all combinations of this split by "." and "-"
1493 #
1494 combos = []
1495 options = base.split(".")
1496 for i in range(1, len(options) + 1):
1497 combos.append(".".join(options[0:i]))
1498 options = base.split("-")
1499 for i in range(1, len(options) + 1):
1500 combos.append("-".join(options[0:i]))
1501 return combos
1502
1503 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
1504 # Drop suffix
1505 name = os.path.basename(file).rsplit(".",1)[0]
1506 # Find all combinations
1507 combos = get_combinations(name)
1508 for combo in combos:
1509 if not combo in sonames:
1510 prov = (combo, ldir, pkgver)
1511 sonames.append(prov)
1512 if file.endswith('.dylib') or file.endswith('.so'):
1513 rpath = []
1514 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
1515 err, out = p.communicate()
1516 # If returned succesfully, process stderr for results
1517 if p.returncode == 0:
1518 for l in err.split("\n"):
1519 l = l.strip()
1520 if l.startswith('path '):
1521 rpath.append(l.split()[1])
1522
1523 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
1524 err, out = p.communicate()
1525 # If returned succesfully, process stderr for results
1526 if p.returncode == 0:
1527 for l in err.split("\n"):
1528 l = l.strip()
1529 if not l or l.endswith(":"):
1530 continue
1531 if "is not an object file" in l:
1532 continue
1533 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1534 if name and name not in needed[pkg]:
1535 needed[pkg].append((name, file, []))
1536
1537 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
1538 snap_symlinks = True
1539 else:
1540 snap_symlinks = False
1541
1542 if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
1543 use_ldconfig = True
1544 else:
1545 use_ldconfig = False
1546
1547 needed = {}
1548 shlib_provider = oe.package.read_shlib_providers(d)
1549
1550 for pkg in packages.split():
1551 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
1552 private_libs = private_libs.split()
1553 needs_ldconfig = False
1554 bb.debug(2, "calculating shlib provides for %s" % pkg)
1555
1556 pkgver = d.getVar('PKGV_' + pkg, True)
1557 if not pkgver:
1558 pkgver = d.getVar('PV_' + pkg, True)
1559 if not pkgver:
1560 pkgver = ver
1561
1562 needed[pkg] = []
1563 sonames = list()
1564 renames = list()
1565 for file in pkgfiles[pkg]:
1566 soname = None
1567 if cpath.islink(file):
1568 continue
1569 if targetos == "darwin" or targetos == "darwin8":
1570 darwin_so(file, needed, sonames, renames, pkgver)
1571 elif os.access(file, os.X_OK) or lib_re.match(file):
1572 ldconfig = linux_so(file, needed, sonames, renames, pkgver)
1573 needs_ldconfig = needs_ldconfig or ldconfig
1574 for (old, new) in renames:
1575 bb.note("Renaming %s to %s" % (old, new))
1576 os.rename(old, new)
1577 pkgfiles[pkg].remove(old)
1578
1579 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1580 if len(sonames):
1581 fd = open(shlibs_file, 'w')
1582 for s in sonames:
1583 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1584 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1585 if old_pkg != pkg:
1586 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1587 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1588 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1589 if s[0] not in shlib_provider:
1590 shlib_provider[s[0]] = {}
1591 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1592 fd.close()
1593 if needs_ldconfig and use_ldconfig:
1594 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1595 postinst = d.getVar('pkg_postinst_%s' % pkg, True)
1596 if not postinst:
1597 postinst = '#!/bin/sh\n'
1598 postinst += d.getVar('ldconfig_postinst_fragment', True)
1599 d.setVar('pkg_postinst_%s' % pkg, postinst)
1600 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1601
1602 bb.utils.unlockfile(lf)
1603
1604 assumed_libs = d.getVar('ASSUME_SHLIBS', True)
1605 if assumed_libs:
1606 libdir = d.getVar("libdir", True)
1607 for e in assumed_libs.split():
1608 l, dep_pkg = e.split(":")
1609 lib_ver = None
1610 dep_pkg = dep_pkg.rsplit("_", 1)
1611 if len(dep_pkg) == 2:
1612 lib_ver = dep_pkg[1]
1613 dep_pkg = dep_pkg[0]
1614 if l not in shlib_provider:
1615 shlib_provider[l] = {}
1616 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1617
1618 libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
1619
1620 for pkg in packages.split():
1621 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1622
1623 deps = list()
1624 for n in needed[pkg]:
1625 # if n is in private libraries, don't try to search provider for it
1626 # this could cause problem in case some abc.bb provides private
1627 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1628 # but skipping it is still better alternative than providing own
1629 # version and then adding runtime dependency for the same system library
1630 if private_libs and n[0] in private_libs:
1631 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1632 continue
1633 if n[0] in shlib_provider.keys():
1634 shlib_provider_path = list()
1635 for k in shlib_provider[n[0]].keys():
1636 shlib_provider_path.append(k)
1637 match = None
1638 for p in n[2] + shlib_provider_path + libsearchpath:
1639 if p in shlib_provider[n[0]]:
1640 match = p
1641 break
1642 if match:
1643 (dep_pkg, ver_needed) = shlib_provider[n[0]][match]
1644
1645 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1646
1647 if dep_pkg == pkg:
1648 continue
1649
1650 if ver_needed:
1651 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1652 else:
1653 dep = dep_pkg
1654 if not dep in deps:
1655 deps.append(dep)
1656 continue
1657 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1658
1659 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1660 if os.path.exists(deps_file):
1661 os.remove(deps_file)
1662 if len(deps):
1663 fd = open(deps_file, 'w')
1664 for dep in deps:
1665 fd.write(dep + '\n')
1666 fd.close()
1667}
1668
# Track which packages provide and which require .pc (pkg-config)
# modules.  Providers are written to SHLIBSWORKDIR/<pkg>.pclist;
# requirements are resolved against all known .pclist files and written
# to PKGDEST/<pkg>.pcdeps for read_shlibdeps to turn into RDEPENDS.
python package_do_pkgconfig () {
    import re

    packages = d.getVar('PACKAGES', True)
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                # Parse the .pc file with a private datastore so that
                # "name=value" variable assignments can be expanded when
                # referenced in later fields
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        # Only the 'Requires' field contributes deps
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # Record what this recipe's packages provide
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Merge in providers already recorded by other recipes
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve each package's requirements and emit <pkg>.pcdeps
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1759
def read_libdep_files(d):
    """Read the per-package library dependency files emitted by the
    shlibs, pkgconfig and CLR steps (.shlibdeps, .pcdeps, .clilibdeps)
    from PKGDEST and return {pkg: {dependency: [version constraints]}}.
    Missing/unreadable files are simply skipped."""
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # str.rstrip() returns a new string; the original
                    # code discarded the result and parsed the raw line
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        # First file to mention a dep wins its versions
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1778
# Merge the dependency data gathered by read_libdep_files() into each
# package's RDEPENDS, preserving any version constraints and avoiding
# duplicates.
python read_shlibdeps () {
    pkglibdeps = read_libdep_files(d)

    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for dep in pkglibdeps[pkg]:
            # Add the dep if it's not already there, or if no comparison is set
            if dep not in rdepends:
                rdepends[dep] = []
            for v in pkglibdeps[pkg][dep]:
                if v not in rdepends[dep]:
                    rdepends[dep].append(v)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1794
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, it will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES', True)
    # Suffixes (e.g. "-dev -dbg") and prefixes that identify the derived
    # packages whose recommendations should mirror their parent's deps.
    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # For each build-time dependency in 'depends', add an (unversioned)
        # RRECOMMENDS on the suffixed/prefixed counterpart package.
        # Used for -dev packages, which chain off DEPENDS.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in depends:
            # Skip toolchain/cross items and virtual providers; they have no
            # meaningful -dev/-dbg counterpart in the target package space.
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Normalise to the base package name before applying the modifier,
            # so e.g. "foo-dev" becomes "foo" and then "foo<suffix>".
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                # Empty version-constraint list: an unversioned recommendation.
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Same as pkg_adddeprrecs but driven by run-time deps ('rdepends');
        # only virtual-locale-* entries are filtered out here.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg modifier before applying this one.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Append preserving order, without duplicates.
        if dep not in list:
            list.append(dep)

    # Unique build-time dependencies of the recipe.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
        add_dep(depends, dep)

    # Union of run-time dependencies across all packages of the recipe.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages carrying it:
    # pkgs[modifier][pkg] = (base package name, name-builder function)
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        # -dbg packages chain off the library dependency data collected by
        # the shlibs code rather than off plain RDEPENDS.
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # FIXME this should not look at PN once all task recipes inherit from task.bbclass
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (d.getVar('PN', True) or '').startswith('packagegroup-'))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            # Sole package with this modifier: chain off the union of all
            # RDEPENDS; otherwise use only the parent package's RDEPENDS.
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
1917
# Since bitbake can't determine which variables are accessed during package
# iteration, we need to list them here so gen_packagevar() can expand them
# into per-package vardeps (VAR_<pkg>) for do_package's task signature:
PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE"
1921
def gen_packagevar(d):
    """
    Build the list of per-package variable names that do_package's task
    signature must depend on.

    For every package P in PACKAGES and every variable V in PACKAGEVARS the
    name "V_P" is emitted, plus one "LICENSE_EXCLUSION-P" entry per package.
    Returns the names joined into a single space-separated string.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    vars = (d.getVar("PACKAGEVARS", True) or "").split()
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes. This must happen inside the package loop so that
        # every package gets an exclusion entry (not just the last one), and
        # so an empty PACKAGES cannot reference an unbound 'p'.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1934
# Hook for recipes/classes to run extra functions on PKGD before splitting.
PACKAGE_PREPROCESS_FUNCS ?= ""
# Functions for setting up PKGD
PACKAGEBUILDPKGD ?= " \
                perform_packagecopy \
                ${PACKAGE_PREPROCESS_FUNCS} \
                split_and_strip_files \
                fixup_perms \
                "
# Functions which split PKGD up into separate packages
PACKAGESPLITFUNCS ?= " \
                package_do_split_locales \
                populate_packages"
# Functions which process metadata based on split packages
PACKAGEFUNCS += " \
                package_fixsymlinks \
                package_name_hook \
                package_do_filedeps \
                package_do_shlibs \
                package_do_pkgconfig \
                read_shlibdeps \
                package_depchains \
                emit_pkgdata"
1957
python do_package () {
    # Main packaging task: sanity-check the setup, prepare PKGD from D,
    # split PKGD into per-package trees under PKGDEST, then run the
    # metadata-processing functions listed in PACKAGEFUNCS.

    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if not workdir or not outdir or not dest or not dvar or not pn:
        # Report through the QA machinery rather than raising, so the error
        # respects the recipe's QA error/warn configuration.
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    # Fetch PRAUTO from the PR service before any packaging data is emitted.
    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Re-create the stat cache: the PACKAGEBUILDPKGD functions above modified
    # the tree on disk, so earlier cached results would be stale.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    # Re-read PACKAGES: the split functions may have altered the package list.
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
2039
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
# Task signature must track the function lists and every per-package variable.
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# Shared lock serialising access to the packaging output directories.
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
2050
# Restore do_package output from shared state instead of rebuilding it.
python do_package_setscene () {
    sstate_setscene(d)
}
addtask do_package_setscene
2055
# Deliberate no-op: do_packagedata only exists to publish PKGDESTWORK into
# PKGDATA_DIR via the sstate input/output dirs declared below.
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package
2061
SSTATETASKS += "do_packagedata"
# sstate copies the staged packaging data (PKGDESTWORK) into the global
# PKGDATA_DIR used by the package-writing classes and other recipes.
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_packagedata[stamp-extra-info] = "${MACHINE}"
2067
# Restore do_packagedata output from shared state instead of rebuilding it.
python do_packagedata_setscene () {
    sstate_setscene(d)
}
addtask do_packagedata_setscene
2072
2073#
2074# Helper functions for the package writing classes
2075#
2076
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    renamed_pkg = d.getVar("PKG", True)
    # Remap each runtime-dependency variable onto the (possibly renamed)
    # package name, in the same order as before.
    for dep_var in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(dep_var, renamed_pkg, d)
2085 runtime_mapping_rename("RSUGGESTS", pkg, d)
2086