#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
#        The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
#        a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
#        dependencies found. Also stores the package name so anyone else using this library
#        knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
41inherit packagedata
inherit chrpath
43
44# Need the package_qa_handle_error() in insane.bbclass
45inherit insane
46
# Working copy of ${D} that the packaging steps operate on (and mutate)
PKGD = "${WORKDIR}/package"
# Per-package split output: one subdirectory per package under this root
PKGDEST = "${WORKDIR}/packages-split"

LOCALE_SECTION ?= ''

ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
PACKAGE_DEPENDS += "rpm-native"
56
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes <U0123>-style unicode codepoint references (as found in glibc
    locale names) to the characters they name, then maps the remaining
    characters that are invalid in package names to valid ones.
    """
    import re

    def fixutf(m):
        # Decode a <Uxxxx> reference to its character.  The previous
        # implementation used ('\u%s' % cp).decode('unicode_escape'),
        # which only works on Python 2 (py3 str has no .decode and '\u'
        # is a hard escape in string literals).
        cp = m.group(1)
        if cp:
            return chr(int(cp, 16))

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
73
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    """

    # Hoisted out of the per-object loop below, where they were needlessly
    # re-imported on every iteration.
    import re, stat

    dvar = d.getVar('PKGD', True)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # In a multilib build, transparently prefix both the generated package
    # names and any extra dependencies with MLPREFIX.
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends is None:
        extra_depends = d.getVar("PN", True)

    if not summary:
        summary = description

    for o in sorted(objs):
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if isinstance(aux_files_pattern, list):
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if isinstance(aux_files_pattern_verbatim, list):
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION_' + pkg, True):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg, True):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
227
228PACKAGE_DEPENDS += "file-native"
229
# Anonymous (parse-time) function: wire up do_package's inter-task
# dependencies.  Skipped entirely when the recipe produces no packages.
python () {
    if d.getVar('PACKAGES', True) != '':
        deps = ""
        # Every entry in PACKAGE_DEPENDS must be staged before we package
        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
            deps += " %s:do_populate_sysroot" % dep
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
240
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand FILES-style entries (relative to the current working directory)
    into matching paths.

    Each entry is normalised to a './'-prefixed relative path and globbed.
    The result may contain symlinks, directories and regular files; entries
    whose glob matched nothing are kept verbatim (and so may not exist).
    """
    import os,glob
    # A fresh path cache for this call (not the module-level cpath)
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    # Expand directories.  Appending to 'files' while iterating it is
    # deliberate: newly appended subdirectory entries are themselves
    # visited later in the same loop, so the walk effectively recurses.
    for f in files:
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files
267
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the configuration files shipped in package 'pkg'.

    CONFFILES_<pkg> (falling back to CONFFILES) is expanded relative to the
    package's tree under PKGDEST; directories, symlinks and non-existent
    entries are filtered out.  Paths are returned rooted at the target
    filesystem, e.g. '/etc/foo.conf'.
    """
    pkgdest = d.getVar('PKGDEST', True)
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # try/finally guarantees the working directory is restored even if the
    # expansion below raises (previously an exception leaked the chdir).
    try:
        conffiles = d.getVar('CONFFILES_%s' % pkg, True)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES', True)
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            conf_list.append(f)

        # Strip the leading '.' ('./etc/foo' -> '/etc/foo')
        conf_list = [f[1:] for f in conf_list]
    finally:
        os.chdir(cwd)
    return conf_list
300
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources
    #
    # Returns 1 when the file is skipped (kernel module), 0 otherwise.

    import stat

    dvar = d.getVar('PKGD', True)
    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
    workdir = d.getVar("WORKDIR", True)
    workparentdir = d.getVar("DEBUGSRC_OVERRIDE_PATH", True) or os.path.dirname(os.path.dirname(workdir))

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Temporarily make the file readable and writable while we operate on it;
    # the original mode is restored at the end.  (The R_OK check previously
    # lacked a 'not', which made the condition true for every readable file
    # and forced a pointless chmod round-trip on nearly everything.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (debugedit, workparentdir, debugsrcdir, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debugging information into the separate debug file
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
350
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination (PKGD + debugsrcdir) here.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # Create the destination directory component by component, remembering
        # which ones we had to make so empty ones can be removed again below.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        # Pipeline: unique NUL-separated source list -> drop compiler
        # pseudo-files -> restrict to this package's workdir -> cpio copy.
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty (deepest-first, so parents
        # emptied by removing a child are caught too)
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
405
406#
407# Package data handling routines
408#
409
def get_package_mapping (pkg, basepkg, d):
    """
    Resolve the runtime name a package has been renamed to, using the
    PKG_<pkg> entry recorded in pkgdata.  Returns 'pkg' unchanged when no
    mapping exists, or when an allarch recipe would map the name back onto
    the base package.
    """
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG_%s" % pkg
    if key not in subdata:
        return pkg

    mapped = subdata[key]
    # Have to avoid undoing the write_extra_pkgs(global_variants...)
    if bb.data.inherits_class('allarch', d) and mapped == basepkg:
        return pkg
    return mapped
423
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra per-package metadata for the given packaging backend.

    Checks PACKAGE_ADD_METADATA_<PKGTYPE> first, then plain
    PACKAGE_ADD_METADATA; the first variable that is set wins.  Returns the
    fields joined by newlines, or None when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    candidates = ("%s_%s" % (base_key, pkg_type.upper()), base_key)
    for key in candidates:
        if d.getVar(key, False) is None:
            continue
        # Type the variable as a list (newline-separated by default) so
        # oe.data.typed_value() splits it for us.
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        fields = oe.data.typed_value(key, d)
        return "\n".join(field.strip() for field in fields).strip()
434
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite every dependency listed in d's 'varname' (e.g. RDEPENDS_<pkg>)
    through get_package_mapping(), preserving any version constraints.
    """
    # Packagegroup recipes keep their symbolic dependency names untouched.
    if bb.data.inherits_class('packagegroup', d):
        return

    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    renamed = {}
    for dep, constraints in deps.items():
        renamed[get_package_mapping(dep, pkg, d)] = constraints

    d.setVar(varname, bb.utils.join_deps(renamed, commasep=False))
450
451#
452# Package functions suitable for inclusion in PACKAGEFUNCS
453#
454
# Fetch the automatic PR value (PRAUTO) from the PR service, and resolve any
# AUTOINC placeholder in PKGV.  Falls back to "0" for AUTOINC when no PR
# service is configured, and to exported lockdown values when
# PRSERV_LOCKDOWN is set.
python package_get_auto_pr() {
    import oe.prservice
    import re

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV", True)

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST', True):
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
        return

    auto_pr = None
    pv = d.getVar("PV", True)
    version = d.getVar("PRAUTOINX", True)
    pkgarch = d.getVar("PACKAGE_ARCH", True)
    checksum = d.getVar("BB_TASKHASH", True)

    # Lockdown mode: values were exported earlier; arch-specific wins
    if d.getVar('PRSERV_LOCKDOWN', True):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = d.getVar("__PRSERV_CONN", True)
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))

            auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
504
505LOCALEBASEPN ??= "${PN}"
506
# Split ${datadir}/locale into one <LOCALEBASEPN>-locale-<lang> package per
# locale directory found in PKGD, updating FILES/PACKAGES and per-package
# metadata accordingly.  Disabled by PACKAGE_NO_LOCALE = "1".
python package_do_split_locales() {
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all locale package is replaced by the per-locale ones
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
561
# Copy the install output ${D} into ${PKGD}, the tree that all subsequent
# packaging steps operate on (and are free to modify).
python perform_packagecopy () {
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links (tar pipe rather than cp)
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
# Wipe and recreate PKGD on every run so stale files never leak in
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"
580
581# We generate a master list of directories to process, we start by
582# seeding this list with reasonable defaults, then load from
583# the fs-perms.txt files
# Normalise ownership and permissions in PKGD before splitting, driven by a
# table seeded from the standard bitbake.conf directories and extended by
# the fs-perms.txt configuration files.
python fixup_perms () {
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occured
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" (or empty) means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        # Only act on real (non-symlink) directories that exist in PKGD
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Second pass: apply mode/uid/gid entries (recursively when walk is true)
    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
812
813python split_and_strip_files () {
814 import stat, errno
815
816 dvar = d.getVar('PKGD', True)
817 pn = d.getVar('PN', True)
818
819 # We default to '.debug' style
820 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
821 # Single debug-file-directory style debug info
822 debugappend = ".debug"
823 debugdir = ""
824 debuglibdir = "/usr/lib/debug"
825 debugsrcdir = "/usr/src/debug"
826 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
827 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
828 debugappend = ""
829 debugdir = "/.debug"
830 debuglibdir = ""
831 debugsrcdir = ""
832 else:
833 # Original OE-core, a.k.a. ".debug", style debug info
834 debugappend = ""
835 debugdir = "/.debug"
836 debuglibdir = ""
837 debugsrcdir = "/usr/src/debug"
838
839 sourcefile = d.expand("${WORKDIR}/debugsources.list")
840 bb.utils.remove(sourcefile)
841
842 os.chdir(dvar)
843
844 # Return type (bits):
845 # 0 - not elf
846 # 1 - ELF
847 # 2 - stripped
848 # 4 - executable
849 # 8 - shared library
850 # 16 - kernel module
851 def isELF(path):
852 type = 0
853 ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))
854
855 if ret:
856 msg = "split_and_strip_files: 'file %s' failed" % path
857 package_qa_handle_error("split-strip", msg, d)
858 return type
859
860 # Not stripped
861 if "ELF" in result:
862 type |= 1
863 if "not stripped" not in result:
864 type |= 2
865 if "executable" in result:
866 type |= 4
867 if "shared" in result:
868 type |= 8
869 return type
870
871
872 #
873 # First lets figure out all of the files we may have to process ... do this only once!
874 #
875 elffiles = {}
876 symlinks = {}
877 kernmods = []
878 inodes = {}
879 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
880 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
881 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
882 for root, dirs, files in cpath.walk(dvar):
883 for f in files:
884 file = os.path.join(root, f)
885 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
886 kernmods.append(file)
887 continue
888
889 # Skip debug files
890 if debugappend and file.endswith(debugappend):
891 continue
892 if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
893 continue
894
895 try:
896 ltarget = cpath.realpath(file, dvar, False)
897 s = cpath.lstat(ltarget)
898 except OSError as e:
899 (err, strerror) = e.args
900 if err != errno.ENOENT:
901 raise
902 # Skip broken symlinks
903 continue
904 if not s:
905 continue
906 # Check its an excutable
907 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
908 or ((file.startswith(libdir) or file.startswith(baselibdir)) and ".so" in f):
909 # If it's a symlink, and points to an ELF file, we capture the readlink target
910 if cpath.islink(file):
911 target = os.readlink(file)
912 if isELF(ltarget):
913 #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
914 symlinks[file] = target
915 continue
916
917 # It's a file (or hardlink), not a link
918 # ...but is it ELF, and is it already stripped?
919 elf_file = isELF(file)
920 if elf_file & 1:
921 if elf_file & 2:
922 if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
923 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
924 else:
925 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
926 package_qa_handle_error("already-stripped", msg, d)
927 continue
928
929 # At this point we have an unstripped elf file. We need to:
930 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
931 # b) Only strip any hardlinked file once (no races)
932 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
933
934 # Use a reference of device ID and inode number to indentify files
935 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
936 if file_reference in inodes:
937 os.unlink(file)
938 os.link(inodes[file_reference][0], file)
939 inodes[file_reference].append(file)
940 else:
941 inodes[file_reference] = [file]
942 # break hardlink
943 bb.utils.copyfile(file, file)
944 elffiles[file] = elf_file
945 # Modified the file so clear the cache
946 cpath.updatecache(file)
947
948 #
949 # First lets process debug splitting
950 #
951 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
952 for file in elffiles:
953 src = file[len(dvar):]
954 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
955 fpath = dvar + dest
956
957 # Split the file...
958 bb.utils.mkdirhier(os.path.dirname(fpath))
959 #bb.note("Split %s -> %s" % (file, fpath))
960 # Only store off the hard link reference if we successfully split!
961 splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)
962
963 # Hardlink our debug symbols to the other hardlink copies
964 for ref in inodes:
965 if len(inodes[ref]) == 1:
966 continue
967 for file in inodes[ref][1:]:
968 src = file[len(dvar):]
969 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
970 fpath = dvar + dest
971 target = inodes[ref][0][len(dvar):]
972 ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
973 bb.utils.mkdirhier(os.path.dirname(fpath))
974 #bb.note("Link %s -> %s" % (fpath, ftarget))
975 os.link(ftarget, fpath)
976
977 # Create symlinks for all cases we were able to split symbols
978 for file in symlinks:
979 src = file[len(dvar):]
980 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
981 fpath = dvar + dest
982 # Skip it if the target doesn't exist
983 try:
984 s = os.stat(fpath)
985 except OSError as e:
986 (err, strerror) = e.args
987 if err != errno.ENOENT:
988 raise
989 continue
990
991 ltarget = symlinks[file]
992 lpath = os.path.dirname(ltarget)
993 lbase = os.path.basename(ltarget)
994 ftarget = ""
995 if lpath and lpath != ".":
996 ftarget += lpath + debugdir + "/"
997 ftarget += lbase + debugappend
998 if lpath.startswith(".."):
999 ftarget = os.path.join("..", ftarget)
1000 bb.utils.mkdirhier(os.path.dirname(fpath))
1001 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1002 os.symlink(ftarget, fpath)
1003
1004 # Process the debugsrcdir if requested...
1005 # This copies and places the referenced sources for later debugging...
1006 copydebugsources(debugsrcdir, d)
1007 #
1008 # End of debug splitting
1009 #
1010
1011 #
1012 # Now lets go back over things and strip them
1013 #
1014 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
1015 strip = d.getVar("STRIP", True)
1016 sfiles = []
1017 for file in elffiles:
1018 elf_file = int(elffiles[file])
1019 #bb.note("Strip %s" % file)
1020 sfiles.append((file, elf_file, strip))
1021 for f in kernmods:
1022 sfiles.append((f, 16, strip))
1023
1024 oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)
1025
1026 #
1027 # End of strip
1028 #
1029}
1030
1031python populate_packages () {
1032 import glob, re
1033
1034 workdir = d.getVar('WORKDIR', True)
1035 outdir = d.getVar('DEPLOY_DIR', True)
1036 dvar = d.getVar('PKGD', True)
1037 packages = d.getVar('PACKAGES', True)
1038 pn = d.getVar('PN', True)
1039
1040 bb.utils.mkdirhier(outdir)
1041 os.chdir(dvar)
1042
1043 # Sanity check PACKAGES for duplicates
1044 # Sanity should be moved to sanity.bbclass once we have the infrastucture
1045 package_list = []
1046
1047 for pkg in packages.split():
1048 if pkg in package_list:
1049 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1050 package_qa_handle_error("packages-list", msg, d)
1051 else:
1052 package_list.append(pkg)
1053 d.setVar('PACKAGES', ' '.join(package_list))
1054 pkgdest = d.getVar('PKGDEST', True)
1055
1056 seen = []
1057
1058 # os.mkdir masks the permissions with umask so we have to unset it first
1059 oldumask = os.umask(0)
1060
1061 for pkg in package_list:
1062 root = os.path.join(pkgdest, pkg)
1063 bb.utils.mkdirhier(root)
1064
1065 filesvar = d.getVar('FILES_%s' % pkg, True) or ""
1066 if "//" in filesvar:
1067 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1068 package_qa_handle_error("files-invalid", msg, d)
1069 filesvar.replace("//", "/")
1070
1071 origfiles = filesvar.split()
1072 files = files_from_filevars(origfiles)
1073
1074 for file in files:
1075 if (not cpath.islink(file)) and (not cpath.exists(file)):
1076 continue
1077 if file in seen:
1078 continue
1079 seen.append(file)
1080
1081 def mkdir(src, dest, p):
1082 src = os.path.join(src, p)
1083 dest = os.path.join(dest, p)
1084 fstat = cpath.stat(src)
1085 os.mkdir(dest, fstat.st_mode)
1086 os.chown(dest, fstat.st_uid, fstat.st_gid)
1087 if p not in seen:
1088 seen.append(p)
1089 cpath.updatecache(dest)
1090
1091 def mkdir_recurse(src, dest, paths):
1092 if cpath.exists(dest + '/' + paths):
1093 return
1094 while paths.startswith("./"):
1095 paths = paths[2:]
1096 p = "."
1097 for c in paths.split("/"):
1098 p = os.path.join(p, c)
1099 if not cpath.exists(os.path.join(dest, p)):
1100 mkdir(src, dest, p)
1101
1102 if cpath.isdir(file) and not cpath.islink(file):
1103 mkdir_recurse(dvar, root, file)
1104 continue
1105
1106 mkdir_recurse(dvar, root, os.path.dirname(file))
1107 fpath = os.path.join(root,file)
1108 if not cpath.islink(file):
1109 os.link(file, fpath)
1110 fstat = cpath.stat(file)
1111 os.chmod(fpath, fstat.st_mode)
1112 os.chown(fpath, fstat.st_uid, fstat.st_gid)
1113 continue
1114 ret = bb.utils.copyfile(file, fpath)
1115 if ret is False or ret == 0:
1116 raise bb.build.FuncFailed("File population failed")
1117
1118 os.umask(oldumask)
1119 os.chdir(workdir)
1120
1121 # Handle LICENSE_EXCLUSION
1122 package_list = []
1123 for pkg in packages.split():
1124 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
1125 msg = "%s has an incompatible license. Excluding from packaging." % pkg
1126 package_qa_handle_error("incompatible-license", msg, d)
1127 else:
1128 package_list.append(pkg)
1129 d.setVar('PACKAGES', ' '.join(package_list))
1130
1131 unshipped = []
1132 for root, dirs, files in cpath.walk(dvar):
1133 dir = root[len(dvar):]
1134 if not dir:
1135 dir = os.sep
1136 for f in (files + dirs):
1137 path = os.path.join(dir, f)
1138 if ('.' + path) not in seen:
1139 unshipped.append(path)
1140
1141 if unshipped != []:
1142 msg = pn + ": Files/directories were installed but not shipped in any package:"
1143 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
1144 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1145 else:
1146 for f in unshipped:
1147 msg = msg + "\n " + f
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001148 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1149 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001150 package_qa_handle_error("installed-vs-shipped", msg, d)
1151}
1152populate_packages[dirs] = "${D}"
1153
python package_fixsymlinks () {
    # Find symlinks in each package that point at files the package does not
    # itself ship ("dangling" within the package). If the target lives in a
    # sibling package, add an RDEPENDS on that package; otherwise just report.
    import errno
    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar("PACKAGES", False).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            # rpath is the path relative to the package's install root
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # No dependency needed when the package resolves itself
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1197
1198
python package_package_name_hook() {
    """
    A package_name_hook function can be used to rewrite the package names by
    changing PKG. For an example, see debian.bbclass.

    This default implementation deliberately does nothing; classes override
    it through the EXPORT_FUNCTIONS package_name_hook mechanism below.
    """
    pass
}
1206
1207EXPORT_FUNCTIONS package_name_hook
1208
1209
1210PKGDESTWORK = "${WORKDIR}/pkgdata"
1211
python emit_pkgdata() {
    # Save the packaging metadata for this recipe into PKGDATA_DIR
    # (PKGDESTWORK): one top-level ${PN} file listing PACKAGES, one
    # runtime/<pkg> file per package with its variables, plus symlink farms
    # for reverse (runtime-reverse) and RPROVIDES (runtime-rprovides) lookups.
    from glob import glob
    import json

    # Write "VAR_pkg: value" to f if VAR_pkg is set, falling back to the
    # unsuffixed VAR. Returns the value written (or None) so callers can
    # reuse it (see rprov below).
    def write_if_exists(f, pkg, var):
        # NOTE(review): 'str' shadows the builtin here, and the
        # "string_escape" codec only exists on Python 2 -- revisit on a
        # Python 3 migration.
        def encode(str):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    # For each multilib variant, write a <variant>-<pn> pkgdata file listing
    # the variant-prefixed package names.
    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    # For each multilib variant, write a minimal runtime pkgdata stub that
    # maps the variant package name back to the base package name.
    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    # NOTE(review): no trailing newline, unlike the other
                    # pkgdata writers -- confirm consumers tolerate this.
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # PKG_<pkg> is the final (possibly renamed) package name; default it
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        # Collect per-file sizes; FILES_INFO is set as a plain (unsuffixed)
        # variable so the write_if_exists() fallback below picks it up.
        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            total_size += fstat.st_size
            files[os.sep + relpth] = fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        rprov = write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for rprovides lookup
        if rprov:
            for p in rprov.strip().split():
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        # Only mark the package as "packaged" if it has content, or is
        # explicitly allowed to be empty
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
1352emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
1353
1354ldconfig_postinst_fragment() {
1355if [ x"$D" = "x" ]; then
1356 if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
1357fi
1358}
1359
1360RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1361
1362# Collect perfile run-time dependency metadata
1363# Output:
1364# FILERPROVIDESFLIST_pkg - list of all files w/ deps
1365# FILERPROVIDES_filepath_pkg - per file dep
1366#
1367# FILERDEPENDSFLIST_pkg - list of all files w/ deps
1368# FILERDEPENDS_filepath_pkg - per file dep
1369
1370python package_do_filedeps() {
1371 if d.getVar('SKIP_FILEDEPS', True) == '1':
1372 return
1373
1374 pkgdest = d.getVar('PKGDEST', True)
1375 packages = d.getVar('PACKAGES', True)
1376 rpmdeps = d.getVar('RPMDEPS', True)
1377
1378 def chunks(files, n):
1379 return [files[i:i+n] for i in range(0, len(files), n)]
1380
1381 pkglist = []
1382 for pkg in packages.split():
1383 if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
1384 continue
1385 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
1386 continue
1387 for files in chunks(pkgfiles[pkg], 100):
1388 pkglist.append((pkg, files, rpmdeps, pkgdest))
1389
1390 processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)
1391
1392 provides_files = {}
1393 requires_files = {}
1394
1395 for result in processed:
1396 (pkg, provides, requires) = result
1397
1398 if pkg not in provides_files:
1399 provides_files[pkg] = []
1400 if pkg not in requires_files:
1401 requires_files[pkg] = []
1402
1403 for file in provides:
1404 provides_files[pkg].append(file)
1405 key = "FILERPROVIDES_" + file + "_" + pkg
1406 d.setVar(key, " ".join(provides[file]))
1407
1408 for file in requires:
1409 requires_files[pkg].append(file)
1410 key = "FILERDEPENDS_" + file + "_" + pkg
1411 d.setVar(key, " ".join(requires[file]))
1412
1413 for pkg in requires_files:
1414 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
1415 for pkg in provides_files:
1416 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
1417}
1418
1419SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
1420SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1421
1422python package_do_shlibs() {
1423 import re, pipes
1424 import subprocess as sub
1425
1426 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
1427 if exclude_shlibs:
1428 bb.note("not generating shlibs")
1429 return
1430
1431 lib_re = re.compile("^.*\.so")
1432 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
1433
1434 packages = d.getVar('PACKAGES', True)
1435 targetos = d.getVar('TARGET_OS', True)
1436
1437 workdir = d.getVar('WORKDIR', True)
1438
1439 ver = d.getVar('PKGV', True)
1440 if not ver:
1441 msg = "PKGV not defined"
1442 package_qa_handle_error("pkgv-undefined", msg, d)
1443 return
1444
1445 pkgdest = d.getVar('PKGDEST', True)
1446
1447 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1448
1449 # Take shared lock since we're only reading, not writing
1450 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1451
1452 def linux_so(file, needed, sonames, renames, pkgver):
1453 needs_ldconfig = False
1454 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1455 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
1456 fd = os.popen(cmd)
1457 lines = fd.readlines()
1458 fd.close()
1459 rpath = []
1460 for l in lines:
1461 m = re.match("\s+RPATH\s+([^\s]*)", l)
1462 if m:
1463 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1464 rpath = map(os.path.normpath, rpaths)
1465 for l in lines:
1466 m = re.match("\s+NEEDED\s+([^\s]*)", l)
1467 if m:
1468 dep = m.group(1)
1469 if dep not in needed[pkg]:
1470 needed[pkg].append((dep, file, rpath))
1471 m = re.match("\s+SONAME\s+([^\s]*)", l)
1472 if m:
1473 this_soname = m.group(1)
1474 prov = (this_soname, ldir, pkgver)
1475 if not prov in sonames:
1476 # if library is private (only used by package) then do not build shlib for it
1477 if not private_libs or this_soname not in private_libs:
1478 sonames.append(prov)
1479 if libdir_re.match(os.path.dirname(file)):
1480 needs_ldconfig = True
1481 if snap_symlinks and (os.path.basename(file) != this_soname):
1482 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1483 return needs_ldconfig
1484
1485 def darwin_so(file, needed, sonames, renames, pkgver):
1486 if not os.path.exists(file):
1487 return
1488 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1489
1490 def get_combinations(base):
1491 #
1492 # Given a base library name, find all combinations of this split by "." and "-"
1493 #
1494 combos = []
1495 options = base.split(".")
1496 for i in range(1, len(options) + 1):
1497 combos.append(".".join(options[0:i]))
1498 options = base.split("-")
1499 for i in range(1, len(options) + 1):
1500 combos.append("-".join(options[0:i]))
1501 return combos
1502
1503 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
1504 # Drop suffix
1505 name = os.path.basename(file).rsplit(".",1)[0]
1506 # Find all combinations
1507 combos = get_combinations(name)
1508 for combo in combos:
1509 if not combo in sonames:
1510 prov = (combo, ldir, pkgver)
1511 sonames.append(prov)
1512 if file.endswith('.dylib') or file.endswith('.so'):
1513 rpath = []
1514 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
1515 err, out = p.communicate()
1516 # If returned succesfully, process stderr for results
1517 if p.returncode == 0:
1518 for l in err.split("\n"):
1519 l = l.strip()
1520 if l.startswith('path '):
1521 rpath.append(l.split()[1])
1522
1523 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
1524 err, out = p.communicate()
1525 # If returned succesfully, process stderr for results
1526 if p.returncode == 0:
1527 for l in err.split("\n"):
1528 l = l.strip()
1529 if not l or l.endswith(":"):
1530 continue
1531 if "is not an object file" in l:
1532 continue
1533 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1534 if name and name not in needed[pkg]:
1535 needed[pkg].append((name, file, []))
1536
1537 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
1538 snap_symlinks = True
1539 else:
1540 snap_symlinks = False
1541
1542 if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
1543 use_ldconfig = True
1544 else:
1545 use_ldconfig = False
1546
1547 needed = {}
1548 shlib_provider = oe.package.read_shlib_providers(d)
1549
1550 for pkg in packages.split():
1551 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
1552 private_libs = private_libs.split()
1553 needs_ldconfig = False
1554 bb.debug(2, "calculating shlib provides for %s" % pkg)
1555
1556 pkgver = d.getVar('PKGV_' + pkg, True)
1557 if not pkgver:
1558 pkgver = d.getVar('PV_' + pkg, True)
1559 if not pkgver:
1560 pkgver = ver
1561
1562 needed[pkg] = []
1563 sonames = list()
1564 renames = list()
1565 for file in pkgfiles[pkg]:
1566 soname = None
1567 if cpath.islink(file):
1568 continue
1569 if targetos == "darwin" or targetos == "darwin8":
1570 darwin_so(file, needed, sonames, renames, pkgver)
1571 elif os.access(file, os.X_OK) or lib_re.match(file):
1572 ldconfig = linux_so(file, needed, sonames, renames, pkgver)
1573 needs_ldconfig = needs_ldconfig or ldconfig
1574 for (old, new) in renames:
1575 bb.note("Renaming %s to %s" % (old, new))
1576 os.rename(old, new)
1577 pkgfiles[pkg].remove(old)
1578
1579 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1580 if len(sonames):
1581 fd = open(shlibs_file, 'w')
1582 for s in sonames:
1583 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1584 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1585 if old_pkg != pkg:
1586 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1587 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1588 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1589 if s[0] not in shlib_provider:
1590 shlib_provider[s[0]] = {}
1591 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1592 fd.close()
1593 if needs_ldconfig and use_ldconfig:
1594 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1595 postinst = d.getVar('pkg_postinst_%s' % pkg, True)
1596 if not postinst:
1597 postinst = '#!/bin/sh\n'
1598 postinst += d.getVar('ldconfig_postinst_fragment', True)
1599 d.setVar('pkg_postinst_%s' % pkg, postinst)
1600 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1601
1602 bb.utils.unlockfile(lf)
1603
1604 assumed_libs = d.getVar('ASSUME_SHLIBS', True)
1605 if assumed_libs:
1606 libdir = d.getVar("libdir", True)
1607 for e in assumed_libs.split():
1608 l, dep_pkg = e.split(":")
1609 lib_ver = None
1610 dep_pkg = dep_pkg.rsplit("_", 1)
1611 if len(dep_pkg) == 2:
1612 lib_ver = dep_pkg[1]
1613 dep_pkg = dep_pkg[0]
1614 if l not in shlib_provider:
1615 shlib_provider[l] = {}
1616 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1617
1618 libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
1619
1620 for pkg in packages.split():
1621 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1622
1623 deps = list()
1624 for n in needed[pkg]:
1625 # if n is in private libraries, don't try to search provider for it
1626 # this could cause problem in case some abc.bb provides private
1627 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1628 # but skipping it is still better alternative than providing own
1629 # version and then adding runtime dependency for the same system library
1630 if private_libs and n[0] in private_libs:
1631 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1632 continue
1633 if n[0] in shlib_provider.keys():
1634 shlib_provider_path = list()
1635 for k in shlib_provider[n[0]].keys():
1636 shlib_provider_path.append(k)
1637 match = None
1638 for p in n[2] + shlib_provider_path + libsearchpath:
1639 if p in shlib_provider[n[0]]:
1640 match = p
1641 break
1642 if match:
1643 (dep_pkg, ver_needed) = shlib_provider[n[0]][match]
1644
1645 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1646
1647 if dep_pkg == pkg:
1648 continue
1649
1650 if ver_needed:
1651 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1652 else:
1653 dep = dep_pkg
1654 if not dep in deps:
1655 deps.append(dep)
1656 continue
1657 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1658
1659 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1660 if os.path.exists(deps_file):
1661 os.remove(deps_file)
1662 if len(deps):
1663 fd = open(deps_file, 'w')
1664 for dep in deps:
1665 fd.write(dep + '\n')
1666 fd.close()
1667}
1668
python package_do_pkgconfig () {
    # Track which packages provide and require which pkg-config (.pc)
    # modules: providers are written to SHLIBSWORKDIR/<pkg>.pclist, and the
    # resulting inter-package requirements to PKGDEST/<pkg>.pcdeps.
    import re

    packages = d.getVar('PACKAGES', True)
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                # Each .pc file is parsed with a private datastore so that
                # "var=value" lines can be expanded within the file itself
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        # Only the Requires: field contributes dependencies
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # Publish this recipe's providers for other recipes to consume
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve each needed module to the providing package
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1759
def read_libdep_files(d):
    """
    Read the per-package .shlibdeps, .pcdeps and .clilibdeps files written
    by earlier packaging steps and return the merged result as
    {pkg: {dependency: [version constraints]}}.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                fd = open(depsfile)
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    # Fix: str.rstrip() returns a new string; the original
                    # bare "l.rstrip()" discarded the result, leaving the
                    # trailing newline on the parsed line.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    # First file providing a dependency wins; later files do
                    # not overwrite its version constraints
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1778
1779python read_shlibdeps () {
1780 pkglibdeps = read_libdep_files(d)
1781
1782 packages = d.getVar('PACKAGES', True).split()
1783 for pkg in packages:
1784 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1785 for dep in pkglibdeps[pkg]:
1786 # Add the dep if it's not already there, or if no comparison is set
1787 if dep not in rdepends:
1788 rdepends[dep] = []
1789 for v in pkglibdeps[pkg][dep]:
1790 if v not in rdepends[dep]:
1791 rdepends[dep].append(v)
1792 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1793}
1794
1795python package_depchains() {
1796 """
1797 For a given set of prefix and postfix modifiers, make those packages
1798 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1799
1800 Example: If package A depends upon package B, and A's .bb emits an
1801 A-dev package, this would make A-dev Recommends: B-dev.
1802
1803 If only one of a given suffix is specified, it will take the RRECOMMENDS
1804 based on the RDEPENDS of *all* other packages. If more than one of a given
1805 suffix is specified, its will only use the RDEPENDS of the single parent
1806 package.
1807 """
1808
1809 packages = d.getVar('PACKAGES', True)
1810 postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
1811 prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
1812
    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # For each build-time dependency in 'depends', add an RRECOMMENDS on
        # the corresponding suffixed/prefixed package (name built by
        # getname), skipping -native/-cross and virtual/ entries. Existing
        # RRECOMMENDS entries and their version constraints are preserved.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in depends:
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Map foo-dev / foo-dbg back to the base name before suffixing
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1833
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Like pkg_adddeprrecs, but driven by run-time dependencies
        # (RDEPENDS); only virtual-locale-* entries are skipped here.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Map foo-dev / foo-dbg back to the base name before suffixing
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1854
1855 def add_dep(list, dep):
1856 if dep not in list:
1857 list.append(dep)
1858
1859 depends = []
1860 for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
1861 add_dep(depends, dep)
1862
1863 rdepends = []
1864 for pkg in packages.split():
1865 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
1866 add_dep(rdepends, dep)
1867
1868 #bb.note('rdepends is %s' % rdepends)
1869
1870 def post_getname(name, suffix):
1871 return '%s%s' % (name, suffix)
1872 def pre_getname(name, suffix):
1873 return '%s%s' % (suffix, name)
1874
1875 pkgs = {}
1876 for pkg in packages.split():
1877 for postfix in postfixes:
1878 if pkg.endswith(postfix):
1879 if not postfix in pkgs:
1880 pkgs[postfix] = {}
1881 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1882
1883 for prefix in prefixes:
1884 if pkg.startswith(prefix):
1885 if not prefix in pkgs:
1886 pkgs[prefix] = {}
1887 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
1888
1889 if "-dbg" in pkgs:
1890 pkglibdeps = read_libdep_files(d)
1891 pkglibdeplist = []
1892 for pkg in pkglibdeps:
1893 for k in pkglibdeps[pkg]:
1894 add_dep(pkglibdeplist, k)
1895 # FIXME this should not look at PN once all task recipes inherit from task.bbclass
1896 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (d.getVar('PN', True) or '').startswith('packagegroup-'))
1897
1898 for suffix in pkgs:
1899 for pkg in pkgs[suffix]:
1900 if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
1901 continue
1902 (base, func) = pkgs[suffix][pkg]
1903 if suffix == "-dev":
1904 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
1905 elif suffix == "-dbg":
1906 if not dbgdefaultdeps:
1907 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
1908 continue
1909 if len(pkgs[suffix]) == 1:
1910 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1911 else:
1912 rdeps = []
1913 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
1914 add_dep(rdeps, dep)
1915 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1916}
1917
# Bitbake cannot see which per-package variables (VAR_<pkgname>) are accessed
# while iterating over PACKAGES, so every variable that influences packaging
# output must be listed here. gen_packagevar() below expands this list once
# per package for use in do_package[vardeps].
PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE"
1921
def gen_packagevar(d):
    """
    Return the space-separated list of per-package variable names
    (e.g. "FILES_foo RDEPENDS_foo ... FILES_bar ...") that do_package
    depends on, for use in do_package[vardeps].

    d -- the BitBake datastore; PACKAGES and PACKAGEVARS are read from it.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    vars = (d.getVar("PACKAGEVARS", True) or "").split()
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes. This must happen once per package (inside the
        # loop): previously it ran after the loop, covering only the last
        # package and raising NameError when PACKAGES was empty.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1934
# Hook for recipes/classes to massage PKGD before splitting/stripping.
PACKAGE_PREPROCESS_FUNCS ?= ""
# Functions for setting up PKGD
PACKAGEBUILDPKGD ?= " \
                perform_packagecopy \
                ${PACKAGE_PREPROCESS_FUNCS} \
                split_and_strip_files \
                fixup_perms \
                "
# Functions which split PKGD up into separate packages
PACKAGESPLITFUNCS ?= " \
    package_do_split_locales \
        populate_packages"
# Functions which process metadata based on split packages
# (dependency collection, shlibs/pkgconfig handling, pkgdata emission).
PACKAGEFUNCS += " \
    package_fixsymlinks \
    package_name_hook \
    package_do_filedeps \
    package_do_shlibs \
    package_do_pkgconfig \
    read_shlibdeps \
    package_depchains \
    emit_pkgdata"
1957
python do_package () {
    # Top-level packaging task: sanity-check the setup, populate PKGD from D,
    # split PKGD into per-package trees under PKGDEST, then run the metadata
    # functions listed in PACKAGEFUNCS.

    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        # Reported through the QA machinery so the failure is collected
        # consistently with other packaging QA issues.
        package_qa_handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh path cache: the functions above modified the filesystem, so the
    # entries cached so far may be stale.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    # (consumed by the PACKAGEFUNCS run below).
    global pkgfiles
    pkgfiles = {}
    # Re-read PACKAGES: the split functions above may have changed it.
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
2039
# Ensure the work directories exist before the task runs, and re-run the task
# whenever any of the listed function bodies or per-package variables
# (expanded by gen_packagevar) change.
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# Shared lock so packaging tasks do not race over the shared output
# directories; do_package output is managed by shared state (sstate).
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
2050
# Restore do_package output from shared state instead of re-running the task.
python do_package_setscene () {
    sstate_setscene(d)
}
addtask do_package_setscene
2055
# Intentionally a no-op shell body: the task exists so the sstate machinery
# (configured below) can move PKGDESTWORK into the global PKGDATA_DIR.
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package
2061
# Publish the packaging metadata written to PKGDESTWORK into the shared
# PKGDATA_DIR via sstate; the stamp is machine-specific.
SSTATETASKS += "do_packagedata"
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_packagedata[stamp-extra-info] = "${MACHINE}"

# Restore do_packagedata output from shared state instead of re-running it.
python do_packagedata_setscene () {
    sstate_setscene(d)
}
addtask do_packagedata_setscene
2072
2073#
2074# Helper functions for the package writing classes
2075#
2076
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    pkg = d.getVar("PKG", True)
    # Remap each runtime dependency variable against the renamed package.
    for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(varname, pkg, d)
2086