blob: 76b9f86491e7f42e3ca3c55b238955032c6a4c2a [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
25# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
28# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29# dependencies found. Also stores the package name so anyone else using this library
30# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
inherit packagedata
inherit chrpath

# Need the package_qa_handle_error() in insane.bbclass
inherit insane

# Staging areas used by the packaging steps:
#   PKGD    - the unsplit package tree (populated from ${D} by perform_packagecopy)
#   PKGDEST - per-package split output, one subdirectory per package name
PKGD = "${WORKDIR}/package"
PKGDEST = "${WORKDIR}/packages-split"

# SECTION to assign to generated -locale packages (see package_do_split_locales);
# empty by default
LOCALE_SECTION ?= ''

ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
PACKAGE_DEPENDS += "rpm-native"
56
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes glibc-style <UXXXX> codepoint escapes and lowercases/replaces
    characters that are not valid in package names.
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Build a valid Python \uXXXX escape and decode it. The previous
            # form ('\u%s' % cp) is a SyntaxError on Python 3 (truncated \u
            # escape in the literal) and relied on Python-2 str.decode();
            # zfill(4) handles codepoints written with fewer than 4 digits.
            return ('\\u%s' % cp.zfill(4)).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
73
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items. Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of package names generated (including matches that
    were already present in PACKAGES).
    """

    # Hoisted out of the per-object loop below, where they were previously
    # re-imported on every iteration.
    import re, stat

    dvar = d.getVar('PKGD', True)
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # Prefix generated package names (and any extra dependencies) with the
    # multilib prefix when building multilib variants.
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)

    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # None (the default) means "depend on the main package"; '' disables that.
    if extra_depends is None:
        extra_depends = d.getVar("PN", True)

    if not summary:
        summary = description

    for o in sorted(objs):
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if isinstance(aux_files_pattern, list):
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if isinstance(aux_files_pattern_verbatim, list):
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION_' + pkg, True):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg, True):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
230
# file(1) is used by split_and_strip_files to classify files (see isELF)
PACKAGE_DEPENDS += "file-native"
232
python () {
    # For recipes that actually produce packages, make do_package depend on
    # do_populate_sysroot of every entry in PACKAGE_DEPENDS, and on
    # do_packagedata of all DEPENDS.
    if d.getVar('PACKAGES', True) != '':
        sysroot_deps = "".join(
            " %s:do_populate_sysroot" % pkg
            for pkg in (d.getVar('PACKAGE_DEPENDS', True) or "").split())
        d.appendVarFlag('do_package', 'depends', sysroot_deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
243
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand FILES-style entries (absolute paths and glob patterns) into the
    matching paths under the current working directory.

    Returns './'-prefixed relative paths; symlinks, directories and the
    contents of matched directories are all included.
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry into a './'-prefixed relative path
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # Only substitute the glob expansion when it produced something
            # other than the literal pattern itself
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    # NOTE: 'files' is deliberately extended while being iterated; entries
    # appended for a directory are visited by later iterations, so the whole
    # tree is walked without explicit recursion.
    for f in files:
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files
270
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the configuration files for package 'pkg' as a list of paths
    relative to the package root (leading './' stripped).

    Entries come from CONFFILES_<pkg>, falling back to CONFFILES. Links,
    directories and non-existent entries are filtered out.
    """
    pkgdest = d.getVar('PKGDEST', True)
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # try/finally so the original working directory is restored even if the
    # filesystem walk below raises (previously an exception left the process
    # chdir'd into the package tree).
    try:
        conffiles = d.getVar('CONFFILES_%s' % pkg, True)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES', True)
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)

        # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            conf_list.append(f)

        # Remove the leading './'
        for i in range(0, len(conf_list)):
            conf_list[i] = conf_list[i][1:]
    finally:
        os.chdir(cwd)
    return conf_list
303
def checkbuildpath(file, d):
    """
    Return True if the given file contains the build directory path
    (TMPDIR), i.e. build paths leaked into its contents.
    """
    tmpdir = d.getVar('TMPDIR', True)
    with open(file) as f:
        return tmpdir in f.read()
312
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources

    import stat

    dvar = d.getVar('PKGD', True)
    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Temporarily grant read/write access if either is missing so the tools
    # below can operate on the file; the original mode is restored at the end.
    # (Previously the second test lacked the 'not', so this branch fired for
    # every readable file rather than only for unreadable/unwritable ones.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -i -l '%s' '%s'" % (debugedit, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the debug info into the separate .debug file...
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
360
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # If build path exists in sourcefile, it means toolchain did not use
        # -fdebug-prefix-map to compile
        if checkbuildpath(sourcefile, d):
            localsrc_prefix = workparentdir + "/"
        else:
            localsrc_prefix = "/usr/src/debug/"

        # Create the debugsrc directory hierarchy under PKGD, remembering
        # which levels we had to create so empty ones can be removed again
        # at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        # Shell pipeline: unique NUL-separated source list -> drop
        # compiler-internal entries -> keep only this package's files ->
        # strip the local prefix -> copy into PKGD with cpio
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        # Remove prefix in the source paths
        processdebugsrc += "sed 's#%s##g' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
424
425#
426# Package data handling routines
427#
428
def get_package_mapping (pkg, basepkg, d):
    """
    Map an internal package name onto its renamed (PKG_<pkg>) form, if one
    has been recorded in the pkgdata; otherwise return 'pkg' unchanged.
    """
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    mapped = subdata.get("PKG_%s" % pkg)

    if mapped is not None:
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and mapped == basepkg:
            return pkg
        return mapped

    return pkg
442
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra metadata fields for the given backend (rpm/ipk/deb), taken
    from PACKAGE_ADD_METADATA_<TYPE> if set, else PACKAGE_ADD_METADATA.
    Returns None when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    candidates = ("%s_%s" % (base_key, pkg_type.upper()), base_key)
    for key in candidates:
        if d.getVar(key, False) is not None:
            d.setVarFlag(key, "type", "list")
            # Default to newline-separated entries unless overridden
            if d.getVarFlag(key, "separator", True) is None:
                d.setVarFlag(key, "separator", "\\n")
            fields = [field.strip() for field in oe.data.typed_value(key, d)]
            return "\n".join(fields).strip()
453
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite the dependency variable 'varname' (e.g. RDEPENDS_<pkg>) so that
    every entry uses the renamed package name from get_package_mapping().
    """
    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))

    # Nothing to remap for packagegroup recipes
    if bb.data.inherits_class('packagegroup', d):
        return

    renamed = {}
    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    for dep, constraint in deps.items():
        renamed[get_package_mapping(dep, pkg, d)] = constraint

    d.setVar(varname, bb.utils.join_deps(renamed, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
469
470#
471# Package functions suitable for inclusion in PACKAGEFUNCS
472#
473
python package_get_auto_pr() {
    # Resolve the automatic PR (package revision) value, either from a
    # lockdown export, or from a remote PR service; also substitutes the
    # AUTOINC placeholder in PKGV.
    import oe.prservice
    import re

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV", True)

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST', True):
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
        return

    auto_pr = None
    pv = d.getVar("PV", True)
    version = d.getVar("PRAUTOINX", True)
    pkgarch = d.getVar("PACKAGE_ARCH", True)
    checksum = d.getVar("BB_TASKHASH", True)

    # Lockdown mode: PR values come from previously exported PRAUTO_*
    # variables instead of a live PR service connection.
    if d.getVar('PRSERV_LOCKDOWN', True):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        # Reuse an existing connection to the PR service if one was cached
        conn = d.getVar("__PRSERV_CONN", True)
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))

            auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
523
LOCALEBASEPN ??= "${PN}"

python package_do_split_locales() {
    # Split installed locale data (${datadir}/locale/<l>) out of the main
    # package into one ${LOCALEBASEPN}-locale-<l> package per locale.
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all -locale package is dropped in favour of the per-locale
    # packages generated below
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
580
python perform_packagecopy () {
    # Seed PKGD with a copy of everything installed into ${D}; all later
    # packaging steps operate on this copy, not on ${D} itself.
    installdir = d.getVar('D', True)
    pkgdir = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (installdir, pkgdir)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    relocatable = bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d)
    if relocatable:
        rpath_replace (pkgdir, d)
}
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"
599
# We generate a master list of directories to process, we start by
# seeding this list with reasonable defaults, then load from
# the fs-perms.txt files
python fixup_perms () {
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occured
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" (or empty) means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # A later entry for the same path overrides an earlier
                    # one, whichever table the earlier entry was in
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Now apply the mode/owner/group fixes, optionally walking each tree
    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
837
python split_and_strip_files () {
    # Split every ELF binary under PKGD into a stripped runtime file plus a
    # separate debug-info file (layout chosen by PACKAGE_DEBUG_SPLIT_STYLE),
    # then strip the runtime binaries and kernel modules. Hardlink groups are
    # tracked so the generated debug files mirror the same hardlink structure.
    import stat, errno

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = ""
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    # List of source files referenced by the split debug info; rebuilt from
    # scratch on every run.
    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    bb.utils.remove(sourcefile)

    os.chdir(dvar)

    # Classify a path using the 'file' utility.
    # Return type (bits):
    # 0 - not elf
    # 1 - ELF
    # 2 - stripped
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module
    def isELF(path):
        type = 0
        ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))

        if ret:
            msg = "split_and_strip_files: 'file %s' failed" % path
            package_qa_handle_error("split-strip", msg, d)
            return type

        # Not stripped
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type


    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    kernmods = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                # Kernel modules are stripped separately (type bit 16) and are
                # never debug-split.
                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check if it's an executable, or a library under libdir/base_libdir
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)):
                    # If it's a symlink, and points to an ELF file, we capture the readlink target
                    if cpath.islink(file):
                        target = os.readlink(file)
                        if isELF(ltarget):
                            #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                            symlinks[file] = target
                        continue

                    # It's a file (or hardlink), not a link
                    # ...but is it ELF, and is it already stripped?
                    elf_file = isELF(file)
                    if elf_file & 1:
                        if elf_file & 2:
                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
                                bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                            else:
                                msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                                package_qa_handle_error("already-stripped", msg, d)
                            continue

                        # At this point we have an unstripped elf file. We need to:
                        #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                        #  b) Only strip any hardlinked file once (no races)
                        #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                        # Use a reference of device ID and inode number to identify files
                        file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                        if file_reference in inodes:
                            # Already seen this inode: re-link to the first copy so
                            # the whole group shares the broken-out file below.
                            os.unlink(file)
                            os.link(inodes[file_reference][0], file)
                            inodes[file_reference].append(file)
                        else:
                            inodes[file_reference] = [file]
                            # break hardlink
                            bb.utils.copyfile(file, file)
                            # Only the first file of a hardlink group is split;
                            # the others get hardlinked debug files later.
                            elffiles[file] = elf_file
                        # Modified the file so clear the cache
                        cpath.updatecache(file)

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
        for file in elffiles:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest

            # Split the file...
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Split %s -> %s" % (file, fpath))
            # Only store off the hard link reference if we successfully split!
            splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                target = inodes[ref][0][len(dvar):]
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                #bb.note("Link %s -> %s" % (fpath, ftarget))
                os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            # Rebuild the symlink target path in debug-file space, keeping
            # relative ("..") targets relative.
            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        strip = d.getVar("STRIP", True)
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            sfiles.append((f, 16, strip))

        # Stripping is independent per file, so run it in parallel.
        oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)

    #
    # End of strip
    #
}
1055
1056python populate_packages () {
1057 import glob, re
1058
1059 workdir = d.getVar('WORKDIR', True)
1060 outdir = d.getVar('DEPLOY_DIR', True)
1061 dvar = d.getVar('PKGD', True)
1062 packages = d.getVar('PACKAGES', True)
1063 pn = d.getVar('PN', True)
1064
1065 bb.utils.mkdirhier(outdir)
1066 os.chdir(dvar)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001067
1068 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001069
1070 # Sanity check PACKAGES for duplicates
1071 # Sanity should be moved to sanity.bbclass once we have the infrastucture
1072 package_list = []
1073
1074 for pkg in packages.split():
1075 if pkg in package_list:
1076 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1077 package_qa_handle_error("packages-list", msg, d)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001078 elif autodebug and pkg.endswith("-dbg"):
1079 package_list.insert(0, pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001080 else:
1081 package_list.append(pkg)
1082 d.setVar('PACKAGES', ' '.join(package_list))
1083 pkgdest = d.getVar('PKGDEST', True)
1084
1085 seen = []
1086
1087 # os.mkdir masks the permissions with umask so we have to unset it first
1088 oldumask = os.umask(0)
1089
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001090 debug = []
1091 for root, dirs, files in cpath.walk(dvar):
1092 dir = root[len(dvar):]
1093 if not dir:
1094 dir = os.sep
1095 for f in (files + dirs):
1096 path = "." + os.path.join(dir, f)
1097 if "/.debug/" in path or path.endswith("/.debug"):
1098 debug.append(path)
1099
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001100 for pkg in package_list:
1101 root = os.path.join(pkgdest, pkg)
1102 bb.utils.mkdirhier(root)
1103
1104 filesvar = d.getVar('FILES_%s' % pkg, True) or ""
1105 if "//" in filesvar:
1106 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1107 package_qa_handle_error("files-invalid", msg, d)
1108 filesvar.replace("//", "/")
1109
1110 origfiles = filesvar.split()
1111 files = files_from_filevars(origfiles)
1112
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001113 if autodebug and pkg.endswith("-dbg"):
1114 files.extend(debug)
1115
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001116 for file in files:
1117 if (not cpath.islink(file)) and (not cpath.exists(file)):
1118 continue
1119 if file in seen:
1120 continue
1121 seen.append(file)
1122
1123 def mkdir(src, dest, p):
1124 src = os.path.join(src, p)
1125 dest = os.path.join(dest, p)
1126 fstat = cpath.stat(src)
1127 os.mkdir(dest, fstat.st_mode)
1128 os.chown(dest, fstat.st_uid, fstat.st_gid)
1129 if p not in seen:
1130 seen.append(p)
1131 cpath.updatecache(dest)
1132
1133 def mkdir_recurse(src, dest, paths):
1134 if cpath.exists(dest + '/' + paths):
1135 return
1136 while paths.startswith("./"):
1137 paths = paths[2:]
1138 p = "."
1139 for c in paths.split("/"):
1140 p = os.path.join(p, c)
1141 if not cpath.exists(os.path.join(dest, p)):
1142 mkdir(src, dest, p)
1143
1144 if cpath.isdir(file) and not cpath.islink(file):
1145 mkdir_recurse(dvar, root, file)
1146 continue
1147
1148 mkdir_recurse(dvar, root, os.path.dirname(file))
1149 fpath = os.path.join(root,file)
1150 if not cpath.islink(file):
1151 os.link(file, fpath)
1152 fstat = cpath.stat(file)
1153 os.chmod(fpath, fstat.st_mode)
1154 os.chown(fpath, fstat.st_uid, fstat.st_gid)
1155 continue
1156 ret = bb.utils.copyfile(file, fpath)
1157 if ret is False or ret == 0:
1158 raise bb.build.FuncFailed("File population failed")
1159
1160 os.umask(oldumask)
1161 os.chdir(workdir)
1162
1163 # Handle LICENSE_EXCLUSION
1164 package_list = []
1165 for pkg in packages.split():
1166 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
1167 msg = "%s has an incompatible license. Excluding from packaging." % pkg
1168 package_qa_handle_error("incompatible-license", msg, d)
1169 else:
1170 package_list.append(pkg)
1171 d.setVar('PACKAGES', ' '.join(package_list))
1172
1173 unshipped = []
1174 for root, dirs, files in cpath.walk(dvar):
1175 dir = root[len(dvar):]
1176 if not dir:
1177 dir = os.sep
1178 for f in (files + dirs):
1179 path = os.path.join(dir, f)
1180 if ('.' + path) not in seen:
1181 unshipped.append(path)
1182
1183 if unshipped != []:
1184 msg = pn + ": Files/directories were installed but not shipped in any package:"
1185 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
1186 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1187 else:
1188 for f in unshipped:
1189 msg = msg + "\n " + f
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001190 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1191 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001192 package_qa_handle_error("installed-vs-shipped", msg, d)
1193}
1194populate_packages[dirs] = "${D}"
1195
1196python package_fixsymlinks () {
1197 import errno
1198 pkgdest = d.getVar('PKGDEST', True)
1199 packages = d.getVar("PACKAGES", False).split()
1200
1201 dangling_links = {}
1202 pkg_files = {}
1203 for pkg in packages:
1204 dangling_links[pkg] = []
1205 pkg_files[pkg] = []
1206 inst_root = os.path.join(pkgdest, pkg)
1207 for path in pkgfiles[pkg]:
1208 rpath = path[len(inst_root):]
1209 pkg_files[pkg].append(rpath)
1210 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1211 if not cpath.lexists(rtarget):
1212 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1213
1214 newrdepends = {}
1215 for pkg in dangling_links:
1216 for l in dangling_links[pkg]:
1217 found = False
1218 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1219 for p in packages:
1220 if l in pkg_files[p]:
1221 found = True
1222 bb.debug(1, "target found in %s" % p)
1223 if p == pkg:
1224 break
1225 if pkg not in newrdepends:
1226 newrdepends[pkg] = []
1227 newrdepends[pkg].append(p)
1228 break
1229 if found == False:
1230 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1231
1232 for pkg in newrdepends:
1233 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1234 for p in newrdepends[pkg]:
1235 if p not in rdepends:
1236 rdepends[p] = []
1237 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1238}
1239
1240
1241python package_package_name_hook() {
1242 """
1243 A package_name_hook function can be used to rewrite the package names by
1244 changing PKG. For an example, see debian.bbclass.
1245 """
1246 pass
1247}
1248
1249EXPORT_FUNCTIONS package_name_hook
1250
1251
1252PKGDESTWORK = "${WORKDIR}/pkgdata"
1253
python emit_pkgdata() {
    # Write the packaging metadata for every package in PACKAGES out to
    # PKGDESTWORK (pkgdata): a top-level ${PN} file listing PACKAGES, one
    # runtime/<pkg> file per package with its variables and size/file info,
    # plus runtime-reverse/runtime-rprovides lookup symlinks and .packaged
    # markers for non-empty (or ALLOW_EMPTY) packages.
    from glob import glob
    import json

    def write_if_exists(f, pkg, var):
        # Write "VAR_pkg: value" if the package-specific variable is set,
        # otherwise fall back to the plain "VAR: value" form. Returns the
        # value written (or None) so callers such as the RPROVIDES handling
        # below can reuse it.
        def encode(str):
            import codecs
            # NOTE(review): "string_escape" is a Python 2 only codec —
            # confirm the interpreter version before porting this class.
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # Emit a <variant>-<pn> pkgdata file for each multilib variant,
        # listing the variant-prefixed package names.
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Emit runtime/<variant>-<pkg> stub files mapping back to the base
        # package name.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    # NOTE(review): unlike the other writers this line has no
                    # trailing newline — confirm whether readers rely on that.
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # Default PKG_<pkg> to the package name if no hook renamed it.
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        # Record per-file sizes and the package total for FILES_INFO/PKGSIZE.
        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            total_size += fstat.st_size
            files[os.sep + relpth] = fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        rprov = write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for rprovides lookup
        if rprov:
            for p in rprov.strip().split():
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            # Marker file: this package actually gets packaged.
            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
1395
ldconfig_postinst_fragment() {
# Refresh the runtime linker cache after install, but only when running on
# the target itself ($D empty) and only if ldconfig exists on the image.
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1401
1402RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1403
1404# Collect perfile run-time dependency metadata
1405# Output:
1406# FILERPROVIDESFLIST_pkg - list of all files w/ deps
1407# FILERPROVIDES_filepath_pkg - per file dep
1408#
1409# FILERDEPENDSFLIST_pkg - list of all files w/ deps
1410# FILERDEPENDS_filepath_pkg - per file dep
1411
1412python package_do_filedeps() {
1413 if d.getVar('SKIP_FILEDEPS', True) == '1':
1414 return
1415
1416 pkgdest = d.getVar('PKGDEST', True)
1417 packages = d.getVar('PACKAGES', True)
1418 rpmdeps = d.getVar('RPMDEPS', True)
1419
1420 def chunks(files, n):
1421 return [files[i:i+n] for i in range(0, len(files), n)]
1422
1423 pkglist = []
1424 for pkg in packages.split():
1425 if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
1426 continue
1427 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
1428 continue
1429 for files in chunks(pkgfiles[pkg], 100):
1430 pkglist.append((pkg, files, rpmdeps, pkgdest))
1431
1432 processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)
1433
1434 provides_files = {}
1435 requires_files = {}
1436
1437 for result in processed:
1438 (pkg, provides, requires) = result
1439
1440 if pkg not in provides_files:
1441 provides_files[pkg] = []
1442 if pkg not in requires_files:
1443 requires_files[pkg] = []
1444
1445 for file in provides:
1446 provides_files[pkg].append(file)
1447 key = "FILERPROVIDES_" + file + "_" + pkg
1448 d.setVar(key, " ".join(provides[file]))
1449
1450 for file in requires:
1451 requires_files[pkg].append(file)
1452 key = "FILERDEPENDS_" + file + "_" + pkg
1453 d.setVar(key, " ".join(requires[file]))
1454
1455 for pkg in requires_files:
1456 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
1457 for pkg in provides_files:
1458 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
1459}
1460
1461SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
1462SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1463
1464python package_do_shlibs() {
1465 import re, pipes
1466 import subprocess as sub
1467
1468 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
1469 if exclude_shlibs:
1470 bb.note("not generating shlibs")
1471 return
1472
1473 lib_re = re.compile("^.*\.so")
1474 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
1475
1476 packages = d.getVar('PACKAGES', True)
1477 targetos = d.getVar('TARGET_OS', True)
1478
1479 workdir = d.getVar('WORKDIR', True)
1480
1481 ver = d.getVar('PKGV', True)
1482 if not ver:
1483 msg = "PKGV not defined"
1484 package_qa_handle_error("pkgv-undefined", msg, d)
1485 return
1486
1487 pkgdest = d.getVar('PKGDEST', True)
1488
1489 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1490
1491 # Take shared lock since we're only reading, not writing
1492 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1493
1494 def linux_so(file, needed, sonames, renames, pkgver):
1495 needs_ldconfig = False
1496 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1497 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
1498 fd = os.popen(cmd)
1499 lines = fd.readlines()
1500 fd.close()
1501 rpath = []
1502 for l in lines:
1503 m = re.match("\s+RPATH\s+([^\s]*)", l)
1504 if m:
1505 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1506 rpath = map(os.path.normpath, rpaths)
1507 for l in lines:
1508 m = re.match("\s+NEEDED\s+([^\s]*)", l)
1509 if m:
1510 dep = m.group(1)
1511 if dep not in needed[pkg]:
1512 needed[pkg].append((dep, file, rpath))
1513 m = re.match("\s+SONAME\s+([^\s]*)", l)
1514 if m:
1515 this_soname = m.group(1)
1516 prov = (this_soname, ldir, pkgver)
1517 if not prov in sonames:
1518 # if library is private (only used by package) then do not build shlib for it
1519 if not private_libs or this_soname not in private_libs:
1520 sonames.append(prov)
1521 if libdir_re.match(os.path.dirname(file)):
1522 needs_ldconfig = True
1523 if snap_symlinks and (os.path.basename(file) != this_soname):
1524 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1525 return needs_ldconfig
1526
1527 def darwin_so(file, needed, sonames, renames, pkgver):
1528 if not os.path.exists(file):
1529 return
1530 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1531
1532 def get_combinations(base):
1533 #
1534 # Given a base library name, find all combinations of this split by "." and "-"
1535 #
1536 combos = []
1537 options = base.split(".")
1538 for i in range(1, len(options) + 1):
1539 combos.append(".".join(options[0:i]))
1540 options = base.split("-")
1541 for i in range(1, len(options) + 1):
1542 combos.append("-".join(options[0:i]))
1543 return combos
1544
1545 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
1546 # Drop suffix
1547 name = os.path.basename(file).rsplit(".",1)[0]
1548 # Find all combinations
1549 combos = get_combinations(name)
1550 for combo in combos:
1551 if not combo in sonames:
1552 prov = (combo, ldir, pkgver)
1553 sonames.append(prov)
1554 if file.endswith('.dylib') or file.endswith('.so'):
1555 rpath = []
1556 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
1557 err, out = p.communicate()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001558 # If returned successfully, process stderr for results
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001559 if p.returncode == 0:
1560 for l in err.split("\n"):
1561 l = l.strip()
1562 if l.startswith('path '):
1563 rpath.append(l.split()[1])
1564
1565 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
1566 err, out = p.communicate()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001567 # If returned successfully, process stderr for results
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001568 if p.returncode == 0:
1569 for l in err.split("\n"):
1570 l = l.strip()
1571 if not l or l.endswith(":"):
1572 continue
1573 if "is not an object file" in l:
1574 continue
1575 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1576 if name and name not in needed[pkg]:
1577 needed[pkg].append((name, file, []))
1578
1579 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
1580 snap_symlinks = True
1581 else:
1582 snap_symlinks = False
1583
1584 if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
1585 use_ldconfig = True
1586 else:
1587 use_ldconfig = False
1588
1589 needed = {}
1590 shlib_provider = oe.package.read_shlib_providers(d)
1591
1592 for pkg in packages.split():
1593 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
1594 private_libs = private_libs.split()
1595 needs_ldconfig = False
1596 bb.debug(2, "calculating shlib provides for %s" % pkg)
1597
1598 pkgver = d.getVar('PKGV_' + pkg, True)
1599 if not pkgver:
1600 pkgver = d.getVar('PV_' + pkg, True)
1601 if not pkgver:
1602 pkgver = ver
1603
1604 needed[pkg] = []
1605 sonames = list()
1606 renames = list()
1607 for file in pkgfiles[pkg]:
1608 soname = None
1609 if cpath.islink(file):
1610 continue
1611 if targetos == "darwin" or targetos == "darwin8":
1612 darwin_so(file, needed, sonames, renames, pkgver)
1613 elif os.access(file, os.X_OK) or lib_re.match(file):
1614 ldconfig = linux_so(file, needed, sonames, renames, pkgver)
1615 needs_ldconfig = needs_ldconfig or ldconfig
1616 for (old, new) in renames:
1617 bb.note("Renaming %s to %s" % (old, new))
1618 os.rename(old, new)
1619 pkgfiles[pkg].remove(old)
1620
1621 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1622 if len(sonames):
1623 fd = open(shlibs_file, 'w')
1624 for s in sonames:
1625 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1626 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1627 if old_pkg != pkg:
1628 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1629 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1630 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1631 if s[0] not in shlib_provider:
1632 shlib_provider[s[0]] = {}
1633 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1634 fd.close()
1635 if needs_ldconfig and use_ldconfig:
1636 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1637 postinst = d.getVar('pkg_postinst_%s' % pkg, True)
1638 if not postinst:
1639 postinst = '#!/bin/sh\n'
1640 postinst += d.getVar('ldconfig_postinst_fragment', True)
1641 d.setVar('pkg_postinst_%s' % pkg, postinst)
1642 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1643
1644 bb.utils.unlockfile(lf)
1645
1646 assumed_libs = d.getVar('ASSUME_SHLIBS', True)
1647 if assumed_libs:
1648 libdir = d.getVar("libdir", True)
1649 for e in assumed_libs.split():
1650 l, dep_pkg = e.split(":")
1651 lib_ver = None
1652 dep_pkg = dep_pkg.rsplit("_", 1)
1653 if len(dep_pkg) == 2:
1654 lib_ver = dep_pkg[1]
1655 dep_pkg = dep_pkg[0]
1656 if l not in shlib_provider:
1657 shlib_provider[l] = {}
1658 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1659
1660 libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
1661
1662 for pkg in packages.split():
1663 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1664
1665 deps = list()
1666 for n in needed[pkg]:
1667 # if n is in private libraries, don't try to search provider for it
1668 # this could cause problem in case some abc.bb provides private
1669 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1670 # but skipping it is still better alternative than providing own
1671 # version and then adding runtime dependency for the same system library
1672 if private_libs and n[0] in private_libs:
1673 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1674 continue
1675 if n[0] in shlib_provider.keys():
1676 shlib_provider_path = list()
1677 for k in shlib_provider[n[0]].keys():
1678 shlib_provider_path.append(k)
1679 match = None
1680 for p in n[2] + shlib_provider_path + libsearchpath:
1681 if p in shlib_provider[n[0]]:
1682 match = p
1683 break
1684 if match:
1685 (dep_pkg, ver_needed) = shlib_provider[n[0]][match]
1686
1687 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1688
1689 if dep_pkg == pkg:
1690 continue
1691
1692 if ver_needed:
1693 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1694 else:
1695 dep = dep_pkg
1696 if not dep in deps:
1697 deps.append(dep)
1698 continue
1699 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1700
1701 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1702 if os.path.exists(deps_file):
1703 os.remove(deps_file)
1704 if len(deps):
1705 fd = open(deps_file, 'w')
1706 for dep in deps:
1707 fd.write(dep + '\n')
1708 fd.close()
1709}
1710
python package_do_pkgconfig () {
    # Track which packages provide which pkg-config (.pc) modules and which
    # modules each package's .pc files require, publish the provides as
    # .pclist files in SHLIBSWORKDIR, and write cross-package requirements
    # to per-package .pcdeps files (consumed later by read_shlibdeps).
    import re

    packages = d.getVar('PACKAGES', True)
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                # Use a private datastore per .pc file so variable expansion
                # (${prefix} etc.) behaves like pkg-config's own substitution.
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    # "var=value" lines define substitution variables...
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    # ...while "Field: value" lines are keywords; only
                    # Requires contributes to inter-package dependencies.
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # Publish this recipe's provides for other recipes to consume.
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve each needed module to a providing package and record the
    # result in <pkg>.pcdeps.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1801
def read_libdep_files(d):
    """
    Read back the per-package library dependency data emitted by earlier
    packaging steps (package_do_shlibs, package_do_pkgconfig, ...).

    Returns a dict mapping each package in PACKAGES to a dict of
    {dependency name: [version constraints]}, merged from the package's
    .shlibdeps, .pcdeps and .clilibdeps files under PKGDEST.  Missing or
    unreadable files are simply skipped.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                # 'with' guarantees the file is closed even if parsing raises
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Bug fix: the original called l.rstrip() and discarded
                    # the result, passing the raw line (trailing newline
                    # included) to explode_dep_versions2.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        # First file wins: keep an existing constraint rather
                        # than overwriting it with one from a later file.
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1820
python read_shlibdeps () {
    # Fold the collected library dependency data (.shlibdeps, .pcdeps,
    # .clilibdeps) into each package's RDEPENDS, keeping any version
    # constraints that are not already present.
    pkglibdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES', True).split():
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for dep, constraints in pkglibdeps[pkg].items():
            # Ensure the dependency exists even when it carries no
            # version comparison at all.
            existing = rdepends.setdefault(dep, [])
            for constraint in constraints:
                if constraint not in existing:
                    existing.append(constraint)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1836
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES', True)
    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()

    def skip_builddep(depend):
        # Build-time deps on native/cross tools or virtual/ providers have
        # no corresponding runtime package to recommend.
        return (depend.find('-native') != -1 or depend.find('-cross') != -1
                or depend.startswith('virtual/'))

    def skip_rdep(depend):
        # virtual-locale-* providers are synthetic runtime names.
        return depend.find('virtual-locale-') != -1

    def pkg_addrreclist(pkg, suffix, getname, depends, skip, d):
        # Shared worker for the (previously duplicated) build-time and
        # runtime variants: for each dependency not rejected by 'skip',
        # strip a trailing -dev/-dbg, map it through 'getname' and add the
        # result (unversioned) to RRECOMMENDS_<pkg>.
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in depends:
            if skip(depend):
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Depchain recommends derived from build-time DEPENDS.
        pkg_addrreclist(pkg, suffix, getname, depends, skip_builddep, d)

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Depchain recommends derived from runtime RDEPENDS.
        pkg_addrreclist(pkg, suffix, getname, rdepends, skip_rdep, d)

    def add_dep(list, dep):
        # Append while preserving first-seen order and uniqueness.
        if dep not in list:
            list.append(dep)

    # Unique build-time dependencies of the recipe.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
        add_dep(depends, dep)

    # Unique runtime dependencies across all split packages.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
            add_dep(rdepends, dep)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier (e.g. "-dev") to {package: (base name, name builder)}.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): the base is computed with pkg[:-len(prefix)],
                # i.e. stripped from the *end* despite this being a prefix
                # match; kept as-is for compatibility - confirm against
                # DEPCHAIN_PRE users before changing.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        # -dbg packages normally recommend the -dbg variants of the
        # libraries their parent links against.
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # Fall back to the generic depchain handling when explicitly
        # requested or for packagegroups (no libraries of their own).
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-variable opt-out flag.
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', True):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                # Single package with this modifier: use the union of all
                # packages' RDEPENDS.
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # Several packages share the modifier: use only the parent
                # package's RDEPENDS.
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
1958
# Since bitbake can't determine which variables are accessed during package
# iteration, we need to list them here:
# (gen_packagevar() below expands this into per-package vardeps such as
# FILES_<pkg>, so editing this list changes do_package's task signature.)
PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE"
1962
def gen_packagevar(d):
    """
    Build the space-separated list of per-package variable names
    (e.g. FILES_<pkg>, RDEPENDS_<pkg>, ...) used in do_package[vardeps]
    so that a change to any of them re-runs the task.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    vars = (d.getVar("PACKAGEVARS", True) or "").split()
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes.
        # Bug fix: this append now lives inside the package loop. It used to
        # sit after the loop, so only the *last* package got an entry and an
        # empty PACKAGES raised NameError on the stale loop variable.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1975
# Hook for recipes/classes to mangle the copied tree before splitting.
PACKAGE_PREPROCESS_FUNCS ?= ""
# Functions for setting up PKGD
PACKAGEBUILDPKGD ?= " \
                perform_packagecopy \
                ${PACKAGE_PREPROCESS_FUNCS} \
                split_and_strip_files \
                fixup_perms \
                "
# Functions which split PKGD up into separate packages
PACKAGESPLITFUNCS ?= " \
                package_do_split_locales \
                populate_packages"
# Functions which process metadata based on split packages
# (run in order by do_package; see the step list at the top of this class)
PACKAGEFUNCS += " \
                package_fixsymlinks \
                package_name_hook \
                package_do_filedeps \
                package_do_shlibs \
                package_do_pkgconfig \
                read_shlibdeps \
                package_depchains \
                emit_pkgdata"
1998
python do_package () {
    # Top-level packaging task: sanity-check the setup, run the
    # PACKAGEBUILDPKGD functions to populate PKGD from D, split PKGD into
    # per-package trees under PKGDEST, then run the PACKAGEFUNCS metadata
    # generators over the result.
    #
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    # Fetch PRAUTO from the PR service (or local cache) before any
    # packaging data is emitted.
    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh CachedPath, presumably to drop stat results made stale by the
    # PACKAGEBUILDPKGD functions above - TODO confirm.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    # (PACKAGES is re-read here since the split functions may have changed it)
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    # QA_SANE is cleared by package_qa_handle_error() for fatal issues;
    # fail the task once all functions have had a chance to report.
    qa_sane = d.getVar("QA_SANE", True)
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")
}
2084
# Task wiring and shared-state (sstate) configuration for do_package and
# do_packagedata.
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
# Include the function lists and all per-package variables in the task
# signature so metadata changes re-trigger packaging.
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# Lock shared by do_package and do_packagedata while touching the global
# package output area.
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"

# Restore do_package output from shared state instead of rebuilding.
python do_package_setscene () {
    sstate_setscene(d)
}
addtask do_package_setscene

# No-op task body: the real work is the sstate transfer of PKGDESTWORK
# into PKGDATA_DIR configured below.
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package

SSTATETASKS += "do_packagedata"
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
# PKGDATA_DIR is machine-specific, so stamp per MACHINE.
do_packagedata[stamp-extra-info] = "${MACHINE}"

# Restore do_packagedata output from shared state instead of rebuilding.
python do_packagedata_setscene () {
    sstate_setscene(d)
}
addtask do_packagedata_setscene
2117
2118#
2119# Helper functions for the package writing classes
2120#
2121
2122def mapping_rename_hook(d):
2123 """
2124 Rewrite variables to account for package renaming in things
2125 like debian.bbclass or manual PKG variable name changes
2126 """
2127 pkg = d.getVar("PKG", True)
2128 runtime_mapping_rename("RDEPENDS", pkg, d)
2129 runtime_mapping_rename("RRECOMMENDS", pkg, d)
2130 runtime_mapping_rename("RSUGGESTS", pkg, d)
2131