1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include the split debug info and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
25# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
28# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29# dependencies found. Also stores the package name so anyone else using this library
30# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
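# The pipeline above is driven by the PACKAGEFUNCS variable, so extra steps can
# be appended from a recipe or class. A minimal, purely illustrative sketch
# (the function name is hypothetical):
#
#   PACKAGEFUNCS += "my_extra_package_step"
#
#   python my_extra_package_step () {
#       # runs after the default steps; PKGDEST is already populated
#       bb.note("PACKAGES is now: %s" % d.getVar('PACKAGES', True))
#   }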
41inherit packagedata
42inherit chrpath
43
44# Need the package_qa_handle_error() in insane.bbclass
45inherit insane
46
47PKGD = "${WORKDIR}/package"
48PKGDEST = "${WORKDIR}/packages-split"
49
50LOCALE_SECTION ?= ''
51
52ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
53
54# rpm is used for the per-file dependency identification
55PACKAGE_DEPENDS += "rpm-native"
56
57def legitimize_package_name(s):
58 """
59 Make sure package names are legitimate strings
60 """
61 import re
62
63 def fixutf(m):
64 cp = m.group(1)
65 if cp:
66 return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
67
68 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
69 s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)
70
71 # Remaining package name validity fixes
72 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
73
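# Purely illustrative example of the transformation above:
#   legitimize_package_name("Foo_Bar@2,3/x")  ->  "foo-bar+2+3-x"
# Glibc-style "<U00E9>" codepoint markers are decoded to the real character
# before the lowercasing and substitution pass.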
74def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
75 """
76 Used in .bb files to split up dynamically generated subpackages of a
77 given package, usually plugins or modules.
78
79 Arguments:
80 root -- the path in which to search
81 file_regex -- regular expression to match searched files. Use
82 parentheses () to mark the part of this expression
83 that should be used to derive the module name (to be
84 substituted where %s is used in other function
85 arguments as noted below)
86 output_pattern -- pattern to use for the package names. Must include %s.
87 description -- description to set for each package. Must include %s.
88 postinst -- postinstall script to use for all packages (as a
89 string)
90 recursive -- True to perform a recursive search - default False
91 hook -- a hook function to be called for every match. The
92 function will be called with the following arguments
93 (in the order listed):
94 f: full path to the file/directory match
95 pkg: the package name
96 file_regex: as above
97 output_pattern: as above
98 modulename: the module name derived using file_regex
99 extra_depends -- extra runtime dependencies (RDEPENDS) to be set for
100 all packages. The default value of None causes a
101 dependency on the main package (${PN}) - if you do
102 not want this, pass '' for this parameter.
103 aux_files_pattern -- extra item(s) to be added to FILES for each
104 package. Can be a single string item or a list of
105 strings for multiple items. Must include %s.
106 postrm -- postrm script to use for all packages (as a string)
107 allow_dirs -- True to allow directories to be matched - default False
108 prepend -- if True, prepend created packages to PACKAGES instead
109 of the default False which appends them
110 match_path -- match file_regex on the whole relative path to the
111 root rather than just the file name
112 aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
113 each package, using the actual derived module name
114 rather than converting it to something legal for a
115 package name. Can be a single string item or a list
116 of strings for multiple items. Must include %s.
117 allow_links -- True to allow symlinks to be matched - default False
118 summary -- Summary to set for each package. Must include %s;
119 defaults to description if not set.
120
121 """
122
123 dvar = d.getVar('PKGD', True)
124 root = d.expand(root)
125 output_pattern = d.expand(output_pattern)
126 extra_depends = d.expand(extra_depends)
127
128 # If the root directory doesn't exist, don't error out later but silently do
129 # no splitting.
130 if not os.path.exists(dvar + root):
131 return []
132
133 ml = d.getVar("MLPREFIX", True)
134 if ml:
135 if not output_pattern.startswith(ml):
136 output_pattern = ml + output_pattern
137
138 newdeps = []
139 for dep in (extra_depends or "").split():
140 if dep.startswith(ml):
141 newdeps.append(dep)
142 else:
143 newdeps.append(ml + dep)
144 if newdeps:
145 extra_depends = " ".join(newdeps)
146
147
148 packages = d.getVar('PACKAGES', True).split()
149 split_packages = set()
150
151 if postinst:
152 postinst = '#!/bin/sh\n' + postinst + '\n'
153 if postrm:
154 postrm = '#!/bin/sh\n' + postrm + '\n'
155 if not recursive:
156 objs = os.listdir(dvar + root)
157 else:
158 objs = []
159 for walkroot, dirs, files in os.walk(dvar + root):
160 for file in files:
161 relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
162 if relpath:
163 objs.append(relpath)
164
165 if extra_depends == None:
166 extra_depends = d.getVar("PN", True)
167
168 if not summary:
169 summary = description
170
171 for o in sorted(objs):
172 import re, stat
173 if match_path:
174 m = re.match(file_regex, o)
175 else:
176 m = re.match(file_regex, os.path.basename(o))
177
178 if not m:
179 continue
180 f = os.path.join(dvar + root, o)
181 mode = os.lstat(f).st_mode
182 if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
183 continue
184 on = legitimize_package_name(m.group(1))
185 pkg = output_pattern % on
186 split_packages.add(pkg)
187 if not pkg in packages:
188 if prepend:
189 packages = [pkg] + packages
190 else:
191 packages.append(pkg)
192 oldfiles = d.getVar('FILES_' + pkg, True)
193 newfile = os.path.join(root, o)
194 # These names will be passed through glob() so if the filename actually
195 # contains * or ? (rare, but possible) we need to handle that specially
196 newfile = newfile.replace('*', '[*]')
197 newfile = newfile.replace('?', '[?]')
198 if not oldfiles:
199 the_files = [newfile]
200 if aux_files_pattern:
201 if type(aux_files_pattern) is list:
202 for fp in aux_files_pattern:
203 the_files.append(fp % on)
204 else:
205 the_files.append(aux_files_pattern % on)
206 if aux_files_pattern_verbatim:
207 if type(aux_files_pattern_verbatim) is list:
208 for fp in aux_files_pattern_verbatim:
209 the_files.append(fp % m.group(1))
210 else:
211 the_files.append(aux_files_pattern_verbatim % m.group(1))
212 d.setVar('FILES_' + pkg, " ".join(the_files))
213 else:
214 d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
215 if extra_depends != '':
216 d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
217 if not d.getVar('DESCRIPTION_' + pkg, True):
218 d.setVar('DESCRIPTION_' + pkg, description % on)
219 if not d.getVar('SUMMARY_' + pkg, True):
220 d.setVar('SUMMARY_' + pkg, summary % on)
221 if postinst:
222 d.setVar('pkg_postinst_' + pkg, postinst)
223 if postrm:
224 d.setVar('pkg_postrm_' + pkg, postrm)
225 if callable(hook):
226 hook(f, pkg, file_regex, output_pattern, m.group(1))
227
228 d.setVar('PACKAGES', ' '.join(packages))
229 return list(split_packages)
230
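# Illustrative (hypothetical) recipe usage of do_split_packages(), typically
# called from a populate_packages_prepend function to generate one package per
# plugin:
#
#   do_split_packages(d, d.expand('${libdir}/myapp/plugins'),
#                     '^libplugin_(.*)\.so$',
#                     'myapp-plugin-%s',
#                     'MyApp plugin for %s',
#                     extra_depends='')
#
# Each matching libplugin_<name>.so would then land in its own
# myapp-plugin-<name> package with FILES and DESCRIPTION set accordingly.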
231PACKAGE_DEPENDS += "file-native"
232
233python () {
234 if d.getVar('PACKAGES', True) != '':
235 deps = ""
236 for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
237 deps += " %s:do_populate_sysroot" % dep
238 d.appendVarFlag('do_package', 'depends', deps)
239
240 # shlibs requires any DEPENDS to have already been packaged for the *.list files
241 d.appendVarFlag('do_package', 'deptask', " do_packagedata")
242}
243
244# Get a list of files from file vars by searching files under current working directory
245# The list contains symlinks, directories and normal files.
246def files_from_filevars(filevars):
247 import os,glob
248 cpath = oe.cachedpath.CachedPath()
249 files = []
250 for f in filevars:
251 if os.path.isabs(f):
252 f = '.' + f
253 if not f.startswith("./"):
254 f = './' + f
255 globbed = glob.glob(f)
256 if globbed:
257 if [ f ] != globbed:
258 files += globbed
259 continue
260 files.append(f)
261
262 symlink_paths = []
263 for ind, f in enumerate(files):
264 # Handle directory symlinks. Truncate path to the lowest level symlink
265 parent = ''
266 for dirname in f.split('/')[:-1]:
267 parent = os.path.join(parent, dirname)
268 if dirname == '.':
269 continue
270 if cpath.islink(parent):
271 bb.warn("FILES contains file '%s' which resides under a "
272 "directory symlink. Please fix the recipe and use the "
273 "real path for the file." % f[1:])
274 symlink_paths.append(f)
275 files[ind] = parent
276 f = parent
277 break
278
279 if not cpath.islink(f):
280 if cpath.isdir(f):
281 newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
282 if newfiles:
283 files += newfiles
284
285 return files, symlink_paths
286
287# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
288def get_conffiles(pkg, d):
289 pkgdest = d.getVar('PKGDEST', True)
290 root = os.path.join(pkgdest, pkg)
291 cwd = os.getcwd()
292 os.chdir(root)
293
294 conffiles = d.getVar('CONFFILES_%s' % pkg, True);
295 if conffiles == None:
296 conffiles = d.getVar('CONFFILES', True)
297 if conffiles == None:
298 conffiles = ""
299 conffiles = conffiles.split()
300 conf_orig_list = files_from_filevars(conffiles)[0]
301
302 # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
303 conf_list = []
304 for f in conf_orig_list:
305 if os.path.isdir(f):
306 continue
307 if os.path.islink(f):
308 continue
309 if not os.path.exists(f):
310 continue
311 conf_list.append(f)
312
313 # Remove the leading './'
314 for i in range(0, len(conf_list)):
315 conf_list[i] = conf_list[i][1:]
316
317 os.chdir(cwd)
318 return conf_list
319
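# Illustrative (hypothetical) recipe usage: files listed in CONFFILES are
# treated as configuration files by the packaging backends, e.g.
#
#   CONFFILES_${PN} = "${sysconfdir}/myapp/myapp.conf"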
320def checkbuildpath(file, d):
321 tmpdir = d.getVar('TMPDIR', True)
322 with open(file) as f:
323 file_content = f.read()
324 if tmpdir in file_content:
325 return True
326
327 return False
328
329def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
330 # Function to split a single file into two components, one is the stripped
331 # target system binary, the other contains any debugging information. The
332 # two files are linked to reference each other.
333 #
334 # sourcefile is also generated containing a list of debugsources
335
336 import stat
337
338 dvar = d.getVar('PKGD', True)
339 objcopy = d.getVar("OBJCOPY", True)
340 debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
341
342 # We ignore kernel modules, we don't generate debug info files.
343 if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
344 return 1
345
346 newmode = None
347 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
348 origmode = os.stat(file)[stat.ST_MODE]
349 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
350 os.chmod(file, newmode)
351
352 # We need to extract the debug src information here...
353 if debugsrcdir:
354 cmd = "'%s' -i -l '%s' '%s'" % (debugedit, sourcefile, file)
355 (retval, output) = oe.utils.getstatusoutput(cmd)
356 if retval:
357 bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
358
359 bb.utils.mkdirhier(os.path.dirname(debugfile))
360
361 cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
362 (retval, output) = oe.utils.getstatusoutput(cmd)
363 if retval:
364 bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
365
366 # Set the debuglink to have the view of the file path on the target
367 cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
368 (retval, output) = oe.utils.getstatusoutput(cmd)
369 if retval:
370 bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
371
372 if newmode:
373 os.chmod(file, origmode)
374
375 return 0
376
377def copydebugsources(debugsrcdir, d):
378 # The debug src information written out to sourcefile is further processed
379 # and copied to the destination here.
380
381 import stat
382
383 sourcefile = d.expand("${WORKDIR}/debugsources.list")
384 if debugsrcdir and os.path.isfile(sourcefile):
385 dvar = d.getVar('PKGD', True)
386 strip = d.getVar("STRIP", True)
387 objcopy = d.getVar("OBJCOPY", True)
388 debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
389 workdir = d.getVar("WORKDIR", True)
390 workparentdir = os.path.dirname(os.path.dirname(workdir))
391 workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
392
393 # If the build path exists in sourcefile, it means the toolchain did not use
394 # -fdebug-prefix-map when compiling
395 if checkbuildpath(sourcefile, d):
396 localsrc_prefix = workparentdir + "/"
397 else:
398 localsrc_prefix = "/usr/src/debug/"
399
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500400 nosuchdir = []
401 basepath = dvar
402 for p in debugsrcdir.split("/"):
403 basepath = basepath + "/" + p
404 if not cpath.exists(basepath):
405 nosuchdir.append(basepath)
406 bb.utils.mkdirhier(basepath)
407 cpath.updatecache(basepath)
408
409 processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
410 # We need to ignore files that are not actually ours
411 # we do this by only paying attention to items from this package
412 processdebugsrc += "fgrep -zw '%s' | "
413 # Remove the prefix from the source paths
414 processdebugsrc += "sed 's#%s##g' | "
415 processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"
416
417 cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
418 (retval, output) = oe.utils.getstatusoutput(cmd)
419 # Can "fail" if internal headers/transient sources are attempted
420 #if retval:
421 # bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))
422
423 # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
424 # Work around this by manually finding and copying any symbolic links that made it through.
425 cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
426 (retval, output) = oe.utils.getstatusoutput(cmd)
427 if retval:
428 bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))
429
430 # The copy by cpio may have resulted in some empty directories! Remove these
431 cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
432 (retval, output) = oe.utils.getstatusoutput(cmd)
433 if retval:
434 bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
435
436 # Also remove debugsrcdir if it's empty
437 for p in nosuchdir[::-1]:
438 if os.path.exists(p) and not os.listdir(p):
439 os.rmdir(p)
440
441#
442# Package data handling routines
443#
444
445def get_package_mapping (pkg, basepkg, d):
446 import oe.packagedata
447
448 data = oe.packagedata.read_subpkgdata(pkg, d)
449 key = "PKG_%s" % pkg
450
451 if key in data:
452 # Have to avoid undoing the write_extra_pkgs(global_variants...)
453 if bb.data.inherits_class('allarch', d) and data[key] == basepkg:
454 return pkg
455 return data[key]
456
457 return pkg
458
459def get_package_additional_metadata (pkg_type, d):
460 base_key = "PACKAGE_ADD_METADATA"
461 for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
462 if d.getVar(key, False) is None:
463 continue
464 d.setVarFlag(key, "type", "list")
465 if d.getVarFlag(key, "separator", True) is None:
466 d.setVarFlag(key, "separator", "\\n")
467 metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
468 return "\n".join(metadata_fields).strip()
469
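# Illustrative (hypothetical) usage: extra fields can be added to the generated
# package metadata either globally or per backend (RPM/DEB/IPK), with multiple
# fields separated by the "separator" varflag (newline by default):
#
#   PACKAGE_ADD_METADATA_DEB = "Vendor: Example Corp"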
470def runtime_mapping_rename (varname, pkg, d):
471 #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
472
473 if bb.data.inherits_class('packagegroup', d):
474 return
475
476 new_depends = {}
477 deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
478 for depend in deps:
479 new_depend = get_package_mapping(depend, pkg, d)
480 new_depends[new_depend] = deps[depend]
481
482 d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
483
484 #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
485
486#
487# Package functions suitable for inclusion in PACKAGEFUNCS
488#
489
490python package_get_auto_pr() {
491 import oe.prservice
492 import re
493
494 # Support per recipe PRSERV_HOST
495 pn = d.getVar('PN', True)
496 host = d.getVar("PRSERV_HOST_" + pn, True)
497 if not (host is None):
498 d.setVar("PRSERV_HOST", host)
499
500 pkgv = d.getVar("PKGV", True)
501
502 # PR Server not active, handle AUTOINC
503 if not d.getVar('PRSERV_HOST', True):
504 if 'AUTOINC' in pkgv:
505 d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
506 return
507
508 auto_pr = None
509 pv = d.getVar("PV", True)
510 version = d.getVar("PRAUTOINX", True)
511 pkgarch = d.getVar("PACKAGE_ARCH", True)
512 checksum = d.getVar("BB_TASKHASH", True)
513
514 if d.getVar('PRSERV_LOCKDOWN', True):
515 auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
516 if auto_pr is None:
517 bb.fatal("Can NOT get PRAUTO from lockdown exported file")
518 d.setVar('PRAUTO',str(auto_pr))
519 return
520
521 try:
522 conn = d.getVar("__PRSERV_CONN", True)
523 if conn is None:
524 conn = oe.prservice.prserv_make_conn(d)
525 if conn is not None:
526 if "AUTOINC" in pkgv:
527 srcpv = bb.fetch2.get_srcrev(d)
528 base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
529 value = conn.getPR(base_ver, pkgarch, srcpv)
530 d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))
531
532 auto_pr = conn.getPR(version, pkgarch, checksum)
533 except Exception as e:
534 bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
535 if auto_pr is None:
536 bb.fatal("Can NOT get PRAUTO from remote PR service")
537 d.setVar('PRAUTO',str(auto_pr))
538}
539
540LOCALEBASEPN ??= "${PN}"
541
542python package_do_split_locales() {
543 if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
544 bb.debug(1, "package requested not splitting locales")
545 return
546
547 packages = (d.getVar('PACKAGES', True) or "").split()
548
549 datadir = d.getVar('datadir', True)
550 if not datadir:
551 bb.note("datadir not defined")
552 return
553
554 dvar = d.getVar('PKGD', True)
555 pn = d.getVar('LOCALEBASEPN', True)
556
557 if pn + '-locale' in packages:
558 packages.remove(pn + '-locale')
559
560 localedir = os.path.join(dvar + datadir, 'locale')
561
562 if not cpath.isdir(localedir):
563 bb.debug(1, "No locale files in this package")
564 return
565
566 locales = os.listdir(localedir)
567
568 summary = d.getVar('SUMMARY', True) or pn
569 description = d.getVar('DESCRIPTION', True) or ""
570 locale_section = d.getVar('LOCALE_SECTION', True)
571 mlprefix = d.getVar('MLPREFIX', True) or ""
572 for l in sorted(locales):
573 ln = legitimize_package_name(l)
574 pkg = pn + '-locale-' + ln
575 packages.append(pkg)
576 d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
577 d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
578 d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
579 d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
580 d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
581 if locale_section:
582 d.setVar('SECTION_' + pkg, locale_section)
583
584 d.setVar('PACKAGES', ' '.join(packages))
585
586 # Disabled by RP 18/06/07
587 # Wildcards aren't supported in debian
588 # They break with ipkg since glibc-locale* will mean that
589 # glibc-localedata-translit* won't install as a dependency
590 # for some other package which breaks meta-toolchain
591 # Probably breaks since virtual-locale- isn't provided anywhere
592 #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
593 #rdep.append('%s-locale*' % pn)
594 #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
595}
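# Illustrative outcome (hypothetical recipe "myapp" shipping a de_DE locale):
# the loop above would add a "myapp-locale-de-de" package with
#   FILES       = ${datadir}/locale/de_DE
#   RPROVIDES   = myapp-locale de-de-translation
#   RRECOMMENDS = virtual-locale-de-de
# (the virtual/translation names gain the MLPREFIX when one is set).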
596
597python perform_packagecopy () {
598 dest = d.getVar('D', True)
599 dvar = d.getVar('PKGD', True)
600
601 # Start package population by taking a copy of the installed
602 # files to operate on
603 # Preserve sparse files and hard links
604 cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
605 (retval, output) = oe.utils.getstatusoutput(cmd)
606 if retval:
607 bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
608
609 # replace RPATHs for the nativesdk binaries, to make them relocatable
610 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
611 rpath_replace (dvar, d)
612}
613perform_packagecopy[cleandirs] = "${PKGD}"
614perform_packagecopy[dirs] = "${PKGD}"
615
616# We generate a master list of directories to process; we start by
617# seeding this list with reasonable defaults, then load from
618# the fs-perms.txt files
619python fixup_perms () {
620 import pwd, grp
621
622 # init using a string with the same format as a line as documented in
623 # the fs-perms.txt file
624 # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
625 # <path> link <link target>
626 #
627 # __str__ can be used to print out an entry in the input format
628 #
629 # if fs_perms_entry.path is None:
630 # an error occurred
631 # if fs_perms_entry.link, you can retrieve:
632 # fs_perms_entry.path = path
633 # fs_perms_entry.link = target of link
634 # if not fs_perms_entry.link, you can retrieve:
635 # fs_perms_entry.path = path
636 # fs_perms_entry.mode = expected dir mode or None
637 # fs_perms_entry.uid = expected uid or -1
638 # fs_perms_entry.gid = expected gid or -1
639 # fs_perms_entry.walk = 'true' or something else
640 # fs_perms_entry.fmode = expected file mode or None
641 # fs_perms_entry.fuid = expected file uid or -1
642 # fs_perms_entry.fgid = expected file gid or -1
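    # Illustrative (hypothetical) entries in the format described above:
    #   /usr/share/doc  0755 root root true 0644 root root
    #   /var/run        link /run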
643 class fs_perms_entry():
644 def __init__(self, line):
645 lsplit = line.split()
646 if len(lsplit) == 3 and lsplit[1].lower() == "link":
647 self._setlink(lsplit[0], lsplit[2])
648 elif len(lsplit) == 8:
649 self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
650 else:
651 msg = "Fixup Perms: invalid config line %s" % line
652 package_qa_handle_error("perm-config", msg, d)
653 self.path = None
654 self.link = None
655
656 def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
657 self.path = os.path.normpath(path)
658 self.link = None
659 self.mode = self._procmode(mode)
660 self.uid = self._procuid(uid)
661 self.gid = self._procgid(gid)
662 self.walk = walk.lower()
663 self.fmode = self._procmode(fmode)
664 self.fuid = self._procuid(fuid)
665 self.fgid = self._procgid(fgid)
666
667 def _setlink(self, path, link):
668 self.path = os.path.normpath(path)
669 self.link = link
670
671 def _procmode(self, mode):
672 if not mode or (mode and mode == "-"):
673 return None
674 else:
675 return int(mode,8)
676
677 # Note uid/gid -1 has special significance in os.lchown
678 def _procuid(self, uid):
679 if uid is None or uid == "-":
680 return -1
681 elif uid.isdigit():
682 return int(uid)
683 else:
684 return pwd.getpwnam(uid).pw_uid
685
686 def _procgid(self, gid):
687 if gid is None or gid == "-":
688 return -1
689 elif gid.isdigit():
690 return int(gid)
691 else:
692 return grp.getgrnam(gid).gr_gid
693
694 # Use for debugging the entries
695 def __str__(self):
696 if self.link:
697 return "%s link %s" % (self.path, self.link)
698 else:
699 mode = "-"
700 if self.mode:
701 mode = "0%o" % self.mode
702 fmode = "-"
703 if self.fmode:
704 fmode = "0%o" % self.fmode
705 uid = self._mapugid(self.uid)
706 gid = self._mapugid(self.gid)
707 fuid = self._mapugid(self.fuid)
708 fgid = self._mapugid(self.fgid)
709 return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
710
711 def _mapugid(self, id):
712 if id is None or id == -1:
713 return "-"
714 else:
715 return "%d" % id
716
717 # Fix the permission, owner and group of path
718 def fix_perms(path, mode, uid, gid, dir):
719 if mode and not os.path.islink(path):
720 #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
721 os.chmod(path, mode)
722 # -1 is a special value that means don't change the uid/gid
723 # if they are BOTH -1, don't bother to lchown
724 if not (uid == -1 and gid == -1):
725 #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
726 os.lchown(path, uid, gid)
727
728 # Return a list of configuration files based on either the default
729 # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
730 # paths are resolved via BBPATH
731 def get_fs_perms_list(d):
732 str = ""
733 bbpath = d.getVar('BBPATH', True)
734 fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
735 if not fs_perms_tables:
736 fs_perms_tables = 'files/fs-perms.txt'
737 for conf_file in fs_perms_tables.split():
738 str += " %s" % bb.utils.which(bbpath, conf_file)
739 return str
740
741
742
743 dvar = d.getVar('PKGD', True)
744
745 fs_perms_table = {}
746 fs_link_table = {}
747
748 # By default all of the standard directories specified in
749 # bitbake.conf will get 0755 root:root.
750 target_path_vars = [ 'base_prefix',
751 'prefix',
752 'exec_prefix',
753 'base_bindir',
754 'base_sbindir',
755 'base_libdir',
756 'datadir',
757 'sysconfdir',
758 'servicedir',
759 'sharedstatedir',
760 'localstatedir',
761 'infodir',
762 'mandir',
763 'docdir',
764 'bindir',
765 'sbindir',
766 'libexecdir',
767 'libdir',
768 'includedir',
769 'oldincludedir' ]
770
771 for path in target_path_vars:
772 dir = d.getVar(path, True) or ""
773 if dir == "":
774 continue
775 fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
776
777 # Now we actually load from the configuration files
778 for conf in get_fs_perms_list(d).split():
779 if os.path.exists(conf):
780 f = open(conf)
781 for line in f:
782 if line.startswith('#'):
783 continue
784 lsplit = line.split()
785 if len(lsplit) == 0:
786 continue
787 if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
788 msg = "Fixup perms: %s invalid line: %s" % (conf, line)
789 package_qa_handle_error("perm-line", msg, d)
790 continue
791 entry = fs_perms_entry(d.expand(line))
792 if entry and entry.path:
793 if entry.link:
794 fs_link_table[entry.path] = entry
795 if entry.path in fs_perms_table:
796 fs_perms_table.pop(entry.path)
797 else:
798 fs_perms_table[entry.path] = entry
799 if entry.path in fs_link_table:
800 fs_link_table.pop(entry.path)
801 f.close()
802
803 # Debug -- list out in-memory table
804 #for dir in fs_perms_table:
805 # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
806 #for link in fs_link_table:
807 # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
808
809 # We process links first, so we can go back and fixup directory ownership
810 # for any newly created directories
811 # Process in sorted order so /run gets created before /run/lock, etc.
812 for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
813 link = entry.link
814 dir = entry.path
815 origin = dvar + dir
816 if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
817 continue
818
819 if link[0] == "/":
820 target = dvar + link
821 ptarget = link
822 else:
823 target = os.path.join(os.path.dirname(origin), link)
824 ptarget = os.path.join(os.path.dirname(dir), link)
825 if os.path.exists(target):
826 msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
827 package_qa_handle_error("perm-link", msg, d)
828 continue
829
830 # Create path to move directory to, move it, and then setup the symlink
831 bb.utils.mkdirhier(os.path.dirname(target))
832 #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
833 os.rename(origin, target)
834 #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
835 os.symlink(link, origin)
836
837 for dir in fs_perms_table:
838 origin = dvar + dir
839 if not (cpath.exists(origin) and cpath.isdir(origin)):
840 continue
841
842 fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
843
844 if fs_perms_table[dir].walk == 'true':
845 for root, dirs, files in os.walk(origin):
846 for dr in dirs:
847 each_dir = os.path.join(root, dr)
848 fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
849 for f in files:
850 each_file = os.path.join(root, f)
851 fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
852}
853
854python split_and_strip_files () {
855 import stat, errno
856
857 dvar = d.getVar('PKGD', True)
858 pn = d.getVar('PN', True)
859
860 oldcwd = os.getcwd()
861 os.chdir(dvar)
862
863 # We default to '.debug' style
864 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
865 # Single debug-file-directory style debug info
866 debugappend = ".debug"
867 debugdir = ""
868 debuglibdir = "/usr/lib/debug"
869 debugsrcdir = "/usr/src/debug"
870 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
871 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
872 debugappend = ""
873 debugdir = "/.debug"
874 debuglibdir = ""
875 debugsrcdir = ""
876 else:
877 # Original OE-core, a.k.a. ".debug", style debug info
878 debugappend = ""
879 debugdir = "/.debug"
880 debuglibdir = ""
881 debugsrcdir = "/usr/src/debug"
882
883 sourcefile = d.expand("${WORKDIR}/debugsources.list")
884 bb.utils.remove(sourcefile)
885
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500886 # Return type (bits):
887 # 0 - not elf
888 # 1 - ELF
889 # 2 - stripped
890 # 4 - executable
891 # 8 - shared library
892 # 16 - kernel module
893 def isELF(path):
894 type = 0
895 ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))
896
897 if ret:
898 msg = "split_and_strip_files: 'file %s' failed" % path
899 package_qa_handle_error("split-strip", msg, d)
900 return type
901
902 # Not stripped
903 if "ELF" in result:
904 type |= 1
905 if "not stripped" not in result:
906 type |= 2
907 if "executable" in result:
908 type |= 4
909 if "shared" in result:
910 type |= 8
911 return type
912
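    # Illustrative readings of the bitmask above: file(1) reporting
    # "ELF ... executable ... not stripped" yields 1|4 = 5, while
    # "ELF ... shared object ... stripped" yields 1|2|8 = 11.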
913
914 #
915 # First let's figure out all of the files we may have to process ... do this only once!
916 #
917 elffiles = {}
918 symlinks = {}
919 kernmods = []
920 inodes = {}
921 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
922 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
923 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \
924 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
925 for root, dirs, files in cpath.walk(dvar):
926 for f in files:
927 file = os.path.join(root, f)
928 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
929 kernmods.append(file)
930 continue
931
932 # Skip debug files
933 if debugappend and file.endswith(debugappend):
934 continue
935 if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
936 continue
937
938 try:
939 ltarget = cpath.realpath(file, dvar, False)
940 s = cpath.lstat(ltarget)
941 except OSError as e:
942 (err, strerror) = e.args
943 if err != errno.ENOENT:
944 raise
945 # Skip broken symlinks
946 continue
947 if not s:
948 continue
949 # Check it's an executable
950 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
951 or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)):
952 # If it's a symlink, and points to an ELF file, we capture the readlink target
953 if cpath.islink(file):
954 target = os.readlink(file)
955 if isELF(ltarget):
956 #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
957 symlinks[file] = target
958 continue
959
960 # It's a file (or hardlink), not a link
961 # ...but is it ELF, and is it already stripped?
962 elf_file = isELF(file)
963 if elf_file & 1:
964 if elf_file & 2:
965 if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
966 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
967 else:
968 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
969 package_qa_handle_error("already-stripped", msg, d)
970 continue
971
972 # At this point we have an unstripped elf file. We need to:
973 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
974 # b) Only strip any hardlinked file once (no races)
975 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
976
977 # Use a reference of device ID and inode number to identify files
978 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
979 if file_reference in inodes:
980 os.unlink(file)
981 os.link(inodes[file_reference][0], file)
982 inodes[file_reference].append(file)
983 else:
984 inodes[file_reference] = [file]
985 # break hardlink
986 bb.utils.copyfile(file, file)
987 elffiles[file] = elf_file
988 # Modified the file so clear the cache
989 cpath.updatecache(file)
990
991 #
992 # First let's process debug splitting
993 #
994 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
995 for file in elffiles:
996 src = file[len(dvar):]
997 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
998 fpath = dvar + dest
999
1000 # Split the file...
1001 bb.utils.mkdirhier(os.path.dirname(fpath))
1002 #bb.note("Split %s -> %s" % (file, fpath))
1003 # Only store off the hard link reference if we successfully split!
1004 splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)
1005
1006 # Hardlink our debug symbols to the other hardlink copies
1007 for ref in inodes:
1008 if len(inodes[ref]) == 1:
1009 continue
1010 for file in inodes[ref][1:]:
1011 src = file[len(dvar):]
1012 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
1013 fpath = dvar + dest
1014 target = inodes[ref][0][len(dvar):]
1015 ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
1016 bb.utils.mkdirhier(os.path.dirname(fpath))
1017 #bb.note("Link %s -> %s" % (fpath, ftarget))
1018 os.link(ftarget, fpath)
1019
1020 # Create symlinks for all cases we were able to split symbols
1021 for file in symlinks:
1022 src = file[len(dvar):]
1023 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
1024 fpath = dvar + dest
1025 # Skip it if the target doesn't exist
1026 try:
1027 s = os.stat(fpath)
1028 except OSError as e:
1029 (err, strerror) = e.args
1030 if err != errno.ENOENT:
1031 raise
1032 continue
1033
1034 ltarget = symlinks[file]
1035 lpath = os.path.dirname(ltarget)
1036 lbase = os.path.basename(ltarget)
1037 ftarget = ""
1038 if lpath and lpath != ".":
1039 ftarget += lpath + debugdir + "/"
1040 ftarget += lbase + debugappend
1041 if lpath.startswith(".."):
1042 ftarget = os.path.join("..", ftarget)
1043 bb.utils.mkdirhier(os.path.dirname(fpath))
1044 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1045 os.symlink(ftarget, fpath)
1046
1047 # Process the debugsrcdir if requested...
1048 # This copies and places the referenced sources for later debugging...
1049 copydebugsources(debugsrcdir, d)
1050 #
1051 # End of debug splitting
1052 #
1053
1054 #
1055 # Now let's go back over things and strip them
1056 #
1057 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
1058 strip = d.getVar("STRIP", True)
1059 sfiles = []
1060 for file in elffiles:
1061 elf_file = int(elffiles[file])
1062 #bb.note("Strip %s" % file)
1063 sfiles.append((file, elf_file, strip))
1064 for f in kernmods:
1065 sfiles.append((f, 16, strip))
1066
1067 oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)
1068
1069 #
1070 # End of strip
1071 #
1072 os.chdir(oldcwd)
1073}
1074
1075python populate_packages () {
1076 import glob, re
1077
1078 workdir = d.getVar('WORKDIR', True)
1079 outdir = d.getVar('DEPLOY_DIR', True)
1080 dvar = d.getVar('PKGD', True)
1081 packages = d.getVar('PACKAGES', True)
1082 pn = d.getVar('PN', True)
1083
1084 bb.utils.mkdirhier(outdir)
1085 os.chdir(dvar)
1086
1087 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False)
1088
1089 # Sanity check PACKAGES for duplicates
1090 # This check should be moved to sanity.bbclass once we have the infrastructure
1091 package_list = []
1092
1093 for pkg in packages.split():
1094 if pkg in package_list:
1095 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1096 package_qa_handle_error("packages-list", msg, d)
1097 elif autodebug and pkg.endswith("-dbg"):
1098 package_list.insert(0, pkg)
1099 else:
1100 package_list.append(pkg)
1101 d.setVar('PACKAGES', ' '.join(package_list))
1102 pkgdest = d.getVar('PKGDEST', True)
1103
1104 seen = []
1105
1106 # os.mkdir masks the permissions with umask so we have to unset it first
1107 oldumask = os.umask(0)
1108
1109 debug = []
1110 for root, dirs, files in cpath.walk(dvar):
1111 dir = root[len(dvar):]
1112 if not dir:
1113 dir = os.sep
1114 for f in (files + dirs):
1115 path = "." + os.path.join(dir, f)
1116 if "/.debug/" in path or path.endswith("/.debug"):
1117 debug.append(path)
1118
1119 for pkg in package_list:
1120 root = os.path.join(pkgdest, pkg)
1121 bb.utils.mkdirhier(root)
1122
1123 filesvar = d.getVar('FILES_%s' % pkg, True) or ""
1124 if "//" in filesvar:
1125 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1126 package_qa_handle_error("files-invalid", msg, d)
1127 filesvar.replace("//", "/")
1128
1129 origfiles = filesvar.split()
1130 files, symlink_paths = files_from_filevars(origfiles)
1131
1132 if autodebug and pkg.endswith("-dbg"):
1133 files.extend(debug)
1134
1135 for file in files:
1136 if (not cpath.islink(file)) and (not cpath.exists(file)):
1137 continue
1138 if file in seen:
1139 continue
1140 seen.append(file)
1141
1142 def mkdir(src, dest, p):
1143 src = os.path.join(src, p)
1144 dest = os.path.join(dest, p)
1145 fstat = cpath.stat(src)
1146 os.mkdir(dest, fstat.st_mode)
1147 os.chown(dest, fstat.st_uid, fstat.st_gid)
1148 if p not in seen:
1149 seen.append(p)
1150 cpath.updatecache(dest)
1151
1152 def mkdir_recurse(src, dest, paths):
1153 if cpath.exists(dest + '/' + paths):
1154 return
1155 while paths.startswith("./"):
1156 paths = paths[2:]
1157 p = "."
1158 for c in paths.split("/"):
1159 p = os.path.join(p, c)
1160 if not cpath.exists(os.path.join(dest, p)):
1161 mkdir(src, dest, p)
1162
1163 if cpath.isdir(file) and not cpath.islink(file):
1164 mkdir_recurse(dvar, root, file)
1165 continue
1166
1167 mkdir_recurse(dvar, root, os.path.dirname(file))
1168 fpath = os.path.join(root,file)
1169 if not cpath.islink(file):
1170 os.link(file, fpath)
1171 continue
1172 ret = bb.utils.copyfile(file, fpath)
1173 if ret is False or ret == 0:
1174 bb.fatal("File population failed")
1175
1176 # Check if symlink paths exist
1177 for file in symlink_paths:
1178 if not os.path.exists(os.path.join(root,file)):
1179 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1180 "parent directory structure does not exist. One of "
1181 "its parent directories is a symlink whose target "
1182 "directory is not included in the package." %
1183 (file, pkg))
1184
1185 os.umask(oldumask)
1186 os.chdir(workdir)
1187
1188 # Handle LICENSE_EXCLUSION
1189 package_list = []
1190 for pkg in packages.split():
1191 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
1192 msg = "%s has an incompatible license. Excluding from packaging." % pkg
1193 package_qa_handle_error("incompatible-license", msg, d)
1194 else:
1195 package_list.append(pkg)
1196 d.setVar('PACKAGES', ' '.join(package_list))
1197
1198 unshipped = []
1199 for root, dirs, files in cpath.walk(dvar):
1200 dir = root[len(dvar):]
1201 if not dir:
1202 dir = os.sep
1203 for f in (files + dirs):
1204 path = os.path.join(dir, f)
1205 if ('.' + path) not in seen:
1206 unshipped.append(path)
1207
1208 if unshipped != []:
1209 msg = pn + ": Files/directories were installed but not shipped in any package:"
1210 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
1211 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1212 else:
1213 for f in unshipped:
1214 msg = msg + "\n " + f
1215 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1216 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1217 package_qa_handle_error("installed-vs-shipped", msg, d)
1218}
1219populate_packages[dirs] = "${D}"
1220
1221python package_fixsymlinks () {
1222 import errno
1223 pkgdest = d.getVar('PKGDEST', True)
1224 packages = d.getVar("PACKAGES", False).split()
1225
1226 dangling_links = {}
1227 pkg_files = {}
1228 for pkg in packages:
1229 dangling_links[pkg] = []
1230 pkg_files[pkg] = []
1231 inst_root = os.path.join(pkgdest, pkg)
1232 for path in pkgfiles[pkg]:
1233 rpath = path[len(inst_root):]
1234 pkg_files[pkg].append(rpath)
1235 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1236 if not cpath.lexists(rtarget):
1237 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1238
1239 newrdepends = {}
1240 for pkg in dangling_links:
1241 for l in dangling_links[pkg]:
1242 found = False
1243 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1244 for p in packages:
1245 if l in pkg_files[p]:
1246 found = True
1247 bb.debug(1, "target found in %s" % p)
1248 if p == pkg:
1249 break
1250 if pkg not in newrdepends:
1251 newrdepends[pkg] = []
1252 newrdepends[pkg].append(p)
1253 break
1254 if found == False:
1255 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1256
1257 for pkg in newrdepends:
1258 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1259 for p in newrdepends[pkg]:
1260 if p not in rdepends:
1261 rdepends[p] = []
1262 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1263}
1264
1265
1266python package_package_name_hook() {
1267 """
1268 A package_name_hook function can be used to rewrite the package names by
1269 changing PKG. For an example, see debian.bbclass.
1270 """
1271 pass
1272}
1273
1274EXPORT_FUNCTIONS package_name_hook
1275
1276
1277PKGDESTWORK = "${WORKDIR}/pkgdata"
1278
1279python emit_pkgdata() {
1280 from glob import glob
1281 import json
1282
1283 def write_if_exists(f, pkg, var):
1284 def encode(str):
1285 import codecs
1286 c = codecs.getencoder("unicode_escape")
1287 return c(str)[0].decode("latin1")
1288
1289 val = d.getVar('%s_%s' % (var, pkg), True)
1290 if val:
1291 f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
1292 return val
1293 val = d.getVar('%s' % (var), True)
1294 if val:
1295 f.write('%s: %s\n' % (var, encode(val)))
1296 return val
1297
1298 def write_extra_pkgs(variants, pn, packages, pkgdatadir):
1299 for variant in variants:
1300 with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
1301 fd.write("PACKAGES: %s\n" % ' '.join(
1302 map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
1303
1304 def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
1305 for variant in variants:
1306 for pkg in packages.split():
1307 ml_pkg = "%s-%s" % (variant, pkg)
1308 subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
1309 with open(subdata_file, 'w') as fd:
1310 fd.write("PKG_%s: %s" % (ml_pkg, pkg))
1311
1312 packages = d.getVar('PACKAGES', True)
1313 pkgdest = d.getVar('PKGDEST', True)
1314 pkgdatadir = d.getVar('PKGDESTWORK', True)
1315
1316 # Take shared lock since we're only reading, not writing
1317 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
1318
1319 data_file = pkgdatadir + d.expand("/${PN}" )
1320 f = open(data_file, 'w')
1321 f.write("PACKAGES: %s\n" % packages)
1322 f.close()
1323
1324 pn = d.getVar('PN', True)
1325 global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
1326 variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()
1327
1328 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
1329 write_extra_pkgs(variants, pn, packages, pkgdatadir)
1330
1331 if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
1332 write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
1333
1334 workdir = d.getVar('WORKDIR', True)
1335
1336 for pkg in packages.split():
1337 pkgval = d.getVar('PKG_%s' % pkg, True)
1338 if pkgval is None:
1339 pkgval = pkg
1340 d.setVar('PKG_%s' % pkg, pkg)
1341
1342 pkgdestpkg = os.path.join(pkgdest, pkg)
1343 files = {}
1344 total_size = 0
1345 for f in pkgfiles[pkg]:
1346 relpth = os.path.relpath(f, pkgdestpkg)
1347 fstat = os.lstat(f)
1348 total_size += fstat.st_size
1349 files[os.sep + relpth] = fstat.st_size
1350 d.setVar('FILES_INFO', json.dumps(files))
1351
1352 subdata_file = pkgdatadir + "/runtime/%s" % pkg
1353 sf = open(subdata_file, 'w')
1354 write_if_exists(sf, pkg, 'PN')
1355 write_if_exists(sf, pkg, 'PE')
1356 write_if_exists(sf, pkg, 'PV')
1357 write_if_exists(sf, pkg, 'PR')
1358 write_if_exists(sf, pkg, 'PKGE')
1359 write_if_exists(sf, pkg, 'PKGV')
1360 write_if_exists(sf, pkg, 'PKGR')
1361 write_if_exists(sf, pkg, 'LICENSE')
1362 write_if_exists(sf, pkg, 'DESCRIPTION')
1363 write_if_exists(sf, pkg, 'SUMMARY')
1364 write_if_exists(sf, pkg, 'RDEPENDS')
1365 rprov = write_if_exists(sf, pkg, 'RPROVIDES')
1366 write_if_exists(sf, pkg, 'RRECOMMENDS')
1367 write_if_exists(sf, pkg, 'RSUGGESTS')
1368 write_if_exists(sf, pkg, 'RREPLACES')
1369 write_if_exists(sf, pkg, 'RCONFLICTS')
1370 write_if_exists(sf, pkg, 'SECTION')
1371 write_if_exists(sf, pkg, 'PKG')
1372 write_if_exists(sf, pkg, 'ALLOW_EMPTY')
1373 write_if_exists(sf, pkg, 'FILES')
1374 write_if_exists(sf, pkg, 'pkg_postinst')
1375 write_if_exists(sf, pkg, 'pkg_postrm')
1376 write_if_exists(sf, pkg, 'pkg_preinst')
1377 write_if_exists(sf, pkg, 'pkg_prerm')
1378 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
1379 write_if_exists(sf, pkg, 'FILES_INFO')
1380 for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
1381 write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
1382
1383 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
1384 for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
1385 write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
1386
1387 sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
1388 sf.close()
1389
1390 # Symlinks needed for rprovides lookup
1391 if rprov:
1392 for p in rprov.strip().split():
1393 subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
1394 bb.utils.mkdirhier(os.path.dirname(subdata_sym))
1395 oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
1396
1397 allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
1398 if not allow_empty:
1399 allow_empty = d.getVar('ALLOW_EMPTY', True)
1400 root = "%s/%s" % (pkgdest, pkg)
1401 os.chdir(root)
1402 g = glob('*')
1403 if g or allow_empty == "1":
1404 # Symlinks needed for reverse lookups (from the final package name)
1405 subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
1406 oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
1407
1408 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
1409 open(packagedfile, 'w').close()
1410
1411 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
1412 write_extra_runtime_pkgs(variants, packages, pkgdatadir)
1413
1414 if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
1415 write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
1416
1417 bb.utils.unlockfile(lf)
1418}
1419emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
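# Illustrative sketch (hypothetical package "foo") of what emit_pkgdata leaves
# under ${PKGDESTWORK} for later packaging steps:
#   pkgdata/foo                   PACKAGES: foo foo-dbg foo-dev ...
#   pkgdata/runtime/foo           per-package variables, e.g. PN: foo, PKGV: 1.0,
#                                 FILES_INFO: {...}, PKGSIZE_foo: <bytes>
#   pkgdata/runtime/foo.packaged  empty marker file
#   pkgdata/runtime-reverse/foo   symlink back to ../runtime/foo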
1420
1421ldconfig_postinst_fragment() {
1422if [ x"$D" = "x" ]; then
1423 if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
1424fi
1425}
1426
1427RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1428
1429# Collect perfile run-time dependency metadata
1430# Output:
1431# FILERPROVIDESFLIST_pkg - list of all files w/ deps
1432# FILERPROVIDES_filepath_pkg - per file dep
1433#
1434# FILERDEPENDSFLIST_pkg - list of all files w/ deps
1435# FILERDEPENDS_filepath_pkg - per file dep
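# Illustrative (hypothetical) result for a package "foo" shipping /usr/bin/foo:
#   FILERDEPENDSFLIST_foo = "/usr/bin/foo"
#   FILERDEPENDS_/usr/bin/foo_foo = "libc.so.6"
# with matching FILERPROVIDES* entries for anything the files provide.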
1436
1437python package_do_filedeps() {
1438 if d.getVar('SKIP_FILEDEPS', True) == '1':
1439 return
1440
1441 pkgdest = d.getVar('PKGDEST', True)
1442 packages = d.getVar('PACKAGES', True)
1443 rpmdeps = d.getVar('RPMDEPS', True)
1444
1445 def chunks(files, n):
1446 return [files[i:i+n] for i in range(0, len(files), n)]
1447
1448 pkglist = []
1449 for pkg in packages.split():
1450 if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
1451 continue
1452 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
1453 continue
1454 for files in chunks(pkgfiles[pkg], 100):
1455 pkglist.append((pkg, files, rpmdeps, pkgdest))
1456
1457 processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)
1458
1459 provides_files = {}
1460 requires_files = {}
1461
1462 for result in processed:
1463 (pkg, provides, requires) = result
1464
1465 if pkg not in provides_files:
1466 provides_files[pkg] = []
1467 if pkg not in requires_files:
1468 requires_files[pkg] = []
1469
1470 for file in provides:
1471 provides_files[pkg].append(file)
1472 key = "FILERPROVIDES_" + file + "_" + pkg
1473 d.setVar(key, " ".join(provides[file]))
1474
1475 for file in requires:
1476 requires_files[pkg].append(file)
1477 key = "FILERDEPENDS_" + file + "_" + pkg
1478 d.setVar(key, " ".join(requires[file]))
1479
1480 for pkg in requires_files:
1481 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
1482 for pkg in provides_files:
1483 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
1484}
1485
1486SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
1487SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1488
1489python package_do_shlibs() {
1490 import re, pipes
1491 import subprocess as sub
1492
1493 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
1494 if exclude_shlibs:
1495 bb.note("not generating shlibs")
1496 return
1497
1498 lib_re = re.compile("^.*\.so")
1499 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
1500
1501 packages = d.getVar('PACKAGES', True)
1502 targetos = d.getVar('TARGET_OS', True)
1503
1504 workdir = d.getVar('WORKDIR', True)
1505
1506 ver = d.getVar('PKGV', True)
1507 if not ver:
1508 msg = "PKGV not defined"
1509 package_qa_handle_error("pkgv-undefined", msg, d)
1510 return
1511
1512 pkgdest = d.getVar('PKGDEST', True)
1513
1514 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1515
1516 # Take shared lock since we're only reading, not writing
1517 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1518
1519 def linux_so(file, needed, sonames, renames, pkgver):
1520 needs_ldconfig = False
1521 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1522 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
1523 fd = os.popen(cmd)
1524 lines = fd.readlines()
1525 fd.close()
1526 rpath = []
1527 for l in lines:
1528 m = re.match("\s+RPATH\s+([^\s]*)", l)
1529 if m:
1530 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1531 rpath = list(map(os.path.normpath, rpaths))
1532 for l in lines:
1533 m = re.match("\s+NEEDED\s+([^\s]*)", l)
1534 if m:
1535 dep = m.group(1)
1536 if dep not in needed[pkg]:
1537 needed[pkg].append((dep, file, rpath))
1538 m = re.match("\s+SONAME\s+([^\s]*)", l)
1539 if m:
1540 this_soname = m.group(1)
1541 prov = (this_soname, ldir, pkgver)
1542 if not prov in sonames:
1543                    # if the library is private (only used by this package) then do not register it as a shlib provider
1544 if not private_libs or this_soname not in private_libs:
1545 sonames.append(prov)
1546 if libdir_re.match(os.path.dirname(file)):
1547 needs_ldconfig = True
1548 if snap_symlinks and (os.path.basename(file) != this_soname):
1549 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1550 return needs_ldconfig
1551
1552 def darwin_so(file, needed, sonames, renames, pkgver):
1553 if not os.path.exists(file):
1554 return
1555 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1556
1557 def get_combinations(base):
1558 #
1559 # Given a base library name, find all combinations of this split by "." and "-"
1560 #
1561 combos = []
1562 options = base.split(".")
1563 for i in range(1, len(options) + 1):
1564 combos.append(".".join(options[0:i]))
1565 options = base.split("-")
1566 for i in range(1, len(options) + 1):
1567 combos.append("-".join(options[0:i]))
1568 return combos
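        # Sketch of the behaviour: get_combinations("libfoo.1.2") returns
        # ["libfoo", "libfoo.1", "libfoo.1.2", "libfoo.1.2"]; the duplicate is
        # harmless because the caller checks "not combo in sonames" before appending.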
1569
1570 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
1571 # Drop suffix
1572 name = os.path.basename(file).rsplit(".",1)[0]
1573 # Find all combinations
1574 combos = get_combinations(name)
1575 for combo in combos:
1576 if not combo in sonames:
1577 prov = (combo, ldir, pkgver)
1578 sonames.append(prov)
1579 if file.endswith('.dylib') or file.endswith('.so'):
1580 rpath = []
1581 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
1582            out, err = p.communicate()
1583            # If returned successfully, process stdout for results
1584            if p.returncode == 0:
1585                for l in out.split("\n"):
1586                    l = l.strip()
1587 if l.startswith('path '):
1588 rpath.append(l.split()[1])
1589
1590 p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
1591            out, err = p.communicate()
1592            # If returned successfully, process stdout for results
1593            if p.returncode == 0:
1594                for l in out.split("\n"):
1595                    l = l.strip()
1596 if not l or l.endswith(":"):
1597 continue
1598 if "is not an object file" in l:
1599 continue
1600 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1601 if name and name not in needed[pkg]:
1602 needed[pkg].append((name, file, []))
1603
1604 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
1605 snap_symlinks = True
1606 else:
1607 snap_symlinks = False
1608
1609 if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
1610 use_ldconfig = True
1611 else:
1612 use_ldconfig = False
1613
1614 needed = {}
1615 shlib_provider = oe.package.read_shlib_providers(d)
1616
1617 for pkg in packages.split():
1618 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
1619 private_libs = private_libs.split()
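        # PRIVATE_LIBS (or PRIVATE_LIBS_<pkg>) is a space-separated list of sonames,
        # e.g. (illustrative) PRIVATE_LIBS_mypkg = "libinternal.so.1"; such libraries
        # are neither registered as shlib providers nor resolved as dependencies below.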
1620 needs_ldconfig = False
1621 bb.debug(2, "calculating shlib provides for %s" % pkg)
1622
1623 pkgver = d.getVar('PKGV_' + pkg, True)
1624 if not pkgver:
1625 pkgver = d.getVar('PV_' + pkg, True)
1626 if not pkgver:
1627 pkgver = ver
1628
1629 needed[pkg] = []
1630 sonames = list()
1631 renames = list()
1632 for file in pkgfiles[pkg]:
1633 soname = None
1634 if cpath.islink(file):
1635 continue
1636 if targetos == "darwin" or targetos == "darwin8":
1637 darwin_so(file, needed, sonames, renames, pkgver)
1638 elif os.access(file, os.X_OK) or lib_re.match(file):
1639 ldconfig = linux_so(file, needed, sonames, renames, pkgver)
1640 needs_ldconfig = needs_ldconfig or ldconfig
1641 for (old, new) in renames:
1642 bb.note("Renaming %s to %s" % (old, new))
1643 os.rename(old, new)
1644 pkgfiles[pkg].remove(old)
1645
1646 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1647 if len(sonames):
1648 fd = open(shlibs_file, 'w')
1649 for s in sonames:
1650 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1651 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1652 if old_pkg != pkg:
1653 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1654 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1655 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1656 if s[0] not in shlib_provider:
1657 shlib_provider[s[0]] = {}
1658 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1659 fd.close()
1660 if needs_ldconfig and use_ldconfig:
1661 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1662 postinst = d.getVar('pkg_postinst_%s' % pkg, True)
1663 if not postinst:
1664 postinst = '#!/bin/sh\n'
1665 postinst += d.getVar('ldconfig_postinst_fragment', True)
1666 d.setVar('pkg_postinst_%s' % pkg, postinst)
1667 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1668
1669 bb.utils.unlockfile(lf)
1670
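    # ASSUME_SHLIBS entries take the form "soname:package" or "soname:package_version",
    # e.g. (illustrative) ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation", and
    # register a provider for libraries that have no shlibs file of their own.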
1671 assumed_libs = d.getVar('ASSUME_SHLIBS', True)
1672 if assumed_libs:
1673 libdir = d.getVar("libdir", True)
1674 for e in assumed_libs.split():
1675 l, dep_pkg = e.split(":")
1676 lib_ver = None
1677 dep_pkg = dep_pkg.rsplit("_", 1)
1678 if len(dep_pkg) == 2:
1679 lib_ver = dep_pkg[1]
1680 dep_pkg = dep_pkg[0]
1681 if l not in shlib_provider:
1682 shlib_provider[l] = {}
1683 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1684
1685 libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
1686
1687 for pkg in packages.split():
1688 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1689
1690 deps = list()
1691 for n in needed[pkg]:
1692 # if n is in private libraries, don't try to search provider for it
1693            # this could cause a problem if, for example, some abc.bb provided a private
1694            # /opt/abc/lib/libfoo.so.1 and also shipped /usr/bin/abc, which depends on the system library libfoo.so.1;
1695            # skipping it is still a better alternative than providing our own
1696            # version and then adding a runtime dependency on the same system library
1697 if private_libs and n[0] in private_libs:
1698 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1699 continue
1700 if n[0] in shlib_provider.keys():
1701                shlib_provider_path = []
1702                for k in shlib_provider[n[0]].keys():
1703 shlib_provider_path.append(k)
1704 match = None
1705 for p in n[2] + shlib_provider_path + libsearchpath:
1706 if p in shlib_provider[n[0]]:
1707 match = p
1708 break
1709 if match:
1710 (dep_pkg, ver_needed) = shlib_provider[n[0]][match]
1711
1712 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1713
1714 if dep_pkg == pkg:
1715 continue
1716
1717 if ver_needed:
1718 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1719 else:
1720 dep = dep_pkg
1721 if not dep in deps:
1722 deps.append(dep)
1723 continue
1724 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1725
1726 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1727 if os.path.exists(deps_file):
1728 os.remove(deps_file)
1729 if len(deps):
1730 fd = open(deps_file, 'w')
1731 for dep in deps:
1732 fd.write(dep + '\n')
1733 fd.close()
1734}
1735
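# package_do_pkgconfig scans each package for .pc files, records the pkg-config
# modules a package provides in <pkg>.pclist under SHLIBSWORKDIR, and resolves each
# "Requires:" entry against the providers seen so far, writing the resulting package
# dependencies to <pkg>.pcdeps for read_shlibdeps to pick up.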
1736python package_do_pkgconfig () {
1737 import re
1738
1739 packages = d.getVar('PACKAGES', True)
1740 workdir = d.getVar('WORKDIR', True)
1741 pkgdest = d.getVar('PKGDEST', True)
1742
1743 shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
1744 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1745
1746 pc_re = re.compile('(.*)\.pc$')
1747 var_re = re.compile('(.*)=(.*)')
1748 field_re = re.compile('(.*): (.*)')
1749
1750 pkgconfig_provided = {}
1751 pkgconfig_needed = {}
1752 for pkg in packages.split():
1753 pkgconfig_provided[pkg] = []
1754 pkgconfig_needed[pkg] = []
1755 for file in pkgfiles[pkg]:
1756 m = pc_re.match(file)
1757 if m:
1758 pd = bb.data.init()
1759 name = m.group(1)
1760 pkgconfig_provided[pkg].append(name)
1761 if not os.access(file, os.R_OK):
1762 continue
1763 f = open(file, 'r')
1764 lines = f.readlines()
1765 f.close()
1766 for l in lines:
1767 m = var_re.match(l)
1768 if m:
1769 name = m.group(1)
1770 val = m.group(2)
1771 pd.setVar(name, pd.expand(val))
1772 continue
1773 m = field_re.match(l)
1774 if m:
1775 hdr = m.group(1)
1776 exp = bb.data.expand(m.group(2), pd)
1777 if hdr == 'Requires':
1778 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1779
1780 # Take shared lock since we're only reading, not writing
1781 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1782
1783 for pkg in packages.split():
1784 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1785 if pkgconfig_provided[pkg] != []:
1786 f = open(pkgs_file, 'w')
1787 for p in pkgconfig_provided[pkg]:
1788 f.write('%s\n' % p)
1789 f.close()
1790
1791 # Go from least to most specific since the last one found wins
1792 for dir in reversed(shlibs_dirs):
1793 if not os.path.exists(dir):
1794 continue
1795 for file in os.listdir(dir):
1796 m = re.match('^(.*)\.pclist$', file)
1797 if m:
1798 pkg = m.group(1)
1799 fd = open(os.path.join(dir, file))
1800 lines = fd.readlines()
1801 fd.close()
1802 pkgconfig_provided[pkg] = []
1803 for l in lines:
1804 pkgconfig_provided[pkg].append(l.rstrip())
1805
1806 for pkg in packages.split():
1807 deps = []
1808 for n in pkgconfig_needed[pkg]:
1809 found = False
1810 for k in pkgconfig_provided.keys():
1811 if n in pkgconfig_provided[k]:
1812 if k != pkg and not (k in deps):
1813 deps.append(k)
1814 found = True
1815 if found == False:
1816 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1817 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1818 if len(deps):
1819 fd = open(deps_file, 'w')
1820 for dep in deps:
1821 fd.write(dep + '\n')
1822 fd.close()
1823
1824 bb.utils.unlockfile(lf)
1825}
1826
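# read_libdep_files returns a dict keyed by package name, mapping each runtime
# dependency collected from the .shlibdeps, .pcdeps and .clilibdeps files to its
# list of version constraints, roughly (illustrative):
#   { "mypkg": { "libfoo1": [">= 1.2"], "libbar2": [] } }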
1827def read_libdep_files(d):
1828 pkglibdeps = {}
1829 packages = d.getVar('PACKAGES', True).split()
1830 for pkg in packages:
1831 pkglibdeps[pkg] = {}
1832 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1833 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1834 if os.access(depsfile, os.R_OK):
1835 fd = open(depsfile)
1836 lines = fd.readlines()
1837 fd.close()
1838 for l in lines:
1839                    l = l.rstrip()
1840 deps = bb.utils.explode_dep_versions2(l)
1841 for dep in deps:
1842 if not dep in pkglibdeps[pkg]:
1843 pkglibdeps[pkg][dep] = deps[dep]
1844 return pkglibdeps
1845
1846python read_shlibdeps () {
1847 pkglibdeps = read_libdep_files(d)
1848
1849 packages = d.getVar('PACKAGES', True).split()
1850 for pkg in packages:
1851 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1852 for dep in pkglibdeps[pkg]:
1853 # Add the dep if it's not already there, or if no comparison is set
1854 if dep not in rdepends:
1855 rdepends[dep] = []
1856 for v in pkglibdeps[pkg][dep]:
1857 if v not in rdepends[dep]:
1858 rdepends[dep].append(v)
1859 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1860}
1861
1862python package_depchains() {
1863 """
1864 For a given set of prefix and postfix modifiers, make those packages
1865    RRECOMMENDS on the corresponding packages for their RDEPENDS.
1866
1867 Example: If package A depends upon package B, and A's .bb emits an
1868 A-dev package, this would make A-dev Recommends: B-dev.
1869
1870 If only one of a given suffix is specified, it will take the RRECOMMENDS
1871 based on the RDEPENDS of *all* other packages. If more than one of a given
1872    suffix is specified, it will only use the RDEPENDS of the single parent
1873 package.
1874 """
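    # The modifiers examined come from DEPCHAIN_PRE and DEPCHAIN_POST; in OE-Core
    # DEPCHAIN_POST typically defaults to "-dev -dbg" (a common configuration,
    # not something enforced by this function).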
1875
1876 packages = d.getVar('PACKAGES', True)
1877 postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
1878 prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
1879
1880 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
1881
1882 #bb.note('depends for %s is %s' % (base, depends))
1883 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
1884
1885 for depend in depends:
1886 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
1887 #bb.note("Skipping %s" % depend)
1888 continue
1889 if depend.endswith('-dev'):
1890 depend = depend[:-4]
1891 if depend.endswith('-dbg'):
1892 depend = depend[:-4]
1893 pkgname = getname(depend, suffix)
1894 #bb.note("Adding %s for %s" % (pkgname, depend))
1895 if pkgname not in rreclist and pkgname != pkg:
1896 rreclist[pkgname] = []
1897
1898 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1899 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1900
1901 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
1902
1903 #bb.note('rdepends for %s is %s' % (base, rdepends))
1904 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
1905
1906 for depend in rdepends:
1907 if depend.find('virtual-locale-') != -1:
1908 #bb.note("Skipping %s" % depend)
1909 continue
1910 if depend.endswith('-dev'):
1911 depend = depend[:-4]
1912 if depend.endswith('-dbg'):
1913 depend = depend[:-4]
1914 pkgname = getname(depend, suffix)
1915 #bb.note("Adding %s for %s" % (pkgname, depend))
1916 if pkgname not in rreclist and pkgname != pkg:
1917 rreclist[pkgname] = []
1918
1919 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1920 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1921
1922 def add_dep(list, dep):
1923 if dep not in list:
1924 list.append(dep)
1925
1926 depends = []
1927 for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
1928 add_dep(depends, dep)
1929
1930 rdepends = []
1931 for pkg in packages.split():
1932 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
1933 add_dep(rdepends, dep)
1934
1935 #bb.note('rdepends is %s' % rdepends)
1936
1937 def post_getname(name, suffix):
1938 return '%s%s' % (name, suffix)
1939 def pre_getname(name, suffix):
1940 return '%s%s' % (suffix, name)
1941
1942 pkgs = {}
1943 for pkg in packages.split():
1944 for postfix in postfixes:
1945 if pkg.endswith(postfix):
1946 if not postfix in pkgs:
1947 pkgs[postfix] = {}
1948 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1949
1950 for prefix in prefixes:
1951 if pkg.startswith(prefix):
1952 if not prefix in pkgs:
1953 pkgs[prefix] = {}
1954 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
1955
1956 if "-dbg" in pkgs:
1957 pkglibdeps = read_libdep_files(d)
1958 pkglibdeplist = []
1959 for pkg in pkglibdeps:
1960 for k in pkglibdeps[pkg]:
1961 add_dep(pkglibdeplist, k)
1962        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d)))
1963
1964 for suffix in pkgs:
1965 for pkg in pkgs[suffix]:
1966            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', True):
1967                continue
1968 (base, func) = pkgs[suffix][pkg]
1969 if suffix == "-dev":
1970 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
1971 elif suffix == "-dbg":
1972 if not dbgdefaultdeps:
1973 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
1974 continue
1975 if len(pkgs[suffix]) == 1:
1976 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1977 else:
1978 rdeps = []
1979 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
1980 add_dep(rdeps, dep)
1981 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1982}
1983
1984# Since bitbake can't determine which variables are accessed during package
1985# iteration, we need to list them here:
1986PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE"
1987
1988def gen_packagevar(d):
1989 ret = []
1990 pkgs = (d.getVar("PACKAGES", True) or "").split()
1991 vars = (d.getVar("PACKAGEVARS", True) or "").split()
1992 for p in pkgs:
1993 for v in vars:
1994 ret.append(v + "_" + p)
1995
1996 # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
1997 # affected recipes.
1998 ret.append('LICENSE_EXCLUSION-%s' % p)
1999 return " ".join(ret)
2000
2001PACKAGE_PREPROCESS_FUNCS ?= ""
2002# Functions for setting up PKGD
2003PACKAGEBUILDPKGD ?= " \
2004 perform_packagecopy \
2005 ${PACKAGE_PREPROCESS_FUNCS} \
2006 split_and_strip_files \
2007 fixup_perms \
2008 "
2009# Functions which split PKGD up into separate packages
2010PACKAGESPLITFUNCS ?= " \
2011 package_do_split_locales \
2012 populate_packages"
2013# Functions which process metadata based on split packages
2014PACKAGEFUNCS += " \
2015 package_fixsymlinks \
2016 package_name_hook \
2017 package_do_filedeps \
2018 package_do_shlibs \
2019 package_do_pkgconfig \
2020 read_shlibdeps \
2021 package_depchains \
2022 emit_pkgdata"
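# Extra steps can be hooked into any of these lists, e.g. (illustrative):
#   PACKAGEFUNCS += "my_extra_packagefunc"
# where my_extra_packagefunc is a python function operating on the split
# packages in PKGDEST.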
2023
2024python do_package () {
2025 # Change the following version to cause sstate to invalidate the package
2026    # cache. This is useful if something this class depends on changes in a
2027    # way that alters the output of this class. rpmdeps is a good example,
2028    # as any change to rpmdeps requires this task to be rerun.
2029 # PACKAGE_BBCLASS_VERSION = "1"
2030
2031 # Init cachedpath
2032 global cpath
2033 cpath = oe.cachedpath.CachedPath()
2034
2035 ###########################################################################
2036 # Sanity test the setup
2037 ###########################################################################
2038
2039 packages = (d.getVar('PACKAGES', True) or "").split()
2040 if len(packages) < 1:
2041 bb.debug(1, "No packages to build, skipping do_package")
2042 return
2043
2044 workdir = d.getVar('WORKDIR', True)
2045 outdir = d.getVar('DEPLOY_DIR', True)
2046 dest = d.getVar('D', True)
2047 dvar = d.getVar('PKGD', True)
2048 pn = d.getVar('PN', True)
2049
2050 if not workdir or not outdir or not dest or not dvar or not pn:
2051 msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
2052 package_qa_handle_error("var-undefined", msg, d)
2053 return
2054
2055 bb.build.exec_func("package_get_auto_pr", d)
2056
2057 ###########################################################################
2058 # Optimisations
2059 ###########################################################################
2060
2061 # Continually expanding complex expressions is inefficient, particularly
2062 # when we write to the datastore and invalidate the expansion cache. This
2063 # code pre-expands some frequently used variables
2064
2065 def expandVar(x, d):
2066 d.setVar(x, d.getVar(x, True))
2067
2068 for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
2069 expandVar(x, d)
2070
2071 ###########################################################################
2072 # Setup PKGD (from D)
2073 ###########################################################################
2074
2075 for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
2076 bb.build.exec_func(f, d)
2077
2078 ###########################################################################
2079 # Split up PKGD into PKGDEST
2080 ###########################################################################
2081
2082 cpath = oe.cachedpath.CachedPath()
2083
2084 for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
2085 bb.build.exec_func(f, d)
2086
2087 ###########################################################################
2088 # Process PKGDEST
2089 ###########################################################################
2090
2091 # Build global list of files in each split package
2092 global pkgfiles
2093 pkgfiles = {}
2094 packages = d.getVar('PACKAGES', True).split()
2095 pkgdest = d.getVar('PKGDEST', True)
2096 for pkg in packages:
2097 pkgfiles[pkg] = []
2098 for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
2099 for file in files:
2100 pkgfiles[pkg].append(walkroot + os.sep + file)
2101
2102 for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
2103 bb.build.exec_func(f, d)
2104
2105    qa_sane = d.getVar("QA_SANE", True)
2106    if not qa_sane:
2107        bb.fatal("Fatal QA errors found, failing task.")
2108}
2109
2110do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
2111do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
2112addtask package after do_install
2113
2114PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
2115SSTATETASKS += "do_package"
2116do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
2117do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
2118do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
2119do_package_setscene[dirs] = "${STAGING_DIR}"
2120
2121python do_package_setscene () {
2122 sstate_setscene(d)
2123}
2124addtask do_package_setscene
2125
2126do_packagedata () {
2127 :
2128}
2129
2130addtask packagedata before do_build after do_package
2131
2132SSTATETASKS += "do_packagedata"
2133do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
2134do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
2135do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
2136do_packagedata[stamp-extra-info] = "${MACHINE}"
2137
2138python do_packagedata_setscene () {
2139 sstate_setscene(d)
2140}
2141addtask do_packagedata_setscene
2142
2143#
2144# Helper functions for the package writing classes
2145#
2146
2147def mapping_rename_hook(d):
2148 """
2149 Rewrite variables to account for package renaming in things
2150 like debian.bbclass or manual PKG variable name changes
2151 """
2152 pkg = d.getVar("PKG", True)
2153 runtime_mapping_rename("RDEPENDS", pkg, d)
2154 runtime_mapping_rename("RRECOMMENDS", pkg, d)
2155 runtime_mapping_rename("RSUGGESTS", pkg, d)
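    # Illustrative example: if debian.bbclass renames a library package by setting
    # PKG_libfoo = "libfoo1", any "libfoo" entries in the RDEPENDS/RRECOMMENDS/RSUGGESTS
    # of the package currently being written are rewritten to "libfoo1" here.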