#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

from abc import ABCMeta, abstractmethod
import os
import glob
import subprocess
import shutil
import re
import collections
import bb
import tempfile
import oe.utils
import oe.path
import string
from oe.gpg_sign import get_signer
import hashlib
import fnmatch

# this can be used by all PM backends to create the index files in parallel
def create_index(arg):
    index_cmd = arg

    bb.note("Executing '%s' ..." % index_cmd)
    result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
    if result:
        bb.note(result)
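
# For illustration only: backends typically gather their per-feed index commands
# into a list and run this helper in parallel, e.g. via
# oe.utils.multiprocess_launch(create_index, index_cmds, d); the exact invocation
# is up to each backend.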

def opkg_query(cmd_output):
    """
    This method parses the output from the package manager and returns a
    dictionary with the package information. This is used when the packages
    are in deb or ipk format.
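
    For illustration, the field values below are made up. Given output such as

        Package: libfoo1
        Version: 1.0-r0
        Architecture: core2-64
        Depends: libc6 (>= 2.35)

    followed by a blank line, the returned dictionary would contain

        {'libfoo1': {'arch': 'core2-64', 'ver': '1.0-r0',
                     'filename': 'libfoo1_1.0-r0_core2-64.ipk',
                     'deps': ['libc6'], 'pkgarch': '', 'provs': []}}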
    """
    verregex = re.compile(r' \([=<>]* [^ )]*\)')
    output = dict()
    pkg = ""
    arch = ""
    ver = ""
    filename = ""
    dep = []
    prov = []
    pkgarch = ""
    for line in cmd_output.splitlines()+['']:
        line = line.rstrip()
        if ':' in line:
            if line.startswith("Package: "):
                pkg = line.split(": ")[1]
            elif line.startswith("Architecture: "):
                arch = line.split(": ")[1]
            elif line.startswith("Version: "):
                ver = line.split(": ")[1]
            elif line.startswith("File: ") or line.startswith("Filename:"):
                filename = line.split(": ")[1]
                if "/" in filename:
                    filename = os.path.basename(filename)
            elif line.startswith("Depends: "):
                depends = verregex.sub('', line.split(": ")[1])
                for depend in depends.split(", "):
                    dep.append(depend)
            elif line.startswith("Recommends: "):
                recommends = verregex.sub('', line.split(": ")[1])
                for recommend in recommends.split(", "):
                    dep.append("%s [REC]" % recommend)
            elif line.startswith("PackageArch: "):
                pkgarch = line.split(": ")[1]
            elif line.startswith("Provides: "):
                provides = verregex.sub('', line.split(": ")[1])
                for provide in provides.split(", "):
                    prov.append(provide)

        # When there is a blank line save the package information
        elif not line:
            # IPK doesn't include the filename
            if not filename:
                filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
            if pkg:
                output[pkg] = {"arch":arch, "ver":ver,
                        "filename":filename, "deps": dep, "pkgarch":pkgarch, "provs": prov}
            pkg = ""
            arch = ""
            ver = ""
            filename = ""
            dep = []
            prov = []
            pkgarch = ""

    return output

def failed_postinsts_abort(pkgs, log_path):
    bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
then please place them into pkg_postinst_ontarget:${PN} ().
Deferring to first boot via 'exit 1' is no longer supported.
Details of the failure are in %s.""" %(pkgs, log_path))

def generate_locale_archive(d, rootfs, target_arch, localedir):
    # Pretty sure we don't need this for locale archive generation but
    # keeping it to be safe...
    locale_arch_options = { \
        "arc": ["--uint32-align=4", "--little-endian"],
        "arceb": ["--uint32-align=4", "--big-endian"],
        "arm": ["--uint32-align=4", "--little-endian"],
        "armeb": ["--uint32-align=4", "--big-endian"],
        "aarch64": ["--uint32-align=4", "--little-endian"],
        "aarch64_be": ["--uint32-align=4", "--big-endian"],
        "sh4": ["--uint32-align=4", "--big-endian"],
        "powerpc": ["--uint32-align=4", "--big-endian"],
        "powerpc64": ["--uint32-align=4", "--big-endian"],
        "powerpc64le": ["--uint32-align=4", "--little-endian"],
        "mips": ["--uint32-align=4", "--big-endian"],
        "mipsisa32r6": ["--uint32-align=4", "--big-endian"],
        "mips64": ["--uint32-align=4", "--big-endian"],
        "mipsisa64r6": ["--uint32-align=4", "--big-endian"],
        "mipsel": ["--uint32-align=4", "--little-endian"],
        "mipsisa32r6el": ["--uint32-align=4", "--little-endian"],
        "mips64el": ["--uint32-align=4", "--little-endian"],
        "mipsisa64r6el": ["--uint32-align=4", "--little-endian"],
        "riscv64": ["--uint32-align=4", "--little-endian"],
        "riscv32": ["--uint32-align=4", "--little-endian"],
        "i586": ["--uint32-align=4", "--little-endian"],
        "i686": ["--uint32-align=4", "--little-endian"],
        "x86_64": ["--uint32-align=4", "--little-endian"]
    }
    if target_arch in locale_arch_options:
        arch_options = locale_arch_options[target_arch]
    else:
        bb.error("locale_arch_options not found for target_arch=" + target_arch)
        bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")

    # Need to set this so cross-localedef knows where the archive is
    env = dict(os.environ)
    env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")

    for name in sorted(os.listdir(localedir)):
        path = os.path.join(localedir, name)
        if os.path.isdir(path):
            cmd = ["cross-localedef", "--verbose"]
            cmd += arch_options
            cmd += ["--add-to-archive", path]
            subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT)
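
# Illustrative only: for an aarch64 target with a locale subdirectory such as
# <localedir>/en_GB, generate_locale_archive() ends up running roughly
#   cross-localedef --verbose --uint32-align=4 --little-endian --add-to-archive <localedir>/en_GB
# with LOCALEARCHIVE=<localedir>/locale-archive set in its environment.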

class Indexer(object, metaclass=ABCMeta):
    def __init__(self, d, deploy_dir):
        self.d = d
        self.deploy_dir = deploy_dir

    @abstractmethod
    def write_index(self):
        pass

class PkgsList(object, metaclass=ABCMeta):
    def __init__(self, d, rootfs_dir):
        self.d = d
        self.rootfs_dir = rootfs_dir

    @abstractmethod
    def list_pkgs(self):
        pass

class PackageManager(object, metaclass=ABCMeta):
    """
    This is an abstract class. Do not instantiate this directly.
    """

    def __init__(self, d, target_rootfs):
        self.d = d
        self.target_rootfs = target_rootfs
        self.deploy_dir = None
        self.deploy_lock = None
        self._initialize_intercepts()

    def _initialize_intercepts(self):
        bb.note("Initializing intercept dir for %s" % self.target_rootfs)
        # As there might be more than one instance of PackageManager operating at the same time
        # we need to isolate the intercept_scripts directories from each other,
        # hence the ugly hash digest in dir name.
        self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts-%s" %
                                           (hashlib.sha256(self.target_rootfs.encode()).hexdigest()))

        postinst_intercepts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split()
        if not postinst_intercepts:
            postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_PATH")
            if not postinst_intercepts_path:
                postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_DIR") or self.d.expand("${COREBASE}/scripts/postinst-intercepts")
            postinst_intercepts = oe.path.which_wild('*', postinst_intercepts_path)

        bb.debug(1, 'Collected intercepts:\n%s' % ''.join(' %s\n' % i for i in postinst_intercepts))
        bb.utils.remove(self.intercepts_dir, True)
        bb.utils.mkdirhier(self.intercepts_dir)
        for intercept in postinst_intercepts:
            shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))

    @abstractmethod
    def _handle_intercept_failure(self, failed_script):
        pass

    def _postpone_to_first_boot(self, postinst_intercept_hook):
        with open(postinst_intercept_hook) as intercept:
            registered_pkgs = None
            for line in intercept.read().split("\n"):
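                # A generated hook is expected to contain a marker line such as
                # "##PKGS: pkg-a pkg-b" (package names illustrative) naming the
                # packages whose postinstalls are being deferred.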
                m = re.match(r"^##PKGS:(.*)", line)
                if m is not None:
                    registered_pkgs = m.group(1).strip()
                    break

            if registered_pkgs is not None:
                bb.note("If an image is being built, the postinstalls for the following packages "
                        "will be postponed for first boot: %s" %
                        registered_pkgs)

                # call the backend dependent handler
                self._handle_intercept_failure(registered_pkgs)


    def run_intercepts(self, populate_sdk=None):
        intercepts_dir = self.intercepts_dir

        bb.note("Running intercept scripts:")
        os.environ['D'] = self.target_rootfs
        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE')
        for script in os.listdir(intercepts_dir):
            script_full = os.path.join(intercepts_dir, script)

            if script == "postinst_intercept" or not os.access(script_full, os.X_OK):
                continue

            # we do not want to run any multilib variant of this
            if script.startswith("delay_to_first_boot"):
                self._postpone_to_first_boot(script_full)
                continue

            if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32':
                bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s"
                        % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                continue

            bb.note("> Executing %s intercept ..." % script)

            try:
                output = subprocess.check_output(script_full, stderr=subprocess.STDOUT)
                if output: bb.note(output.decode("utf-8"))
            except subprocess.CalledProcessError as e:
                bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8")))
                if populate_sdk == 'host':
                    bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                elif populate_sdk == 'target':
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                else:
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                        self._postpone_to_first_boot(script_full)
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))

    @abstractmethod
    def update(self):
        """
        Update the package manager package database.
        """
        pass

    @abstractmethod
    def install(self, pkgs, attempt_only=False, hard_depends_only=False):
        """
        Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
        True, installation failures are ignored. If 'hard_depends_only' is True,
        only hard dependencies (not recommendations) are installed.
        """
        pass

    @abstractmethod
    def remove(self, pkgs, with_dependencies=True):
        """
        Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
        is False, then any dependencies are left in place.
        """
        pass

    @abstractmethod
    def write_index(self):
        """
        This function creates the index files
        """
        pass

    @abstractmethod
    def remove_packaging_data(self):
        pass

    @abstractmethod
    def list_installed(self):
        pass

    @abstractmethod
    def extract(self, pkg):
        """
        Returns the path to a tmpdir containing the extracted contents of a package.
        Deleting the tmpdir is the responsibility of the caller.
        """
        pass

    @abstractmethod
    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
        """
        Add remote package feeds into repository manager configuration. The parameters
        for the feeds are set by feed_uris, feed_base_paths and feed_archs.
        See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS
        for their description.
        """
        pass

    def install_glob(self, globs, sdk=False):
        """
        Install all packages that match a glob.
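
        Illustrative (hypothetical) usage: install_glob("*-doc") would attempt to
        install every package whose name matches '*-doc', ignoring any
        installation failures (attempt_only=True).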
        """
        # TODO don't have sdk here but have a property on the superclass
        # (and respect in install_complementary)
        if sdk:
            pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
        else:
            pkgdatadir = self.d.getVar("PKGDATA_DIR")

        try:
            bb.note("Installing globbed packages...")
            cmd = ["oe-pkgdata-util", "-p", pkgdatadir, "list-pkgs", globs]
            bb.note('Running %s' % cmd)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if stderr: bb.note(stderr.decode("utf-8"))
            pkgs = stdout.decode("utf-8")
            self.install(pkgs.split(), attempt_only=True)
        except subprocess.CalledProcessError as e:
            # Return code 1 means no packages matched
            if e.returncode != 1:
                bb.fatal("Could not compute globbed packages list. Command "
                         "'%s' returned %d:\n%s" %
                         (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

    def install_complementary(self, globs=None):
        """
        Install complementary packages based upon the list of currently installed
        packages, e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
        call this function explicitly after the normal package installation.
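
        For illustration (value made up): with IMAGE_LINGUAS = "en-gb", the
        default globs are extended with "*-locale-en" and "*-locale-en-gb".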
        """
        if globs is None:
            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
            split_linguas = set()

            for translation in self.d.getVar('IMAGE_LINGUAS').split():
                split_linguas.add(translation)
                split_linguas.add(translation.split('-')[0])

            split_linguas = sorted(split_linguas)

            for lang in split_linguas:
                globs += " *-locale-%s" % lang
                for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
                    globs += (" " + complementary_linguas) % lang

        if globs is None:
            return

        # we need to write the list of installed packages to a file because the
        # oe-pkgdata-util reads it from a file
        with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
            pkgs = self.list_installed()

            provided_pkgs = set()
            for pkg in pkgs.values():
                provided_pkgs |= set(pkg.get('provs', []))

            output = oe.utils.format_pkg_list(pkgs, "arch")
            installed_pkgs.write(output)
            installed_pkgs.flush()

            cmd = ["oe-pkgdata-util",
                   "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
                   globs]
            exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
            if exclude:
                cmd.extend(['--exclude=' + '|'.join(exclude.split())])
            try:
                bb.note('Running %s' % cmd)
                proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout, stderr = proc.communicate()
                if stderr: bb.note(stderr.decode("utf-8"))
                complementary_pkgs = stdout.decode("utf-8")
                complementary_pkgs = set(complementary_pkgs.split())
                skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
                install_pkgs = sorted(complementary_pkgs - provided_pkgs)
                bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
                    ' '.join(install_pkgs),
                    ' '.join(skip_pkgs)))
                self.install(install_pkgs, hard_depends_only=True)
            except subprocess.CalledProcessError as e:
                bb.fatal("Could not compute complementary packages list. Command "
                         "'%s' returned %d:\n%s" %
                         (' '.join(cmd), e.returncode, e.output.decode("utf-8")))

        if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
            target_arch = self.d.getVar('TARGET_ARCH')
            localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
            if os.path.exists(localedir) and os.listdir(localedir):
                generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
                # And now delete the binary locales
                self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)

    def deploy_dir_lock(self):
        if self.deploy_dir is None:
            raise RuntimeError("deploy_dir is not set!")

        lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")

        self.deploy_lock = bb.utils.lockfile(lock_file_name)

    def deploy_dir_unlock(self):
        if self.deploy_lock is None:
            return

        bb.utils.unlockfile(self.deploy_lock)

        self.deploy_lock = None

    def construct_uris(self, uris, base_paths):
        """
        Construct URIs based on the pattern uri/base_path, where 'uri' and
        'base_path' are taken from the corresponding list arguments, producing
        len(uris) x len(base_paths) elements in the returned list.
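
        For illustration (URIs made up):
            construct_uris(["http://example.com/feed"], ["all", "core2-64"])
            returns ["http://example.com/feed/all", "http://example.com/feed/core2-64"]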
        """
        def _append(arr1, arr2, sep='/'):
            res = []
            narr1 = [a.rstrip(sep) for a in arr1]
            narr2 = [a.rstrip(sep).lstrip(sep) for a in arr2]
            for a1 in narr1:
                if arr2:
                    for a2 in narr2:
                        res.append("%s%s%s" % (a1, sep, a2))
                else:
                    res.append(a1)
            return res
        return _append(uris, base_paths)

def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies):
    """
    Go through our do_package_write_X dependencies and hardlink the packages we
    depend upon into the repo directory. This prevents us from seeing other
    packages that may have been built but that we do not depend upon, as well as
    packages for architectures we do not support. If dependency filtering is
    disabled (or bitbake was invoked with -b), the deploy directory is simply
    symlinked instead.
    """
    import errno

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    mytaskname = d.getVar("BB_RUNTASK")
    pn = d.getVar("PN")
    seendirs = set()
    multilibs = {}

    bb.utils.remove(subrepo_dir, recurse=True)
    bb.utils.mkdirhier(subrepo_dir)

    # Detect bitbake -b usage
    nodeps = d.getVar("BB_LIMITEDDEPS") or False
    if nodeps or not filterbydependencies:
        oe.path.symlink(deploydir, subrepo_dir, True)
        return

    start = None
    for dep in taskdepdata:
        data = taskdepdata[dep]
        if data[1] == mytaskname and data[0] == pn:
            start = dep
            break
    if start is None:
        bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
    pkgdeps = set()
    start = [start]
    seen = set(start)
    # Support direct dependencies (do_rootfs -> do_package_write_X)
    # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
    while start:
        next = []
        for dep2 in start:
            for dep in taskdepdata[dep2][3]:
                if taskdepdata[dep][0] != pn:
                    if "do_" + taskname in dep:
                        pkgdeps.add(dep)
                elif dep not in seen:
                    next.append(dep)
                    seen.add(dep)
        start = next

    for dep in pkgdeps:
        c = taskdepdata[dep][0]
        manifest, d2 = oe.sstatesig.find_sstate_manifest(c, taskdepdata[dep][2], taskname, d, multilibs)
        if not manifest:
            bb.fatal("No manifest generated from: %s in %s" % (c, taskdepdata[dep][2]))
        if not os.path.exists(manifest):
            continue
        with open(manifest, "r") as f:
            for l in f:
                l = l.strip()
                deploydir = os.path.normpath(deploydir)
                if bb.data.inherits_class('packagefeed-stability', d):
                    dest = l.replace(deploydir + "-prediff", "")
                else:
                    dest = l.replace(deploydir, "")
                dest = subrepo_dir + dest
                if l.endswith("/"):
                    if dest not in seendirs:
                        bb.utils.mkdirhier(dest)
                        seendirs.add(dest)
                    continue
                # Try to hardlink the file, copy if that fails
                destdir = os.path.dirname(dest)
                if destdir not in seendirs:
                    bb.utils.mkdirhier(destdir)
                    seendirs.add(destdir)
                try:
                    os.link(l, dest)
                except OSError as err:
                    if err.errno == errno.EXDEV:
                        bb.utils.copyfile(l, dest)
                    else:
                        raise


def generate_index_files(d):
    from oe.package_manager.rpm import RpmSubdirIndexer
    from oe.package_manager.ipk import OpkgIndexer
    from oe.package_manager.deb import DpkgIndexer

    classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split()

    indexer_map = {
        "rpm": (RpmSubdirIndexer, d.getVar('DEPLOY_DIR_RPM')),
        "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')),
        "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB'))
    }

    result = None

    for pkg_class in classes:
        if pkg_class not in indexer_map:
            continue

        if os.path.exists(indexer_map[pkg_class][1]):
            result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index()

    if result is not None:
        bb.fatal(result)