blob: 8f7b60e077034002c119919d18a3ba9dc6b54ba9 [file] [log] [blame]
Andrew Geissler635e0e42020-08-21 15:58:33 -05001#
2# SPDX-License-Identifier: GPL-2.0-only
3#
4
5from abc import ABCMeta, abstractmethod
6import os
7import glob
8import subprocess
9import shutil
10import re
11import collections
12import bb
13import tempfile
14import oe.utils
15import oe.path
16import string
17from oe.gpg_sign import get_signer
18import hashlib
19import fnmatch
20
# this can be used by all PM backends to create the index files in parallel
def create_index(arg):
    """Run one index-creation command and log any output it produces.

    `arg` is a complete shell command line; backends dispatch several of
    these in parallel, one per index to generate.
    """
    bb.note("Executing '%s' ..." % arg)
    output = subprocess.check_output(arg, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
    if output:
        bb.note(output)
29
def opkg_query(cmd_output):
    """
    Parse the output of the package manager and return a dictionary keyed by
    package name with the package's arch, version, filename, dependencies
    (recommends are folded in, tagged with " [REC]"), provides and package
    architecture. Used when the packages are in deb or ipk format.
    """
    # Strips version constraints such as " (>= 1.2)" from dependency fields.
    verregex = re.compile(r' \([=<>]* [^ )]*\)')

    def field_value(line):
        return line.split(": ")[1]

    def empty_record():
        return {"pkg": "", "arch": "", "ver": "", "filename": "",
                "deps": [], "provs": [], "pkgarch": ""}

    packages = {}
    cur = empty_record()
    # The trailing '' guarantees the final stanza is flushed even when the
    # output does not end with a blank line.
    for raw in cmd_output.splitlines() + ['']:
        line = raw.rstrip()
        if ':' in line:
            if line.startswith("Package: "):
                cur["pkg"] = field_value(line)
            elif line.startswith("Architecture: "):
                cur["arch"] = field_value(line)
            elif line.startswith("Version: "):
                cur["ver"] = field_value(line)
            elif line.startswith("File: ") or line.startswith("Filename:"):
                fname = field_value(line)
                cur["filename"] = os.path.basename(fname) if "/" in fname else fname
            elif line.startswith("Depends: "):
                for depend in verregex.sub('', field_value(line)).split(", "):
                    cur["deps"].append(depend)
            elif line.startswith("Recommends: "):
                for recommend in verregex.sub('', field_value(line)).split(", "):
                    cur["deps"].append("%s [REC]" % recommend)
            elif line.startswith("PackageArch: "):
                cur["pkgarch"] = field_value(line)
            elif line.startswith("Provides: "):
                for provide in verregex.sub('', field_value(line)).split(", "):
                    cur["provs"].append(provide)

        # A blank line terminates the current stanza: save the package.
        elif not line:
            # IPK doesn't include the filename; reconstruct the canonical one.
            if not cur["filename"]:
                cur["filename"] = "%s_%s_%s.ipk" % (cur["pkg"], cur["ver"], cur["arch"])
            if cur["pkg"]:
                packages[cur["pkg"]] = {"arch": cur["arch"], "ver": cur["ver"],
                        "filename": cur["filename"], "deps": cur["deps"],
                        "pkgarch": cur["pkgarch"], "provs": cur["provs"]}
            cur = empty_record()

    return packages
90
def failed_postinsts_abort(pkgs, log_path):
    """Abort the build because postinstall scriptlets could not be run.

    `pkgs` names the offending packages, `log_path` points at the log
    holding the failure details.
    """
    message = """Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
then please place them into pkg_postinst_ontarget:${PN} ().
Deferring to first boot via 'exit 1' is no longer supported.
Details of the failure are in %s.""" % (pkgs, log_path)
    bb.fatal(message)
96
def generate_locale_archive(d, rootfs, target_arch, localedir):
    """Fold every per-locale directory under `localedir` into a single
    locale-archive file using cross-localedef.

    Aborts the build when `target_arch` has no known endianness/alignment
    flags.
    """
    # Per-architecture flags for cross-localedef. Pretty sure we don't need
    # this for locale archive generation but keeping it to be safe...
    locale_arch_options = { \
        "arc": ["--uint32-align=4", "--little-endian"],
        "arceb": ["--uint32-align=4", "--big-endian"],
        "arm": ["--uint32-align=4", "--little-endian"],
        "armeb": ["--uint32-align=4", "--big-endian"],
        "aarch64": ["--uint32-align=4", "--little-endian"],
        "aarch64_be": ["--uint32-align=4", "--big-endian"],
        "sh4": ["--uint32-align=4", "--big-endian"],
        "powerpc": ["--uint32-align=4", "--big-endian"],
        "powerpc64": ["--uint32-align=4", "--big-endian"],
        "powerpc64le": ["--uint32-align=4", "--little-endian"],
        "mips": ["--uint32-align=4", "--big-endian"],
        "mipsisa32r6": ["--uint32-align=4", "--big-endian"],
        "mips64": ["--uint32-align=4", "--big-endian"],
        "mipsisa64r6": ["--uint32-align=4", "--big-endian"],
        "mipsel": ["--uint32-align=4", "--little-endian"],
        "mipsisa32r6el": ["--uint32-align=4", "--little-endian"],
        "mips64el": ["--uint32-align=4", "--little-endian"],
        "mipsisa64r6el": ["--uint32-align=4", "--little-endian"],
        "riscv64": ["--uint32-align=4", "--little-endian"],
        "riscv32": ["--uint32-align=4", "--little-endian"],
        "i586": ["--uint32-align=4", "--little-endian"],
        "i686": ["--uint32-align=4", "--little-endian"],
        "x86_64": ["--uint32-align=4", "--little-endian"]
    }
    if target_arch not in locale_arch_options:
        bb.error("locale_arch_options not found for target_arch=" + target_arch)
        bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")
    arch_options = locale_arch_options[target_arch]

    # Need to set this so cross-localedef knows where the archive is
    env = dict(os.environ)
    env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")

    for entry in sorted(os.listdir(localedir)):
        subdir = os.path.join(localedir, entry)
        if not os.path.isdir(subdir):
            continue
        cmd = ["cross-localedef", "--verbose"] + arch_options \
              + ["--add-to-archive", subdir]
        subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT)
142
class Indexer(object, metaclass=ABCMeta):
    """Abstract base class for package-feed index writers.

    A concrete subclass implements write_index() for one package format.
    """

    def __init__(self, d, deploy_dir):
        # d: bitbake datastore; deploy_dir: directory whose packages get indexed
        self.d = d
        self.deploy_dir = deploy_dir

    @abstractmethod
    def write_index(self):
        """Create the index files for self.deploy_dir."""
        pass
151
class PkgsList(object, metaclass=ABCMeta):
    """Abstract base class for querying the packages installed in a rootfs.

    A concrete subclass implements list_pkgs() for one package format.
    """

    def __init__(self, d, rootfs_dir):
        # d: bitbake datastore; rootfs_dir: rootfs whose packages are listed
        self.d = d
        self.rootfs_dir = rootfs_dir

    @abstractmethod
    def list_pkgs(self):
        """Return the packages installed in self.rootfs_dir."""
        pass
160
class PackageManager(object, metaclass=ABCMeta):
    """
    This is an abstract class. Do not instantiate this directly.

    Concrete backends (rpm/ipk/deb) implement the abstract methods to
    install, remove and query packages in a target rootfs and to manage
    the package feeds.
    """

    def __init__(self, d, target_rootfs):
        # d: bitbake datastore; target_rootfs: directory packages get installed into
        self.d = d
        self.target_rootfs = target_rootfs
        self.deploy_dir = None
        self.deploy_lock = None
        self._initialize_intercepts()

    def _initialize_intercepts(self):
        """Copy the postinst intercept scripts into a per-rootfs scratch dir."""
        bb.note("Initializing intercept dir for %s" % self.target_rootfs)
        # As there might be more than one instance of PackageManager operating at the same time
        # we need to isolate the intercept_scripts directories from each other,
        # hence the ugly hash digest in dir name.
        self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts-%s" %
                                           (hashlib.sha256(self.target_rootfs.encode()).hexdigest()))

        postinst_intercepts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split()
        if not postinst_intercepts:
            postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_PATH")
            if not postinst_intercepts_path:
                postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_DIR") or self.d.expand("${COREBASE}/scripts/postinst-intercepts")
            postinst_intercepts = oe.path.which_wild('*', postinst_intercepts_path)

        bb.debug(1, 'Collected intercepts:\n%s' % ''.join(' %s\n' % i for i in postinst_intercepts))
        bb.utils.remove(self.intercepts_dir, True)
        bb.utils.mkdirhier(self.intercepts_dir)
        for intercept in postinst_intercepts:
            shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))

    @abstractmethod
    def _handle_intercept_failure(self, failed_script):
        """Backend hook called when an intercept's packages are deferred
        to first boot (receives the ##PKGS value, possibly None)."""
        pass

    def _postpone_to_first_boot(self, postinst_intercept_hook):
        """Extract the ##PKGS: marker from an intercept script and defer
        the listed packages' postinstalls to first boot via the backend
        dependent handler."""
        with open(postinst_intercept_hook) as intercept:
            registered_pkgs = None
            for line in intercept.read().split("\n"):
                m = re.match(r"^##PKGS:(.*)", line)
                if m is not None:
                    registered_pkgs = m.group(1).strip()
                    break

        if registered_pkgs is not None:
            bb.note("If an image is being built, the postinstalls for the following packages "
                    "will be postponed for first boot: %s" %
                    registered_pkgs)

        # call the backend dependent handler
        self._handle_intercept_failure(registered_pkgs)


    def run_intercepts(self, populate_sdk=None):
        """Run every executable intercept script collected in
        self.intercepts_dir against the rootfs.

        Scripts that cannot run on the build host (missing qemu usermode
        or wine support) are noted and, for image builds, postponed to
        first boot; other failures abort the build.
        """
        intercepts_dir = self.intercepts_dir

        bb.note("Running intercept scripts:")
        os.environ['D'] = self.target_rootfs
        os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE')
        for script in os.listdir(intercepts_dir):
            script_full = os.path.join(intercepts_dir, script)

            if script == "postinst_intercept" or not os.access(script_full, os.X_OK):
                continue

            # we do not want to run any multilib variant of this
            if script.startswith("delay_to_first_boot"):
                self._postpone_to_first_boot(script_full)
                continue

            if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32':
                bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s"
                        % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                continue

            bb.note("> Executing %s intercept ..." % script)

            try:
                output = subprocess.check_output(script_full, stderr=subprocess.STDOUT)
                if output: bb.note(output.decode("utf-8"))
            except subprocess.CalledProcessError as e:
                bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8")))
                if populate_sdk == 'host':
                    bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                elif populate_sdk == 'target':
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                else:
                    if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
                        bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
                                % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
                        self._postpone_to_first_boot(script_full)
                    else:
                        bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))

    @abstractmethod
    def update(self):
        """
        Update the package manager package database.
        """
        pass

    @abstractmethod
    def install(self, pkgs, attempt_only=False):
        """
        Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
        True, installation failures are ignored.
        """
        pass

    @abstractmethod
    def remove(self, pkgs, with_dependencies=True):
        """
        Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
        is False, then any dependencies are left in place.
        """
        pass

    @abstractmethod
    def write_index(self):
        """
        This function creates the index files
        """
        pass

    @abstractmethod
    def remove_packaging_data(self):
        """Remove the package manager's own metadata from the rootfs."""
        pass

    @abstractmethod
    def list_installed(self):
        """Return the packages currently installed in the rootfs."""
        pass

    @abstractmethod
    def extract(self, pkg):
        """
        Returns the path to a tmpdir where resides the contents of a package.
        Deleting the tmpdir is responsibility of the caller.
        """
        pass

    @abstractmethod
    def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
        """
        Add remote package feeds into repository manager configuration. The parameters
        for the feeds are set by feed_uris, feed_base_paths and feed_archs.
        See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS
        for their description.
        """
        pass

    def install_glob(self, globs, sdk=False):
        """
        Install all packages that match a glob.
        """
        # TODO don't have sdk here but have a property on the superclass
        # (and respect in install_complementary)
        if sdk:
            pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}")
        else:
            pkgdatadir = self.d.getVar("PKGDATA_DIR")

        bb.note("Installing globbed packages...")
        cmd = ["oe-pkgdata-util", "-p", pkgdatadir, "list-pkgs", globs]
        bb.note('Running %s' % cmd)
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if stderr: bb.note(stderr.decode("utf-8"))
        # Popen/communicate never raises CalledProcessError, so failures must
        # be detected from the exit status explicitly (the old 'except
        # subprocess.CalledProcessError' branch was unreachable).
        # Return code 1 means no packages matched the glob - not an error.
        if proc.returncode not in (0, 1):
            bb.fatal("Could not compute globbed packages list. Command "
                     "'%s' returned %d:\n%s" %
                     (' '.join(cmd), proc.returncode, stderr.decode("utf-8")))
        pkgs = stdout.decode("utf-8")
        self.install(pkgs.split(), attempt_only=True)

    def install_complementary(self, globs=None):
        """
        Install complementary packages based upon the list of currently installed
        packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
        call this function explicitly after the normal package installation.
        """
        if globs is None:
            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
        split_linguas = set()

        for translation in self.d.getVar('IMAGE_LINGUAS').split():
            split_linguas.add(translation)
            split_linguas.add(translation.split('-')[0])

        split_linguas = sorted(split_linguas)

        for lang in split_linguas:
            globs += " *-locale-%s" % lang
            for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
                globs += (" " + complementary_linguas) % lang

        if globs is None:
            return

        # we need to write the list of installed packages to a file because the
        # oe-pkgdata-util reads it from a file
        with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
            pkgs = self.list_installed()

            provided_pkgs = set()
            for pkg in pkgs.values():
                provided_pkgs |= set(pkg.get('provs', []))

            output = oe.utils.format_pkg_list(pkgs, "arch")
            installed_pkgs.write(output)
            installed_pkgs.flush()

            cmd = ["oe-pkgdata-util",
                   "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
                   globs]
            exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
            if exclude:
                cmd.extend(['--exclude=' + '|'.join(exclude.split())])
            bb.note('Running %s' % cmd)
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if stderr: bb.note(stderr.decode("utf-8"))
            # Popen/communicate never raises CalledProcessError, so a nonzero
            # exit from oe-pkgdata-util must be caught here explicitly (the
            # old 'except subprocess.CalledProcessError' was unreachable).
            if proc.returncode != 0:
                bb.fatal("Could not compute complementary packages list. Command "
                         "'%s' returned %d:\n%s" %
                         (' '.join(cmd), proc.returncode, stderr.decode("utf-8")))
            complementary_pkgs = set(stdout.decode("utf-8").split())
            # Packages already provided by an installed package are skipped.
            skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
            install_pkgs = sorted(complementary_pkgs - provided_pkgs)
            bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
                ' '.join(install_pkgs),
                ' '.join(skip_pkgs)))
            self.install(install_pkgs)

        if self.d.getVar('IMAGE_LOCALES_ARCHIVE') == '1':
            target_arch = self.d.getVar('TARGET_ARCH')
            localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
            if os.path.exists(localedir) and os.listdir(localedir):
                generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
                # And now delete the binary locales
                self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)

    def deploy_dir_lock(self):
        """Take the deploy-directory lock; requires self.deploy_dir to be set."""
        if self.deploy_dir is None:
            raise RuntimeError("deploy_dir is not set!")

        lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")

        self.deploy_lock = bb.utils.lockfile(lock_file_name)

    def deploy_dir_unlock(self):
        """Release the deploy-directory lock if held; no-op otherwise."""
        if self.deploy_lock is None:
            return

        bb.utils.unlockfile(self.deploy_lock)

        self.deploy_lock = None

    def construct_uris(self, uris, base_paths):
        """
        Construct URIs based on the following pattern: uri/base_path where 'uri'
        and 'base_path' correspond to each element of the corresponding array
        argument leading to len(uris) x len(base_paths) elements on the returned
        array
        """
        def _append(arr1, arr2, sep='/'):
            res = []
            narr1 = [a.rstrip(sep) for a in arr1]
            narr2 = [a.rstrip(sep).lstrip(sep) for a in arr2]
            for a1 in narr1:
                if arr2:
                    for a2 in narr2:
                        res.append("%s%s%s" % (a1, sep, a2))
                else:
                    res.append(a1)
            return res
        return _append(uris, base_paths)
448
def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies):
    """
    Go through our do_package_write_X dependencies and hardlink the packages we depend
    upon into the repo directory. This prevents us seeing other packages that may
    have been built that we don't depend upon and also packages for architectures we don't
    support.
    """
    import errno

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    mytaskname = d.getVar("BB_RUNTASK")
    pn = d.getVar("PN")
    seendirs = set()
    multilibs = {}

    # Start from a clean subrepo so packages from previous runs never leak in.
    bb.utils.remove(subrepo_dir, recurse=True)
    bb.utils.mkdirhier(subrepo_dir)

    # Detect bitbake -b usage
    nodeps = d.getVar("BB_LIMITEDDEPS") or False
    if nodeps or not filterbydependencies:
        # No dependency data to filter by (or filtering disabled): expose the
        # entire deploy directory through a symlink instead.
        oe.path.symlink(deploydir, subrepo_dir, True)
        return

    # Locate our own task in BB_TASKDEPDATA; as used below, each entry's
    # field [0] is the recipe (PN) and [1] the task name.
    start = None
    for dep in taskdepdata:
        data = taskdepdata[dep]
        if data[1] == mytaskname and data[0] == pn:
            start = dep
            break
    if start is None:
        bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
    pkgdeps = set()
    start = [start]
    seen = set(start)
    # Support direct dependencies (do_rootfs -> do_package_write_X)
    # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
    # Breadth-first walk over task dependencies (field [3]); tasks of OTHER
    # recipes matching do_<taskname> are collected, tasks of our own recipe
    # are traversed further.
    while start:
        next = []
        for dep2 in start:
            for dep in taskdepdata[dep2][3]:
                if taskdepdata[dep][0] != pn:
                    if "do_" + taskname in dep:
                        pkgdeps.add(dep)
                elif dep not in seen:
                    next.append(dep)
                    seen.add(dep)
        start = next

    for dep in pkgdeps:
        c = taskdepdata[dep][0]
        # The sstate manifest lists every file the dependency deployed.
        manifest, d2 = oe.sstatesig.find_sstate_manifest(c, taskdepdata[dep][2], taskname, d, multilibs)
        if not manifest:
            bb.fatal("No manifest generated from: %s in %s" % (c, taskdepdata[dep][2]))
        if not os.path.exists(manifest):
            continue
        with open(manifest, "r") as f:
            for l in f:
                l = l.strip()
                deploydir = os.path.normpath(deploydir)
                # Rewrite each manifest path into the matching path under
                # subrepo_dir by stripping the deploydir prefix.
                if bb.data.inherits_class('packagefeed-stability', d):
                    dest = l.replace(deploydir + "-prediff", "")
                else:
                    dest = l.replace(deploydir, "")
                dest = subrepo_dir + dest
                # A trailing "/" marks a directory entry in the manifest.
                if l.endswith("/"):
                    if dest not in seendirs:
                        bb.utils.mkdirhier(dest)
                        seendirs.add(dest)
                    continue
                # Try to hardlink the file, copy if that fails
                destdir = os.path.dirname(dest)
                if destdir not in seendirs:
                    bb.utils.mkdirhier(destdir)
                    seendirs.add(destdir)
                try:
                    os.link(l, dest)
                except OSError as err:
                    # EXDEV: source and destination are on different
                    # filesystems, so hardlinking cannot work - copy instead.
                    if err.errno == errno.EXDEV:
                        bb.utils.copyfile(l, dest)
                    else:
                        raise
531
532
def generate_index_files(d):
    """Write package-feed index files for each backend named in
    PACKAGE_CLASSES whose deploy directory exists, aborting the build if
    an indexer returns an error string.
    """
    from oe.package_manager.rpm import RpmSubdirIndexer
    from oe.package_manager.ipk import OpkgIndexer
    from oe.package_manager.deb import DpkgIndexer

    indexer_map = {
        "rpm": (RpmSubdirIndexer, d.getVar('DEPLOY_DIR_RPM')),
        "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')),
        "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB'))
    }

    # NOTE(review): only the result of the last indexer run is checked,
    # matching the historical behaviour of this function.
    result = None
    for pkg_class in d.getVar('PACKAGE_CLASSES').replace("package_", "").split():
        entry = indexer_map.get(pkg_class)
        if entry is None:
            continue
        indexer_cls, deploy_dir = entry
        if os.path.exists(deploy_dir):
            result = indexer_cls(d, deploy_dir).write_index()

    if result is not None:
        bb.fatal(result)