1from abc import ABCMeta, abstractmethod
2import os
3import glob
4import subprocess
5import shutil
6import multiprocessing
7import re
8import bb
9import tempfile
10import oe.utils
11
12
13# this can be used by all PM backends to create the index files in parallel
14def create_index(arg):
15 index_cmd = arg
16
17 try:
18 bb.note("Executing '%s' ..." % index_cmd)
19 result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True)
20 except subprocess.CalledProcessError as e:
21 return("Index creation command '%s' failed with return code %d:\n%s" %
22 (e.cmd, e.returncode, e.output))
23
24 if result:
25 bb.note(result)
26
27 return None
28
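# A minimal usage sketch for create_index(), assuming the same helper the
# indexers below rely on (oe.utils.multiprocess_exec) and a simplified,
# hypothetical createrepo command line:
#
#   index_cmds = ["createrepo --update -q /path/to/deploy/rpm/all"]
#   results = oe.utils.multiprocess_exec(index_cmds, create_index)
#   if results:
#       bb.fatal('\n'.join(results))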
29
30class Indexer(object):
31 __metaclass__ = ABCMeta
32
33 def __init__(self, d, deploy_dir):
34 self.d = d
35 self.deploy_dir = deploy_dir
36
37 @abstractmethod
38 def write_index(self):
39 pass
40
41
42class RpmIndexer(Indexer):
43 def get_ml_prefix_and_os_list(self, arch_var=None, os_var=None):
44 package_archs = {
45 'default': [],
46 }
47
48 target_os = {
49 'default': "",
50 }
51
52 if arch_var is not None and os_var is not None:
53 package_archs['default'] = self.d.getVar(arch_var, True).split()
54 package_archs['default'].reverse()
55 target_os['default'] = self.d.getVar(os_var, True).strip()
56 else:
57 package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split()
58 # arch order is reversed. This ensures the -best- match is
59 # listed first!
60 package_archs['default'].reverse()
61 target_os['default'] = self.d.getVar("TARGET_OS", True).strip()
62 multilibs = self.d.getVar('MULTILIBS', True) or ""
63 for ext in multilibs.split():
64 eext = ext.split(':')
65 if len(eext) > 1 and eext[0] == 'multilib':
66 localdata = bb.data.createCopy(self.d)
67 default_tune_key = "DEFAULTTUNE_virtclass-multilib-" + eext[1]
68 default_tune = localdata.getVar(default_tune_key, False)
69 if default_tune is None:
70 default_tune_key = "DEFAULTTUNE_ML_" + eext[1]
71 default_tune = localdata.getVar(default_tune_key, False)
72 if default_tune:
73 localdata.setVar("DEFAULTTUNE", default_tune)
74 bb.data.update_data(localdata)
75 package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS',
76 True).split()
77 package_archs[eext[1]].reverse()
78 target_os[eext[1]] = localdata.getVar("TARGET_OS",
79 True).strip()
80
81 ml_prefix_list = dict()
82 for mlib in package_archs:
83 if mlib == 'default':
84 ml_prefix_list[mlib] = package_archs[mlib]
85 else:
86 ml_prefix_list[mlib] = list()
87 for arch in package_archs[mlib]:
88 if arch in ['all', 'noarch', 'any']:
89 ml_prefix_list[mlib].append(arch)
90 else:
91 ml_prefix_list[mlib].append(mlib + "_" + arch)
92
93 return (ml_prefix_list, target_os)
94
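    # Illustrative shape of get_ml_prefix_and_os_list()'s return value for a
    # hypothetical lib32 multilib configuration (values are examples only):
    #   ml_prefix_list = {'default': ['cortexa9hf-vfp-neon', 'armv7a', 'all'],
    #                     'lib32': ['lib32_x86', 'all']}
    #   target_os = {'default': 'linux-gnueabi', 'lib32': 'linux'}
    # Note that 'all'/'noarch'/'any' archs keep no multilib prefix.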
95 def write_index(self):
96 sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
97 all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
98
99 mlb_prefix_list = self.get_ml_prefix_and_os_list()[0]
100
101 archs = set()
102 for item in mlb_prefix_list:
103 archs = archs.union(set(i.replace('-', '_') for i in mlb_prefix_list[item]))
104
105 if len(archs) == 0:
106 archs = archs.union(set(all_mlb_pkg_archs))
107
108 archs = archs.union(set(sdk_pkg_archs))
109
110 rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
111 if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
112 pkgfeed_gpg_name = self.d.getVar('PACKAGE_FEED_GPG_NAME', True)
113 pkgfeed_gpg_pass = self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True)
114 else:
115 pkgfeed_gpg_name = None
116 pkgfeed_gpg_pass = None
117 gpg_bin = self.d.getVar('GPG_BIN', True) or \
118 bb.utils.which(os.getenv('PATH'), "gpg")
119
120 index_cmds = []
121 repo_sign_cmds = []
122 rpm_dirs_found = False
123 for arch in archs:
124 dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch)
125 if os.path.exists(dbpath):
126 bb.utils.remove(dbpath, True)
127 arch_dir = os.path.join(self.deploy_dir, arch)
128 if not os.path.isdir(arch_dir):
129 continue
130
131 index_cmds.append("%s --dbpath %s --update -q %s" % \
132 (rpm_createrepo, dbpath, arch_dir))
133 if pkgfeed_gpg_name:
134 repomd_file = os.path.join(arch_dir, 'repodata', 'repomd.xml')
135 gpg_cmd = "%s --detach-sign --armor --batch --no-tty --yes " \
136 "--passphrase-file '%s' -u '%s' " % \
137 (gpg_bin, pkgfeed_gpg_pass, pkgfeed_gpg_name)
138 if self.d.getVar('GPG_PATH', True):
139 gpg_cmd += "--homedir %s " % self.d.getVar('GPG_PATH', True)
140 gpg_cmd += repomd_file
141 repo_sign_cmds.append(gpg_cmd)
142
143 rpm_dirs_found = True
144
145 if not rpm_dirs_found:
146 bb.note("There are no packages in %s" % self.deploy_dir)
147 return
148
149 # Create repodata
150 result = oe.utils.multiprocess_exec(index_cmds, create_index)
151 if result:
152 bb.fatal('%s' % ('\n'.join(result)))
153 # Sign repomd
154 result = oe.utils.multiprocess_exec(repo_sign_cmds, create_index)
155 if result:
156 bb.fatal('%s' % ('\n'.join(result)))
157 # Copy pubkey(s) to repo
158 distro_version = self.d.getVar('DISTRO_VERSION', True) or "oe.0"
159 if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1':
160 shutil.copy2(self.d.getVar('RPM_GPG_PUBKEY', True),
161 os.path.join(self.deploy_dir,
162 'RPM-GPG-KEY-%s' % distro_version))
163 if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
164 shutil.copy2(self.d.getVar('PACKAGE_FEED_GPG_PUBKEY', True),
165 os.path.join(self.deploy_dir,
166 'REPODATA-GPG-KEY-%s' % distro_version))
167
168
169class OpkgIndexer(Indexer):
170 def write_index(self):
171 arch_vars = ["ALL_MULTILIB_PACKAGE_ARCHS",
172 "SDK_PACKAGE_ARCHS",
173 "MULTILIB_ARCHS"]
174
175 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
176
177 if not os.path.exists(os.path.join(self.deploy_dir, "Packages")):
178 open(os.path.join(self.deploy_dir, "Packages"), "w").close()
179
180 index_cmds = []
181 for arch_var in arch_vars:
182 archs = self.d.getVar(arch_var, True)
183 if archs is None:
184 continue
185
186 for arch in archs.split():
187 pkgs_dir = os.path.join(self.deploy_dir, arch)
188 pkgs_file = os.path.join(pkgs_dir, "Packages")
189
190 if not os.path.isdir(pkgs_dir):
191 continue
192
193 if not os.path.exists(pkgs_file):
194 open(pkgs_file, "w").close()
195
196 index_cmds.append('%s -r %s -p %s -m %s' %
197 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))
198
199 if len(index_cmds) == 0:
200 bb.note("There are no packages in %s!" % self.deploy_dir)
201 return
202
203 result = oe.utils.multiprocess_exec(index_cmds, create_index)
204 if result:
205 bb.fatal('%s' % ('\n'.join(result)))
206 if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
207 raise NotImplementedError('Package feed signing not implemented for ipk')
208
209
210
211class DpkgIndexer(Indexer):
212 def _create_configs(self):
213 bb.utils.mkdirhier(self.apt_conf_dir)
214 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
215 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
216 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))
217
218 with open(os.path.join(self.apt_conf_dir, "preferences"),
219 "w") as prefs_file:
220 pass
221 with open(os.path.join(self.apt_conf_dir, "sources.list"),
222 "w+") as sources_file:
223 pass
224
225 with open(self.apt_conf_file, "w") as apt_conf:
226 with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
227 "apt", "apt.conf.sample")) as apt_conf_sample:
228 for line in apt_conf_sample.read().split("\n"):
229 line = re.sub("#ROOTFS#", "/dev/null", line)
230 line = re.sub("#APTCONF#", self.apt_conf_dir, line)
231 apt_conf.write(line + "\n")
232
233 def write_index(self):
234 self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
235 "apt-ftparchive")
236 self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
237 self._create_configs()
238
239 os.environ['APT_CONFIG'] = self.apt_conf_file
240
241 pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
242 if pkg_archs is not None:
243 arch_list = pkg_archs.split()
244 sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
245 if sdk_pkg_archs is not None:
246 for a in sdk_pkg_archs.split():
247 if a not in pkg_archs:
248 arch_list.append(a)
249
250 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
251 arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
252
253 apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
254 gzip = bb.utils.which(os.getenv('PATH'), "gzip")
255
256 index_cmds = []
257 deb_dirs_found = False
258 for arch in arch_list:
259 arch_dir = os.path.join(self.deploy_dir, arch)
260 if not os.path.isdir(arch_dir):
261 continue
262
263 cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)
264
265 cmd += "%s -fc Packages > Packages.gz;" % gzip
266
267 with open(os.path.join(arch_dir, "Release"), "w+") as release:
268 release.write("Label: %s\n" % arch)
269
270 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
271
272 index_cmds.append(cmd)
273
274 deb_dirs_found = True
275
276 if not deb_dirs_found:
277 bb.note("There are no packages in %s" % self.deploy_dir)
278 return
279
280 result = oe.utils.multiprocess_exec(index_cmds, create_index)
281 if result:
282 bb.fatal('%s' % ('\n'.join(result)))
283 if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
284 raise NotImplementedError('Package feed signing not implemented for dpkg')
285
286
287
288class PkgsList(object):
289 __metaclass__ = ABCMeta
290
291 def __init__(self, d, rootfs_dir):
292 self.d = d
293 self.rootfs_dir = rootfs_dir
294
295 @abstractmethod
296 def list(self, format=None):
297 pass
298
299
300class RpmPkgsList(PkgsList):
301 def __init__(self, d, rootfs_dir, arch_var=None, os_var=None):
302 super(RpmPkgsList, self).__init__(d, rootfs_dir)
303
304 self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
305 self.image_rpmlib = os.path.join(self.rootfs_dir, 'var/lib/rpm')
306
307 self.ml_prefix_list, self.ml_os_list = \
308 RpmIndexer(d, rootfs_dir).get_ml_prefix_and_os_list(arch_var, os_var)
309
310 # Determine rpm version
311 cmd = "%s --version" % self.rpm_cmd
312 try:
313 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
314 except subprocess.CalledProcessError as e:
315 bb.fatal("Getting rpm version failed. Command '%s' "
316 "returned %d:\n%s" % (cmd, e.returncode, e.output))
317 self.rpm_version = int(output.split()[-1].split('.')[0])
318
319 '''
320 Translate the RPM/Smart format names to the OE multilib format names
321 '''
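    # For illustration (hypothetical names): with a 'lib32' entry of
    # ['lib32_x86', 'all'] in ml_prefix_list and a pkgdata runtime-reverse
    # entry for 'lib32-libc6', the RPM/Smart pair ('libc6', 'lib32_x86')
    # would come back as the OE pair ('lib32-libc6', 'x86').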
322 def _pkg_translate_smart_to_oe(self, pkg, arch):
323 new_pkg = pkg
324 new_arch = arch
325 fixed_arch = arch.replace('_', '-')
326 found = 0
327 for mlib in self.ml_prefix_list:
328 for cmp_arch in self.ml_prefix_list[mlib]:
329 fixed_cmp_arch = cmp_arch.replace('_', '-')
330 if fixed_arch == fixed_cmp_arch:
331 if mlib == 'default':
332 new_pkg = pkg
333 new_arch = cmp_arch
334 else:
335 new_pkg = mlib + '-' + pkg
336 # We need to strip off the ${mlib}_ prefix on the arch
337 new_arch = cmp_arch.replace(mlib + '_', '')
338
339 # Workaround for bug 3565. Simply look to see if we
340 # know of a package with that name, if not try again!
341 filename = os.path.join(self.d.getVar('PKGDATA_DIR', True),
342 'runtime-reverse',
343 new_pkg)
344 if os.path.exists(filename):
345 found = 1
346 break
347
348 if found == 1 and fixed_arch == fixed_cmp_arch:
349 break
350 #bb.note('%s, %s -> %s, %s' % (pkg, arch, new_pkg, new_arch))
351 return new_pkg, new_arch
352
353 def _list_pkg_deps(self):
354 cmd = [bb.utils.which(os.getenv('PATH'), "rpmresolve"),
355 "-t", self.image_rpmlib]
356
357 try:
358 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
359 except subprocess.CalledProcessError as e:
360 bb.fatal("Cannot get the package dependencies. Command '%s' "
361 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))
362
363 return output
364
365 def list(self, format=None):
366 if format == "deps":
367 if self.rpm_version == 4:
368 bb.fatal("'deps' format dependency listings are not supported with rpm 4 since rpmresolve does not work")
369 return self._list_pkg_deps()
370
371 cmd = self.rpm_cmd + ' --root ' + self.rootfs_dir
372 cmd += ' -D "_dbpath /var/lib/rpm" -qa'
373 if self.rpm_version == 4:
374 cmd += " --qf '[%{NAME} %{ARCH} %{VERSION}\n]'"
375 else:
376 cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'"
377
378 try:
379 # bb.note(cmd)
380 tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
381
382 except subprocess.CalledProcessError as e:
383 bb.fatal("Cannot get the installed packages list. Command '%s' "
384 "returned %d:\n%s" % (cmd, e.returncode, e.output))
385
386 output = list()
387 for line in tmp_output.split('\n'):
388 if len(line.strip()) == 0:
389 continue
390 pkg = line.split()[0]
391 arch = line.split()[1]
392 ver = line.split()[2]
393 # Skip GPG keys
394 if pkg == 'gpg-pubkey':
395 continue
396 if self.rpm_version == 4:
397 pkgorigin = "unknown"
398 else:
399 pkgorigin = line.split()[3]
400 new_pkg, new_arch = self._pkg_translate_smart_to_oe(pkg, arch)
401
402 if format == "arch":
403 output.append('%s %s' % (new_pkg, new_arch))
404 elif format == "file":
405 output.append('%s %s %s' % (new_pkg, pkgorigin, new_arch))
406 elif format == "ver":
407 output.append('%s %s %s' % (new_pkg, new_arch, ver))
408 else:
409 output.append('%s' % (new_pkg))
410
411 output.sort()
412
413 return '\n'.join(output)
414
415
416class OpkgPkgsList(PkgsList):
417 def __init__(self, d, rootfs_dir, config_file):
418 super(OpkgPkgsList, self).__init__(d, rootfs_dir)
419
420 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
421 self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
422 self.opkg_args += self.d.getVar("OPKG_ARGS", True)
423
424 def list(self, format=None):
425 opkg_query_cmd = bb.utils.which(os.getenv('PATH'), "opkg-query-helper.py")
426
427 if format == "arch":
428 cmd = "%s %s status | %s -a" % \
429 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
430 elif format == "file":
431 cmd = "%s %s status | %s -f" % \
432 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
433 elif format == "ver":
434 cmd = "%s %s status | %s -v" % \
435 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
436 elif format == "deps":
437 cmd = "%s %s status | %s" % \
438 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
439 else:
440 cmd = "%s %s list_installed | cut -d' ' -f1" % \
441 (self.opkg_cmd, self.opkg_args)
442
443 try:
444 # bb.note(cmd)
445 tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
446
447 except subprocess.CalledProcessError as e:
448 bb.fatal("Cannot get the installed packages list. Command '%s' "
449 "returned %d:\n%s" % (cmd, e.returncode, e.output))
450
451 output = list()
452 for line in tmp_output.split('\n'):
453 if len(line.strip()) == 0:
454 continue
455 if format == "file":
456 pkg, pkg_file, pkg_arch = line.split()
457 full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
458 if os.path.exists(full_path):
459 output.append('%s %s %s' % (pkg, full_path, pkg_arch))
460 else:
461 output.append('%s %s %s' % (pkg, pkg_file, pkg_arch))
462 else:
463 output.append(line)
464
465 output.sort()
466
467 return '\n'.join(output)
468
469
470class DpkgPkgsList(PkgsList):
471 def list(self, format=None):
472 cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
473 "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
474 "-W"]
475
476 if format == "arch":
477 cmd.append("-f=${Package} ${PackageArch}\n")
478 elif format == "file":
479 cmd.append("-f=${Package} ${Package}_${Version}_${Architecture}.deb ${PackageArch}\n")
480 elif format == "ver":
481 cmd.append("-f=${Package} ${PackageArch} ${Version}\n")
482 elif format == "deps":
483 cmd.append("-f=Package: ${Package}\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n")
484 else:
485 cmd.append("-f=${Package}\n")
486
487 try:
488 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
489 except subprocess.CalledProcessError as e:
490 bb.fatal("Cannot get the installed packages list. Command '%s' "
491 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))
492
493 if format == "file":
494 tmp_output = ""
495 for line in tuple(output.split('\n')):
496 if not line.strip():
497 continue
498 pkg, pkg_file, pkg_arch = line.split()
499 full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
500 if os.path.exists(full_path):
501 tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
502 else:
503 tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)
504
505 output = tmp_output
506 elif format == "deps":
507 opkg_query_cmd = bb.utils.which(os.getenv('PATH'), "opkg-query-helper.py")
508 file_out = tempfile.NamedTemporaryFile()
509 file_out.write(output)
510 file_out.flush()
511
512 try:
513 output = subprocess.check_output("cat %s | %s" %
514 (file_out.name, opkg_query_cmd),
515 stderr=subprocess.STDOUT,
516 shell=True)
517 except subprocess.CalledProcessError as e:
518 file_out.close()
519 bb.fatal("Cannot compute packages dependencies. Command '%s' "
520 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
521
522 file_out.close()
523
524 return output
525
526
527class PackageManager(object):
528 """
529 This is an abstract class. Do not instantiate this directly.
530 """
531 __metaclass__ = ABCMeta
532
533 def __init__(self, d):
534 self.d = d
535 self.deploy_dir = None
536 self.deploy_lock = None
537 self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or ""
538 self.feed_prefix = self.d.getVar('PACKAGE_FEED_PREFIX', True) or ""
539
540 """
541 Update the package manager package database.
542 """
543 @abstractmethod
544 def update(self):
545 pass
546
547 """
548 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
549 True, installation failures are ignored.
550 """
551 @abstractmethod
552 def install(self, pkgs, attempt_only=False):
553 pass
554
555 """
556 Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
557 is False, any dependencies are left in place.
558 """
559 @abstractmethod
560 def remove(self, pkgs, with_dependencies=True):
561 pass
562
563 """
564 This function creates the index files
565 """
566 @abstractmethod
567 def write_index(self):
568 pass
569
570 @abstractmethod
571 def remove_packaging_data(self):
572 pass
573
574 @abstractmethod
575 def list_installed(self, format=None):
576 pass
577
578 @abstractmethod
579 def insert_feeds_uris(self):
580 pass
581
582 """
583 Install complementary packages based upon the list of currently installed
584 packages e.g. locales, *-dev, *-dbg, etc. This will only attempt to install
585 these packages; if they don't exist, no error will occur. Note: every
586 backend needs to call this function explicitly after the normal package
587 installation.
588 """
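    # As a hedged example of how the glob list is assembled: with
    # IMAGE_INSTALL_COMPLEMENTARY = "*-dev *-dbg" and IMAGE_LINGUAS = "en-us"
    # (hypothetical values), the globs passed to oe-pkgdata-util would be
    # "*-dev *-dbg *-locale-en *-locale-en-us".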
589 def install_complementary(self, globs=None):
590 # we need to write the list of installed packages to a file because the
591 # oe-pkgdata-util reads it from a file
592 installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
593 "installed_pkgs.txt")
594 with open(installed_pkgs_file, "w+") as installed_pkgs:
595 installed_pkgs.write(self.list_installed("arch"))
596
597 if globs is None:
598 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True)
599 split_linguas = set()
600
601 for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
602 split_linguas.add(translation)
603 split_linguas.add(translation.split('-')[0])
604
605 split_linguas = sorted(split_linguas)
606
607 for lang in split_linguas:
608 globs += " *-locale-%s" % lang
609
610 if globs is None:
611 return
612
613 cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
614 "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
615 globs]
616 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True)
617 if exclude:
618 cmd.extend(['-x', exclude])
619 try:
620 bb.note("Installing complementary packages ...")
621 bb.note('Running %s' % cmd)
622 complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
623 except subprocess.CalledProcessError as e:
624 bb.fatal("Could not compute complementary packages list. Command "
625 "'%s' returned %d:\n%s" %
626 (' '.join(cmd), e.returncode, e.output))
627 self.install(complementary_pkgs.split(), attempt_only=True)
628
629 def deploy_dir_lock(self):
630 if self.deploy_dir is None:
631 raise RuntimeError("deploy_dir is not set!")
632
633 lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")
634
635 self.deploy_lock = bb.utils.lockfile(lock_file_name)
636
637 def deploy_dir_unlock(self):
638 if self.deploy_lock is None:
639 return
640
641 bb.utils.unlockfile(self.deploy_lock)
642
643 self.deploy_lock = None
644
645
646class RpmPM(PackageManager):
647 def __init__(self,
648 d,
649 target_rootfs,
650 target_vendor,
651 task_name='target',
652 providename=None,
653 arch_var=None,
654 os_var=None):
655 super(RpmPM, self).__init__(d)
656 self.target_rootfs = target_rootfs
657 self.target_vendor = target_vendor
658 self.task_name = task_name
659 self.providename = providename
660 self.fullpkglist = list()
661 self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True)
662 self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm")
663 self.install_dir_name = "oe_install"
664 self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name)
665 self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
666 self.smart_cmd = bb.utils.which(os.getenv('PATH'), "smart")
667 self.smart_opt = "--log-level=warning --data-dir=" + os.path.join(target_rootfs,
668 'var/lib/smart')
669 self.scriptlet_wrapper = self.d.expand('${WORKDIR}/scriptlet_wrapper')
670 self.solution_manifest = self.d.expand('${T}/saved/%s_solution' %
671 self.task_name)
672 self.saved_rpmlib = self.d.expand('${T}/saved/%s' % self.task_name)
673 self.image_rpmlib = os.path.join(self.target_rootfs, 'var/lib/rpm')
674
675 if not os.path.exists(self.d.expand('${T}/saved')):
676 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
677
678 self.indexer = RpmIndexer(self.d, self.deploy_dir)
679 self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)
680 self.rpm_version = self.pkgs_list.rpm_version
681
682 self.ml_prefix_list, self.ml_os_list = self.indexer.get_ml_prefix_and_os_list(arch_var, os_var)
683
684 def insert_feeds_uris(self):
685 if self.feed_uris == "":
686 return
687
688 # List must be in most preferred to least preferred order
689 default_platform_extra = set()
690 platform_extra = set()
691 bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
692 for mlib in self.ml_os_list:
693 for arch in self.ml_prefix_list[mlib]:
694 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
695 if mlib == bbextendvariant:
696 default_platform_extra.add(plt)
697 else:
698 platform_extra.add(plt)
699
700 platform_extra = platform_extra.union(default_platform_extra)
701
702 arch_list = []
703 for canonical_arch in platform_extra:
704 arch = canonical_arch.split('-')[0]
705 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
706 continue
707 arch_list.append(arch)
708
709 uri_iterator = 0
710 channel_priority = 10 + 5 * len(self.feed_uris.split()) * len(arch_list)
711
712 for uri in self.feed_uris.split():
713 full_uri = uri
714 if self.feed_prefix:
715 full_uri = os.path.join(uri, self.feed_prefix)
716 for arch in arch_list:
717 bb.note('Note: adding Smart channel url%d-%s (%s)' %
718 (uri_iterator, arch, channel_priority))
719 self._invoke_smart('channel --add url%d-%s type=rpm-md baseurl=%s/%s -y'
720 % (uri_iterator, arch, full_uri, arch))
721 self._invoke_smart('channel --set url%d-%s priority=%d' %
722 (uri_iterator, arch, channel_priority))
723 channel_priority -= 5
724 uri_iterator += 1
725
726 '''
727 Create configs for rpm and smart; multilib is supported.
728 '''
729 def create_configs(self):
730 target_arch = self.d.getVar('TARGET_ARCH', True)
731 platform = '%s%s-%s' % (target_arch.replace('-', '_'),
732 self.target_vendor,
733 self.ml_os_list['default'])
734
735 # List must be in most preferred to least preferred order
736 default_platform_extra = list()
737 platform_extra = list()
738 bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
739 for mlib in self.ml_os_list:
740 for arch in self.ml_prefix_list[mlib]:
741 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
742 if mlib == bbextendvariant:
743 if plt not in default_platform_extra:
744 default_platform_extra.append(plt)
745 else:
746 if plt not in platform_extra:
747 platform_extra.append(plt)
748 platform_extra = default_platform_extra + platform_extra
749
750 self._create_configs(platform, platform_extra)
751
752 def _invoke_smart(self, args):
753 cmd = "%s %s %s" % (self.smart_cmd, self.smart_opt, args)
754 # bb.note(cmd)
755 try:
756 complementary_pkgs = subprocess.check_output(cmd,
757 stderr=subprocess.STDOUT,
758 shell=True)
759 # bb.note(complementary_pkgs)
760 return complementary_pkgs
761 except subprocess.CalledProcessError as e:
762 bb.fatal("Could not invoke smart. Command "
763 "'%s' returned %d:\n%s" % (cmd, e.returncode, e.output))
764
765 def _search_pkg_name_in_feeds(self, pkg, feed_archs):
766 for arch in feed_archs:
767 arch = arch.replace('-', '_')
768 regex_match = re.compile(r"^%s-[^-]*-[^-]*@%s$" % \
769 (re.escape(pkg), re.escape(arch)))
770 for p in self.fullpkglist:
771 if regex_match.match(p) is not None:
772 # First found is best match
773 # bb.note('%s -> %s' % (pkg, pkg + '@' + arch))
774 return pkg + '@' + arch
775
776 # Search provides if not found by pkgname.
777 bb.note('Not found %s by name, searching provides ...' % pkg)
778 cmd = "%s %s query --provides %s --show-format='$name-$version'" % \
779 (self.smart_cmd, self.smart_opt, pkg)
780 cmd += " | sed -ne 's/ *Provides://p'"
781 bb.note('cmd: %s' % cmd)
782 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
783 # Found a provider
784 if output:
785 bb.note('Found providers for %s: %s' % (pkg, output))
786 for p in output.split():
787 for arch in feed_archs:
788 arch = arch.replace('-', '_')
789 if p.rstrip().endswith('@' + arch):
790 return p
791
792 return ""
793
794 '''
795 Translate the OE multilib format names to the RPM/Smart format names
796 It searches the RPM/Smart format names in the probable multilib feeds first,
797 and then searches the default base feed.
798 '''
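    # Hypothetical example of the translation: with ml_prefix_list containing
    # {'lib32': ['lib32_x86', 'all']} and a feed entry such as
    # 'libc6-2.21-r0@lib32_x86' in self.fullpkglist, the OE name 'lib32-libc6'
    # would be translated to the smart name 'libc6@lib32_x86'.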
799 def _pkg_translate_oe_to_smart(self, pkgs, attempt_only=False):
800 new_pkgs = list()
801
802 for pkg in pkgs:
803 new_pkg = pkg
804 # Search new_pkg in probable multilibs first
805 for mlib in self.ml_prefix_list:
806 # Jump the default archs
807 if mlib == 'default':
808 continue
809
810 subst = pkg.replace(mlib + '-', '')
811 # if the pkg in this multilib feed
812 if subst != pkg:
813 feed_archs = self.ml_prefix_list[mlib]
814 new_pkg = self._search_pkg_name_in_feeds(subst, feed_archs)
815 if not new_pkg:
816 # Failed to translate, package not found!
817 err_msg = '%s not found in the %s feeds (%s).\n' % \
818 (pkg, mlib, " ".join(feed_archs))
819 if not attempt_only:
820 err_msg += " ".join(self.fullpkglist)
821 bb.fatal(err_msg)
822 bb.warn(err_msg)
823 else:
824 new_pkgs.append(new_pkg)
825
826 break
827
828 # Apparently not a multilib package...
829 if pkg == new_pkg:
830 # Search new_pkg in default archs
831 default_archs = self.ml_prefix_list['default']
832 new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs)
833 if not new_pkg:
834 err_msg = '%s not found in the base feeds (%s).\n' % \
835 (pkg, ' '.join(default_archs))
836 if not attempt_only:
837 err_msg += " ".join(self.fullpkglist)
838 bb.fatal(err_msg)
839 bb.warn(err_msg)
840 else:
841 new_pkgs.append(new_pkg)
842
843 return new_pkgs
844
845 def _create_configs(self, platform, platform_extra):
846 # Setup base system configuration
847 bb.note("configuring RPM platform settings")
848
849 # Configure internal RPM environment when using Smart
850 os.environ['RPM_ETCRPM'] = self.etcrpm_dir
851 bb.utils.mkdirhier(self.etcrpm_dir)
852
853 # Setup temporary directory -- install...
854 if os.path.exists(self.install_dir_path):
855 bb.utils.remove(self.install_dir_path, True)
856 bb.utils.mkdirhier(os.path.join(self.install_dir_path, 'tmp'))
857
858 channel_priority = 5
859 platform_dir = os.path.join(self.etcrpm_dir, "platform")
860 sdkos = self.d.getVar("SDK_OS", True)
861 with open(platform_dir, "w+") as platform_fd:
862 platform_fd.write(platform + '\n')
863 for pt in platform_extra:
864 channel_priority += 5
865 if sdkos:
866 tmp = re.sub("-%s$" % sdkos, "-%s\n" % sdkos, pt)
867 tmp = re.sub("-linux.*$", "-linux.*\n", tmp)
868 platform_fd.write(tmp)
869
870 # Tell RPM that the "/" directory exists and is available
871 bb.note("configuring RPM system provides")
872 sysinfo_dir = os.path.join(self.etcrpm_dir, "sysinfo")
873 bb.utils.mkdirhier(sysinfo_dir)
874 with open(os.path.join(sysinfo_dir, "Dirnames"), "w+") as dirnames:
875 dirnames.write("/\n")
876
877 if self.providename:
878 providename_dir = os.path.join(sysinfo_dir, "Providename")
879 if not os.path.exists(providename_dir):
880 providename_content = '\n'.join(self.providename)
881 providename_content += '\n'
882 open(providename_dir, "w+").write(providename_content)
883
884 # Configure RPM... we enforce these settings!
885 bb.note("configuring RPM DB settings")
886 # After changing the __db.* cache size, the log file will not be
887 # generated automatically, which will raise some warnings,
888 # so touch a bare log file for rpm to write into.
889 if self.rpm_version == 5:
890 rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001')
891 if not os.path.exists(rpmlib_log):
892 bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log'))
893 open(rpmlib_log, 'w+').close()
894
895 DB_CONFIG_CONTENT = "# ================ Environment\n" \
896 "set_data_dir .\n" \
897 "set_create_dir .\n" \
898 "set_lg_dir ./log\n" \
899 "set_tmp_dir ./tmp\n" \
900 "set_flags db_log_autoremove on\n" \
901 "\n" \
902 "# -- thread_count must be >= 8\n" \
903 "set_thread_count 64\n" \
904 "\n" \
905 "# ================ Logging\n" \
906 "\n" \
907 "# ================ Memory Pool\n" \
908 "set_cachesize 0 1048576 0\n" \
909 "set_mp_mmapsize 268435456\n" \
910 "\n" \
911 "# ================ Locking\n" \
912 "set_lk_max_locks 16384\n" \
913 "set_lk_max_lockers 16384\n" \
914 "set_lk_max_objects 16384\n" \
915 "mutex_set_max 163840\n" \
916 "\n" \
917 "# ================ Replication\n"
918
919 db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG')
920 if not os.path.exists(db_config_dir):
921 open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT)
922
923 # Create database so that smart doesn't complain (lazy init)
924 opt = "-qa"
925 if self.rpm_version == 4:
926 opt = "--initdb"
927 cmd = "%s --root %s --dbpath /var/lib/rpm %s > /dev/null" % (
928 self.rpm_cmd, self.target_rootfs, opt)
929 try:
930 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
931 except subprocess.CalledProcessError as e:
932 bb.fatal("Create rpm database failed. Command '%s' "
933 "returned %d:\n%s" % (cmd, e.returncode, e.output))
934 # Import GPG key to RPM database of the target system
935 if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1':
936 pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True)
937 cmd = "%s --root %s --dbpath /var/lib/rpm --import %s > /dev/null" % (
938 self.rpm_cmd, self.target_rootfs, pubkey_path)
939 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
940
941 # Configure smart
942 bb.note("configuring Smart settings")
943 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
944 True)
945 self._invoke_smart('config --set rpm-root=%s' % self.target_rootfs)
946 self._invoke_smart('config --set rpm-dbpath=/var/lib/rpm')
947 self._invoke_smart('config --set rpm-extra-macros._var=%s' %
948 self.d.getVar('localstatedir', True))
949 cmd = "config --set rpm-extra-macros._tmppath=/%s/tmp" % (self.install_dir_name)
950
951 prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True)
952 if prefer_color:
953 if prefer_color not in ['0', '1', '2', '4']:
954 bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n"
955 "\t1: ELF32 wins\n"
956 "\t2: ELF64 wins\n"
957 "\t4: ELF64 N32 wins (mips64 or mips64el only)" %
958 prefer_color)
959 if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \
960 ['mips64', 'mips64el']:
961 bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el "
962 "only.")
963 self._invoke_smart('config --set rpm-extra-macros._prefer_color=%s'
964 % prefer_color)
965
966 self._invoke_smart(cmd)
967 self._invoke_smart('config --set rpm-ignoresize=1')
968
969 # Write common configuration for host and target usage
970 self._invoke_smart('config --set rpm-nolinktos=1')
971 self._invoke_smart('config --set rpm-noparentdirs=1')
972 check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True)
973 if check_signature and check_signature.strip() == "0":
974 self._invoke_smart('config --set rpm-check-signatures=false')
975 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
976 self._invoke_smart('flag --set ignore-recommends %s' % i)
977
978 # Do the following configurations here, to avoid them being
979 # saved for field upgrade
980 if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1":
981 self._invoke_smart('config --set ignore-all-recommends=1')
982 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
983 for i in pkg_exclude.split():
984 self._invoke_smart('flag --set exclude-packages %s' % i)
985
986 # Optional debugging
987 # self._invoke_smart('config --set rpm-log-level=debug')
988 # cmd = 'config --set rpm-log-file=/tmp/smart-debug-logfile'
989 # self._invoke_smart(cmd)
990 ch_already_added = []
991 for canonical_arch in platform_extra:
992 arch = canonical_arch.split('-')[0]
993 arch_channel = os.path.join(self.deploy_dir, arch)
994 if os.path.exists(arch_channel) and not arch in ch_already_added:
995 bb.note('Note: adding Smart channel %s (%s)' %
996 (arch, channel_priority))
997 self._invoke_smart('channel --add %s type=rpm-md baseurl=%s -y'
998 % (arch, arch_channel))
999 self._invoke_smart('channel --set %s priority=%d' %
1000 (arch, channel_priority))
1001 channel_priority -= 5
1002
1003 ch_already_added.append(arch)
1004
1005 bb.note('adding Smart RPM DB channel')
1006 self._invoke_smart('channel --add rpmsys type=rpm-sys -y')
1007
1008 # Construct install scriptlet wrapper.
1009 # Scripts need to be ordered when executed; this ensures numeric order.
1010 # If we ever run into needing more than 899 scripts, we'll have to
1011 # change num to start with 1000.
1012 #
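        # The wrapper below is invoked by rpm with positional arguments that
        # are assumed (from the usage in this template) to be: $1 = target
        # rootfs, $2 = scriptlet interpreter, $3 = scriptlet path and
        # $4 = the package argument (install count).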
1013 if self.rpm_version == 4:
1014 scriptletcmd = "$2 $3 $4\n"
1015 scriptpath = "$3"
1016 else:
1017 scriptletcmd = "$2 $1/$3 $4\n"
1018 scriptpath = "$1/$3"
1019
1020 SCRIPTLET_FORMAT = "#!/bin/bash\n" \
1021 "\n" \
1022 "export PATH=%s\n" \
1023 "export D=%s\n" \
1024 'export OFFLINE_ROOT="$D"\n' \
1025 'export IPKG_OFFLINE_ROOT="$D"\n' \
1026 'export OPKG_OFFLINE_ROOT="$D"\n' \
1027 "export INTERCEPT_DIR=%s\n" \
1028 "export NATIVE_ROOT=%s\n" \
1029 "\n" \
1030 + scriptletcmd + \
1031 "if [ $? -ne 0 ]; then\n" \
1032 " if [ $4 -eq 1 ]; then\n" \
1033 " mkdir -p $1/etc/rpm-postinsts\n" \
1034 " num=100\n" \
1035 " while [ -e $1/etc/rpm-postinsts/${num}-* ]; do num=$((num + 1)); done\n" \
1036 " name=`head -1 " + scriptpath + " | cut -d\' \' -f 2`\n" \
1037 ' echo "#!$2" > $1/etc/rpm-postinsts/${num}-${name}\n' \
1038 ' echo "# Arg: $4" >> $1/etc/rpm-postinsts/${num}-${name}\n' \
1039 " cat " + scriptpath + " >> $1/etc/rpm-postinsts/${num}-${name}\n" \
1040 " chmod +x $1/etc/rpm-postinsts/${num}-${name}\n" \
1041 " else\n" \
1042 ' echo "Error: pre/post remove scriptlet failed"\n' \
1043 " fi\n" \
1044 "fi\n"
1045
1046 intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts')
1047 native_root = self.d.getVar('STAGING_DIR_NATIVE', True)
1048 scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'],
1049 self.target_rootfs,
1050 intercept_dir,
1051 native_root)
1052 open(self.scriptlet_wrapper, 'w+').write(scriptlet_content)
1053
1054 bb.note("Note: configuring RPM cross-install scriptlet_wrapper")
1055 os.chmod(self.scriptlet_wrapper, 0755)
1056 cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \
1057 self.scriptlet_wrapper
1058 self._invoke_smart(cmd)
1059
1060 # Debug to show smart config info
1061 # bb.note(self._invoke_smart('config --show'))
1062
1063 def update(self):
1064 self._invoke_smart('update rpmsys')
1065
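    # Illustrative behaviour with hypothetical packages: given pkgs = ['foo']
    # and pkgdata recording RDEPENDS_foo = 'bar' and RDEPENDS_bar = 'baz'
    # (each with its own pkgdata), the method below would return
    # ['foo', 'bar', 'baz'], extending the input list in place.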
1066 def get_rdepends_recursively(self, pkgs):
1067 # pkgs will be changed during the loop, so use [:] to make a copy.
1068 for pkg in pkgs[:]:
1069 sub_data = oe.packagedata.read_subpkgdata(pkg, self.d)
1070 sub_rdep = sub_data.get("RDEPENDS_" + pkg)
1071 if not sub_rdep:
1072 continue
1073 done = bb.utils.explode_dep_versions2(sub_rdep).keys()
1074 next = done
1075 # Find all the rdepends on dependency chain
1076 while next:
1077 new = []
1078 for sub_pkg in next:
1079 sub_data = oe.packagedata.read_subpkgdata(sub_pkg, self.d)
1080 sub_pkg_rdep = sub_data.get("RDEPENDS_" + sub_pkg)
1081 if not sub_pkg_rdep:
1082 continue
1083 for p in bb.utils.explode_dep_versions2(sub_pkg_rdep):
1084 # Already handled, skip it.
1085 if p in done or p in pkgs:
1086 continue
1087 # It's a new dep
1088 if oe.packagedata.has_subpkgdata(p, self.d):
1089 done.append(p)
1090 new.append(p)
1091 next = new
1092 pkgs.extend(done)
1093 return pkgs
1094
1095 '''
1096 Install pkgs with smart; the pkg names are in OE format
1097 '''
1098 def install(self, pkgs, attempt_only=False):
1099
1100 if not pkgs:
1101 bb.note("There are no packages to install")
1102 return
1103 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1104 if not attempt_only:
1105 # Pull in multilib requires since rpm may not pull them in
1106 # correctly, for example,
1107 # lib32-packagegroup-core-standalone-sdk-target requires
1108 # lib32-libc6, but rpm may pull in libc6 rather than lib32-libc6
1109 # since it doesn't know the mlprefix (lib32-); bitbake knows it and
1110 # can handle it well, so finding the RDEPENDS along the chain will
1111 # fix the problem. Both do_rootfs and do_populate_sdk have this
1112 # issue.
1113 # The attempt_only packages don't need this since they are
1114 # based on the installed ones.
1115 #
1116 # Separate pkgs into two lists, one is multilib, the other one
1117 # is non-multilib.
1118 ml_pkgs = []
1119 non_ml_pkgs = pkgs[:]
1120 for pkg in pkgs:
1121 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
1122 if pkg.startswith(mlib + '-'):
1123 ml_pkgs.append(pkg)
1124 non_ml_pkgs.remove(pkg)
1125
1126 if len(ml_pkgs) > 0 and len(non_ml_pkgs) > 0:
1127 # Found both foo and lib-foo
1128 ml_pkgs = self.get_rdepends_recursively(ml_pkgs)
1129 non_ml_pkgs = self.get_rdepends_recursively(non_ml_pkgs)
1130 # Longer list makes smart slower, so only keep the pkgs
1131 # which have the same BPN, and smart can handle others
1132 # correctly.
1133 pkgs_new = []
1134 for pkg in non_ml_pkgs:
1135 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
1136 mlib_pkg = mlib + "-" + pkg
1137 if mlib_pkg in ml_pkgs:
1138 pkgs_new.append(pkg)
1139 pkgs_new.append(mlib_pkg)
1140 for pkg in pkgs:
1141 if pkg not in pkgs_new:
1142 pkgs_new.append(pkg)
1143 pkgs = pkgs_new
1144 new_depends = {}
1145 deps = bb.utils.explode_dep_versions2(" ".join(pkgs))
1146 for depend in deps:
1147 data = oe.packagedata.read_subpkgdata(depend, self.d)
1148 key = "PKG_%s" % depend
1149 if key in data:
1150 new_depend = data[key]
1151 else:
1152 new_depend = depend
1153 new_depends[new_depend] = deps[depend]
1154 pkgs = bb.utils.join_deps(new_depends, commasep=True).split(', ')
1155 pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
1156 if not attempt_only:
1157 bb.note('to be installed: %s' % ' '.join(pkgs))
1158 cmd = "%s %s install -y %s" % \
1159 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
1160 bb.note(cmd)
1161 else:
1162 bb.note('installing attempt only packages...')
1163 bb.note('Attempting %s' % ' '.join(pkgs))
1164 cmd = "%s %s install --attempt -y %s" % \
1165 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
1166 try:
1167 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1168 bb.note(output)
1169 except subprocess.CalledProcessError as e:
1170 bb.fatal("Unable to install packages. Command '%s' "
1171 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1172
1173 '''
1174 Remove pkgs with smart; the pkg names are in smart/rpm format
1175 '''
1176 def remove(self, pkgs, with_dependencies=True):
1177 bb.note('to be removed: ' + ' '.join(pkgs))
1178
1179 if not with_dependencies:
1180 cmd = "%s -e --nodeps " % self.rpm_cmd
1181 cmd += "--root=%s " % self.target_rootfs
1182 cmd += "--dbpath=/var/lib/rpm "
1183 cmd += "--define='_cross_scriptlet_wrapper %s' " % \
1184 self.scriptlet_wrapper
1185 cmd += "--define='_tmppath /%s/tmp' %s" % (self.install_dir_name, ' '.join(pkgs))
1186 else:
1187 # for pkg in pkgs:
1188 # bb.note('Debug: What required: %s' % pkg)
1189 # bb.note(self._invoke_smart('query %s --show-requiredby' % pkg))
1190
1191 cmd = "%s %s remove -y %s" % (self.smart_cmd,
1192 self.smart_opt,
1193 ' '.join(pkgs))
1194
1195 try:
1196 bb.note(cmd)
1197 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1198 bb.note(output)
1199 except subprocess.CalledProcessError as e:
1200 bb.note("Unable to remove packages. Command '%s' "
1201 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1202
1203 def upgrade(self):
1204 bb.note('smart upgrade')
1205 self._invoke_smart('upgrade')
1206
1207 def write_index(self):
1208 result = self.indexer.write_index()
1209
1210 if result is not None:
1211 bb.fatal(result)
1212
1213 def remove_packaging_data(self):
1214 bb.utils.remove(self.image_rpmlib, True)
1215 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
1216 True)
1217 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/opkg'), True)
1218
1219 # remove temp directory
1220 bb.utils.remove(self.install_dir_path, True)
1221
1222 def backup_packaging_data(self):
1223 # Save the rpmlib for incremental rpm image generation
1224 if os.path.exists(self.saved_rpmlib):
1225 bb.utils.remove(self.saved_rpmlib, True)
1226 shutil.copytree(self.image_rpmlib,
1227 self.saved_rpmlib,
1228 symlinks=True)
1229
1230 def recovery_packaging_data(self):
1231 # Move the rpmlib back
1232 if os.path.exists(self.saved_rpmlib):
1233 if os.path.exists(self.image_rpmlib):
1234 bb.utils.remove(self.image_rpmlib, True)
1235
1236 bb.note('Recovering packaging data')
1237 shutil.copytree(self.saved_rpmlib,
1238 self.image_rpmlib,
1239 symlinks=True)
1240
1241 def list_installed(self, format=None):
1242 return self.pkgs_list.list(format)
1243
1244 '''
1245 For an incremental install, we need to determine what we've got,
1246 what we need to add, and what to remove...
1247 The dump_install_solution will dump and save the new install
1248 solution.
1249 '''
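    # The saved solution manifest is expected to hold one smart-format entry
    # per line, e.g. 'libc6-2.21-r0@cortexa9hf_vfp_neon' (hypothetical); only
    # lines containing '@' are treated as package entries when it is re-read.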
1250 def dump_install_solution(self, pkgs):
1251 bb.note('creating new install solution for incremental install')
1252 if len(pkgs) == 0:
1253 return
1254
1255 pkgs = self._pkg_translate_oe_to_smart(pkgs, False)
1256 install_pkgs = list()
1257
1258 cmd = "%s %s install -y --dump %s 2>%s" % \
1259 (self.smart_cmd,
1260 self.smart_opt,
1261 ' '.join(pkgs),
1262 self.solution_manifest)
1263 try:
1264 # Disable rpmsys channel for the fake install
1265 self._invoke_smart('channel --disable rpmsys')
1266
1267 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1268 with open(self.solution_manifest, 'r') as manifest:
1269 for pkg in manifest.read().split('\n'):
1270 if '@' in pkg:
1271 install_pkgs.append(pkg)
1272 except subprocess.CalledProcessError as e:
1273 bb.note("Unable to dump install packages. Command '%s' "
1274 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1275 # Re-enable the rpmsys channel
1276 self._invoke_smart('channel --enable rpmsys')
1277 return install_pkgs
1278
1279 '''
1280 For an incremental install, we need to determine what we've got,
1281 what we need to add, and what to remove...
1282 The load_old_install_solution will load the previous install
1283 solution
1284 '''
1285 def load_old_install_solution(self):
1286 bb.note('load old install solution for incremental install')
1287 installed_pkgs = list()
1288 if not os.path.exists(self.solution_manifest):
1289 bb.note('old install solution does not exist')
1290 return installed_pkgs
1291
1292 with open(self.solution_manifest, 'r') as manifest:
1293 for pkg in manifest.read().split('\n'):
1294 if '@' in pkg:
1295 installed_pkgs.append(pkg.strip())
1296
1297 return installed_pkgs
1298
1299 '''
1300 Dump all available packages in the feeds; it should be invoked after the
1301 newest rpm index has been created.
1302 '''
1303 def dump_all_available_pkgs(self):
1304 available_manifest = self.d.expand('${T}/saved/available_pkgs.txt')
1305 available_pkgs = list()
1306 cmd = "%s %s query --output %s" % \
1307 (self.smart_cmd, self.smart_opt, available_manifest)
1308 try:
1309 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1310 with open(available_manifest, 'r') as manifest:
1311 for pkg in manifest.read().split('\n'):
1312 if '@' in pkg:
1313 available_pkgs.append(pkg.strip())
1314 except subprocess.CalledProcessError as e:
1315 bb.note("Unable to list all available packages. Command '%s' "
1316 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1317
1318 self.fullpkglist = available_pkgs
1319
1320 return
1321
1322 def save_rpmpostinst(self, pkg):
1323 mlibs = (self.d.getVar('MULTILIB_GLOBAL_VARIANTS', False) or "").split()
1324
1325 new_pkg = pkg
1326 # Remove any multilib prefix from the package name
1327 for mlib in mlibs:
1328 if mlib in pkg:
1329 new_pkg = pkg.replace(mlib + '-', '')
1330 break
1331
1332 bb.note(' * postponing %s' % new_pkg)
1333 saved_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/') + new_pkg
1334
1335 cmd = self.rpm_cmd + ' -q --scripts --root ' + self.target_rootfs
1336 cmd += ' --dbpath=/var/lib/rpm ' + new_pkg
1337 cmd += ' | sed -n -e "/^postinstall scriptlet (using .*):$/,/^.* scriptlet (using .*):$/ {/.*/p}"'
1338 cmd += ' | sed -e "/postinstall scriptlet (using \(.*\)):$/d"'
1339 cmd += ' -e "/^.* scriptlet (using .*):$/d" > %s' % saved_dir
1340
1341 try:
1342 bb.note(cmd)
1343 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
1344 bb.note(output)
1345 os.chmod(saved_dir, 0755)
1346 except subprocess.CalledProcessError as e:
1347 bb.fatal("Invoke save_rpmpostinst failed. Command '%s' "
1348 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1349
1350 '''Write common configuration for target usage'''
1351 def rpm_setup_smart_target_config(self):
1352 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
1353 True)
1354
1355 self._invoke_smart('config --set rpm-nolinktos=1')
1356 self._invoke_smart('config --set rpm-noparentdirs=1')
1357 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
1358 self._invoke_smart('flag --set ignore-recommends %s' % i)
1359 self._invoke_smart('channel --add rpmsys type=rpm-sys -y')
1360
1361 '''
1362 The rpm db lock files are produced after invoking rpm to query on the
1363 build system, and they prevent rpm on the target from working, so we
1364 need to unlock the rpm db by removing the lock files.
1365 '''
1366 def unlock_rpm_db(self):
1367 # Remove rpm db lock files
1368 rpm_db_locks = glob.glob('%s/var/lib/rpm/__db.*' % self.target_rootfs)
1369 for f in rpm_db_locks:
1370 bb.utils.remove(f, True)
1371
1372
1373class OpkgPM(PackageManager):
1374 def __init__(self, d, target_rootfs, config_file, archs, task_name='target'):
1375 super(OpkgPM, self).__init__(d)
1376
1377 self.target_rootfs = target_rootfs
1378 self.config_file = config_file
1379 self.pkg_archs = archs
1380 self.task_name = task_name
1381
1382 self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True)
1383 self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
1384 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
1385 self.opkg_args = "--volatile-cache -f %s -o %s " % (self.config_file, target_rootfs)
1386 self.opkg_args += self.d.getVar("OPKG_ARGS", True)
1387
1388 opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True)
1389 if opkg_lib_dir[0] == "/":
1390 opkg_lib_dir = opkg_lib_dir[1:]
1391
1392 self.opkg_dir = os.path.join(target_rootfs, opkg_lib_dir, "opkg")
1393
1394 bb.utils.mkdirhier(self.opkg_dir)
1395
1396 self.saved_opkg_dir = self.d.expand('${T}/saved/%s' % self.task_name)
1397 if not os.path.exists(self.d.expand('${T}/saved')):
1398 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
1399
1400 if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1":
1401 self._create_config()
1402 else:
1403 self._create_custom_config()
1404
1405 self.indexer = OpkgIndexer(self.d, self.deploy_dir)
1406
1407 """
1408 This function will change a package's status in /var/lib/opkg/status file.
1409 If 'packages' is None then the new_status will be applied to all
1410 packages
1411 """
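    # A sketch of the effect, assuming a typical opkg status stanza
    # (hypothetical package): calling mark_packages("installed", ["busybox"])
    # would rewrite
    #   Package: busybox
    #   Status: install ok unpacked
    # as
    #   Package: busybox
    #   Status: install ok installed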
1412 def mark_packages(self, status_tag, packages=None):
1413 status_file = os.path.join(self.opkg_dir, "status")
1414
1415 with open(status_file, "r") as sf:
1416 with open(status_file + ".tmp", "w+") as tmp_sf:
1417 if packages is None:
1418 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1419 r"Package: \1\n\2Status: \3%s" % status_tag,
1420 sf.read()))
1421 else:
1422 if type(packages).__name__ != "list":
1423 raise TypeError("'packages' should be a list object")
1424
1425 status = sf.read()
1426 for pkg in packages:
1427 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1428 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1429 status)
1430
1431 tmp_sf.write(status)
1432
1433 os.rename(status_file + ".tmp", status_file)
1434
1435 def _create_custom_config(self):
1436 bb.note("Building from feeds activated!")
1437
1438 with open(self.config_file, "w+") as config_file:
1439 priority = 1
1440 for arch in self.pkg_archs.split():
1441 config_file.write("arch %s %d\n" % (arch, priority))
1442 priority += 5
1443
1444 for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split():
1445 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
1446
1447 if feed_match is not None:
1448 feed_name = feed_match.group(1)
1449 feed_uri = feed_match.group(2)
1450
1451 bb.note("Add %s feed with URL %s" % (feed_name, feed_uri))
1452
1453 config_file.write("src/gz %s %s\n" % (feed_name, feed_uri))
1454
1455 """
1456 Allow using the package deploy directory contents as a quick devel-testing
1457 feed. This creates individual feed configs for each arch subdir of those
1458 specified as compatible for the current machine.
1459 NOTE: Development-helper feature, NOT a full-fledged feed.
1460 """
1461 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "":
1462 for arch in self.pkg_archs.split():
1463 cfg_file_name = os.path.join(self.target_rootfs,
1464 self.d.getVar("sysconfdir", True),
1465 "opkg",
1466 "local-%s-feed.conf" % arch)
1467
1468 with open(cfg_file_name, "w+") as cfg_file:
1469 cfg_file.write("src/gz local-%s %s/%s" %
1470 (arch,
1471 self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
1472 arch))
1473
1474 if self.opkg_dir != '/var/lib/opkg':
1475 # There is no command line option for this anymore, we need to add
1476 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1477 # the default value of "/var/lib" as defined in opkg:
1478 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1479 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1480 cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
1481 cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
1482
1483
1484 def _create_config(self):
1485 with open(self.config_file, "w+") as config_file:
1486 priority = 1
1487 for arch in self.pkg_archs.split():
1488 config_file.write("arch %s %d\n" % (arch, priority))
1489 priority += 5
1490
1491 config_file.write("src oe file:%s\n" % self.deploy_dir)
1492
1493 for arch in self.pkg_archs.split():
1494 pkgs_dir = os.path.join(self.deploy_dir, arch)
1495 if os.path.isdir(pkgs_dir):
1496 config_file.write("src oe-%s file:%s\n" %
1497 (arch, pkgs_dir))
1498
1499 if self.opkg_dir != '/var/lib/opkg':
1500 # There is no command line option for this anymore, we need to add
1501 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1502 # the default value of "/var/lib" as defined in opkg:
1503 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1504 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1505 config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
1506 config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
1507
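    # Illustrative output (hypothetical values): with PACKAGE_FEED_URIS set to
    # "http://example.com/feed", PACKAGE_FEED_PREFIX "ipk" and arch
    # directories "all" and "armv7a" present in the deploy dir,
    # base-feeds.conf would contain:
    #   src/gz uri-all-0 http://example.com/feed/ipk/all
    #   src/gz uri-armv7a-0 http://example.com/feed/ipk/armv7a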
1508 def insert_feeds_uris(self):
1509 if self.feed_uris == "":
1510 return
1511
1512 rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf'
1513 % self.target_rootfs)
1514
1515 with open(rootfs_config, "w+") as config_file:
1516 uri_iterator = 0
1517 for uri in self.feed_uris.split():
1518 full_uri = uri
1519 if self.feed_prefix:
1520 full_uri = os.path.join(uri, self.feed_prefix)
1521
1522 for arch in self.pkg_archs.split():
1523 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1524 continue
1525 bb.note('Note: adding opkg feed uri-%s-%d (%s)' %
1526 (arch, uri_iterator, full_uri))
1527
1528 config_file.write("src/gz uri-%s-%d %s/%s\n" %
1529 (arch, uri_iterator, full_uri, arch))
1530 uri_iterator += 1
1531
1532 def update(self):
1533 self.deploy_dir_lock()
1534
1535 cmd = "%s %s update" % (self.opkg_cmd, self.opkg_args)
1536
1537 try:
1538 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1539 except subprocess.CalledProcessError as e:
1540 self.deploy_dir_unlock()
1541 bb.fatal("Unable to update the package index files. Command '%s' "
1542 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1543
1544 self.deploy_dir_unlock()
1545
1546 def install(self, pkgs, attempt_only=False):
1547 if not pkgs:
1548 return
1549
1550 cmd = "%s %s install %s" % (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1551
1552 os.environ['D'] = self.target_rootfs
1553 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1554 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1555 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1556 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
1557 "intercept_scripts")
1558 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
1559
1560 try:
1561 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1562 bb.note(cmd)
1563 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1564 bb.note(output)
1565 except subprocess.CalledProcessError as e:
1566 (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
1567 "Command '%s' returned %d:\n%s" %
1568 (cmd, e.returncode, e.output))
1569
    def remove(self, pkgs, with_dependencies=True):
        if with_dependencies:
            cmd = "%s %s --force-depends --force-remove --force-removal-of-dependent-packages remove %s" % \
                (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
        else:
            cmd = "%s %s --force-depends remove %s" % \
                (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))

        try:
            bb.note(cmd)
            output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
            bb.note(output)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to remove packages. Command '%s' "
                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output))

    def write_index(self):
        self.deploy_dir_lock()

        result = self.indexer.write_index()

        self.deploy_dir_unlock()

        if result is not None:
            bb.fatal(result)

    def remove_packaging_data(self):
        bb.utils.remove(self.opkg_dir, True)
        # Recreate the directory; it is needed by the PM lock
        bb.utils.mkdirhier(self.opkg_dir)

    def list_installed(self, format=None):
        return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list(format)

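    # A package listed in BAD_RECOMMENDATIONS gets an entry in the opkg status
    # file with "Status: deinstall hold not-installed" (see the loop below), so
    # opkg will never pull it in as a recommendation.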
    def handle_bad_recommendations(self):
        bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or ""
        if bad_recommendations.strip() == "":
            return

        status_file = os.path.join(self.opkg_dir, "status")

        # If the status file already exists, the bad recommendations have
        # already been handled
        if os.path.exists(status_file):
            return

        cmd = "%s %s info " % (self.opkg_cmd, self.opkg_args)

        with open(status_file, "w+") as status:
            for pkg in bad_recommendations.split():
                pkg_info = cmd + pkg

                try:
                    output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip()
                except subprocess.CalledProcessError as e:
                    bb.fatal("Cannot get package info. Command '%s' "
                             "returned %d:\n%s" % (pkg_info, e.returncode, e.output))

                if output == "":
                    bb.note("Ignored bad recommendation: '%s' is "
                            "not a package" % pkg)
                    continue

                for line in output.split('\n'):
                    if line.startswith("Status:"):
                        status.write("Status: deinstall hold not-installed\n")
                    else:
                        status.write(line + "\n")

                # Append a blank line after each package entry to ensure that it
                # is separated from the following entry
                status.write("\n")

    '''
    Dummy-install pkgs (opkg --noaction) and return the log of the output.
    '''
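    # Illustrative use (the caller and package names are assumed, not from
    # this file):
    #   log = opkg_pm.dummy_install(["packagegroup-core-boot", "curl"])
    # The returned text is opkg's output for the would-be install transaction.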
    def dummy_install(self, pkgs):
        if len(pkgs) == 0:
            return

        # Create a temp dir as the opkg root for the dummy installation
        temp_rootfs = self.d.expand('${T}/opkg')
        temp_opkg_dir = os.path.join(temp_rootfs, 'var/lib/opkg')
        bb.utils.mkdirhier(temp_opkg_dir)

        opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
        opkg_args += self.d.getVar("OPKG_ARGS", True)

        cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
        try:
            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to update. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

        # Dummy installation
        cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
                                                opkg_args,
                                                ' '.join(pkgs))
        try:
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to dummy install packages. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

        bb.utils.remove(temp_rootfs, True)

        return output

    def backup_packaging_data(self):
        # Save the opkg lib data for incremental ipk image generation
        if os.path.exists(self.saved_opkg_dir):
            bb.utils.remove(self.saved_opkg_dir, True)
        shutil.copytree(self.opkg_dir,
                        self.saved_opkg_dir,
                        symlinks=True)

    def recover_packaging_data(self):
        # Copy the saved opkg lib data back
        if os.path.exists(self.saved_opkg_dir):
            if os.path.exists(self.opkg_dir):
                bb.utils.remove(self.opkg_dir, True)

            bb.note('Recover packaging data')
            shutil.copytree(self.saved_opkg_dir,
                            self.opkg_dir,
                            symlinks=True)


class DpkgPM(PackageManager):
    def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None):
        super(DpkgPM, self).__init__(d)
        self.target_rootfs = target_rootfs
        self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True)
        if apt_conf_dir is None:
            self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
        else:
            self.apt_conf_dir = apt_conf_dir
        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
        self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")

        self.apt_args = d.getVar("APT_ARGS", True)

        self.all_arch_list = archs.split()
        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
        self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)

        self._create_configs(archs, base_archs)

        self.indexer = DpkgIndexer(self.d, self.deploy_dir)

    """
    This function will change a package's status in the /var/lib/dpkg/status
    file. If 'packages' is None, then status_tag will be applied to all
    packages.
    """
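    # Illustrative use, mirroring run_pre_post_installs() below:
    #   self.mark_packages("unpacked", failed_pkgs)
    # rewrites e.g. "Status: install ok installed" to "Status: install ok unpacked"
    # for each named package.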
    def mark_packages(self, status_tag, packages=None):
        status_file = self.target_rootfs + "/var/lib/dpkg/status"

        with open(status_file, "r") as sf:
            with open(status_file + ".tmp", "w+") as tmp_sf:
                if packages is None:
                    tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
                                        r"Package: \1\n\2Status: \3%s" % status_tag,
                                        sf.read()))
                else:
                    if type(packages).__name__ != "list":
                        raise TypeError("'packages' should be a list object")

                    status = sf.read()
                    for pkg in packages:
                        status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
                                        r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
                                        status)

                    tmp_sf.write(status)

        os.rename(status_file + ".tmp", status_file)

    """
    Run the pre/post installs for package "package_name". If package_name is
    None, then run all pre/post install scriptlets.
    """
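    # The scriptlets executed here live in the target's dpkg info directory,
    # i.e. <target_rootfs>/var/lib/dpkg/info/<package>.preinst and .postinst;
    # any package whose scriptlet fails is marked back to "unpacked".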
    def run_pre_post_installs(self, package_name=None):
        info_dir = self.target_rootfs + "/var/lib/dpkg/info"
        suffixes = [(".preinst", "Preinstall"), (".postinst", "Postinstall")]
        status_file = self.target_rootfs + "/var/lib/dpkg/status"
        installed_pkgs = []

        with open(status_file, "r") as status:
            for line in status.read().split('\n'):
                m = re.match("^Package: (.*)", line)
                if m is not None:
                    installed_pkgs.append(m.group(1))

        if package_name is not None and package_name not in installed_pkgs:
            return

        os.environ['D'] = self.target_rootfs
        os.environ['OFFLINE_ROOT'] = self.target_rootfs
        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
                                                   "intercept_scripts")
        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)

        failed_pkgs = []
        for pkg_name in installed_pkgs:
            for suffix in suffixes:
                p_full = os.path.join(info_dir, pkg_name + suffix[0])
                if os.path.exists(p_full):
                    try:
                        bb.note("Executing %s for package: %s ..." %
                                (suffix[1].lower(), pkg_name))
                        subprocess.check_output(p_full, stderr=subprocess.STDOUT)
                    except subprocess.CalledProcessError as e:
                        bb.note("%s for package %s failed with %d:\n%s" %
                                (suffix[1], pkg_name, e.returncode, e.output))
                        failed_pkgs.append(pkg_name)
                        break

        if len(failed_pkgs):
            self.mark_packages("unpacked", failed_pkgs)

    def update(self):
        os.environ['APT_CONFIG'] = self.apt_conf_file

        self.deploy_dir_lock()

        cmd = "%s update" % self.apt_get_cmd

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to update the package index files. Command '%s' "
                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output))

        self.deploy_dir_unlock()

    def install(self, pkgs, attempt_only=False):
        if attempt_only and len(pkgs) == 0:
            return

        os.environ['APT_CONFIG'] = self.apt_conf_file

        cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
              (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))

        try:
            bb.note("Installing the following packages: %s" % ' '.join(pkgs))
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # When attempt_only is set, a failure is only logged via bb.note;
            # otherwise bb.fatal aborts the build.
            (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
                                              "Command '%s' returned %d:\n%s" %
                                              (cmd, e.returncode, e.output))

        # Rename *.dpkg-new files/dirs back to their real names,
        # e.g. "foo.conf.dpkg-new" -> "foo.conf"
        for root, dirs, files in os.walk(self.target_rootfs):
            for dir in dirs:
                new_dir = re.sub(r"\.dpkg-new", "", dir)
                if dir != new_dir:
                    os.rename(os.path.join(root, dir),
                              os.path.join(root, new_dir))

            for file in files:
                new_file = re.sub(r"\.dpkg-new", "", file)
                if file != new_file:
                    os.rename(os.path.join(root, file),
                              os.path.join(root, new_file))


    def remove(self, pkgs, with_dependencies=True):
        if with_dependencies:
            os.environ['APT_CONFIG'] = self.apt_conf_file
            cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
        else:
            cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
                  " -P --force-depends %s" % \
                  (bb.utils.which(os.getenv('PATH'), "dpkg"),
                   self.target_rootfs, self.target_rootfs, ' '.join(pkgs))

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to remove packages. Command '%s' "
                     "returned %d:\n%s" % (e.cmd, e.returncode, e.output))

    def write_index(self):
        self.deploy_dir_lock()

        result = self.indexer.write_index()

        self.deploy_dir_unlock()

        if result is not None:
            bb.fatal(result)

    def insert_feeds_uris(self):
        if self.feed_uris == "":
            return

        sources_conf = os.path.join("%s/etc/apt/sources.list"
                                    % self.target_rootfs)
        arch_list = []

        for arch in self.all_arch_list:
            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                continue
            arch_list.append(arch)

        with open(sources_conf, "w+") as sources_file:
            for uri in self.feed_uris.split():
                full_uri = uri
                if self.feed_prefix:
                    full_uri = os.path.join(uri, self.feed_prefix)
                for arch in arch_list:
                    bb.note('Note: adding dpkg channel at (%s)' % uri)
                    sources_file.write("deb %s/%s ./\n" %
                                       (full_uri, arch))

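    # _create_configs() below writes the usual apt plumbing: a "preferences"
    # file that pins each feed arch (Pin-Priority 801 and up) and pins
    # PACKAGE_EXCLUDE entries to -1 so apt never selects them, a flat-repo
    # sources.list with lines such as (illustrative path)
    #   deb file:/path/to/deploy/deb/armv7a/ ./
    # and an apt.conf derived from apt.conf.sample in the native sysroot.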
    def _create_configs(self, archs, base_archs):
        base_archs = re.sub("_", "-", base_archs)

        if os.path.exists(self.apt_conf_dir):
            bb.utils.remove(self.apt_conf_dir, True)

        bb.utils.mkdirhier(self.apt_conf_dir)
        bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
        bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")

        arch_list = []
        for arch in self.all_arch_list:
            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                continue
            arch_list.append(arch)

        with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
            priority = 801
            for arch in arch_list:
                prefs_file.write(
                    "Package: *\n"
                    "Pin: release l=%s\n"
                    "Pin-Priority: %d\n\n" % (arch, priority))

                priority += 5

            pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
            for pkg in pkg_exclude.split():
                prefs_file.write(
                    "Package: %s\n"
                    "Pin: release *\n"
                    "Pin-Priority: -1\n\n" % pkg)

        arch_list.reverse()

        with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
            for arch in arch_list:
                sources_file.write("deb file:%s/ ./\n" %
                                   os.path.join(self.deploy_dir, arch))

        base_arch_list = base_archs.split()
        multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True)
        for variant in multilib_variants.split():
            if variant == "lib32":
                base_arch_list.append("i386")
            elif variant == "lib64":
                base_arch_list.append("amd64")

        with open(self.apt_conf_file, "w+") as apt_conf:
            with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
                for line in apt_conf_sample.read().split("\n"):
                    match_arch = re.match(" Architecture \".*\";$", line)
                    architectures = ""
                    if match_arch:
                        for base_arch in base_arch_list:
                            architectures += "\"%s\";" % base_arch
                        apt_conf.write(" Architectures {%s};\n" % architectures)
                        apt_conf.write(" Architecture \"%s\";\n" % base_archs)
                    else:
                        line = re.sub("#ROOTFS#", self.target_rootfs, line)
                        line = re.sub("#APTCONF#", self.apt_conf_dir, line)
                        apt_conf.write(line + "\n")

        target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))

        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))

        if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
            open(os.path.join(target_dpkg_dir, "status"), "w+").close()
        if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
            open(os.path.join(target_dpkg_dir, "available"), "w+").close()

    def remove_packaging_data(self):
        bb.utils.remove(os.path.join(self.target_rootfs,
                                     self.d.getVar('opkglibdir', True)), True)
        bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)

    def fix_broken_dependencies(self):
        os.environ['APT_CONFIG'] = self.apt_conf_file

        cmd = "%s %s -f install" % (self.apt_get_cmd, self.apt_args)

        try:
            subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot fix broken dependencies. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

    def list_installed(self, format=None):
        return DpkgPkgsList(self.d, self.target_rootfs).list()


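# generate_index_files() is meant to be called with the BitBake datastore,
# e.g. generate_index_files(d) from a task in a class that deploys packages;
# the exact call sites live outside this file.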
def generate_index_files(d):
    classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split()

    indexer_map = {
        "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)),
        "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)),
        "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True))
    }

    result = None

    for pkg_class in classes:
        if pkg_class not in indexer_map:
            continue

        if os.path.exists(indexer_map[pkg_class][1]):
            result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index()

            if result is not None:
                bb.fatal(result)

if __name__ == "__main__":
    """
    We should be able to run this as a standalone script, from outside the
    bitbake environment.

    TBD
    """