from abc import ABCMeta, abstractmethod
import os
import glob
import subprocess
import shutil
import multiprocessing
import re
import bb
import tempfile
import oe.utils
import string
from oe.gpg_sign import get_signer

# this can be used by all PM backends to create the index files in parallel
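# Typical usage, as in the indexers below: each backend collects its index
# commands into a list and then runs them in parallel with
#     result = oe.utils.multiprocess_exec(index_cmds, create_index)
# failing the build (bb.fatal) if any command returned an error string.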
def create_index(arg):
    index_cmd = arg

    try:
        bb.note("Executing '%s' ..." % index_cmd)
        result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True)
    except subprocess.CalledProcessError as e:
        return("Index creation command '%s' failed with return code %d:\n%s" %
               (e.cmd, e.returncode, e.output))

    if result:
        bb.note(result)

    return None


class Indexer(object):
    __metaclass__ = ABCMeta

    def __init__(self, d, deploy_dir):
        self.d = d
        self.deploy_dir = deploy_dir

    @abstractmethod
    def write_index(self):
        pass


class RpmIndexer(Indexer):
    def get_ml_prefix_and_os_list(self, arch_var=None, os_var=None):
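        # Illustrative return value (arch and OS names are examples only):
        #   ml_prefix_list = {'default': ['qemux86', 'i586', 'all', ...],
        #                     'lib64':   ['lib64_qemux86_64', 'all', ...]}
        #   target_os      = {'default': 'linux', 'lib64': 'linux'}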
        package_archs = {
            'default': [],
        }

        target_os = {
            'default': "",
        }

        if arch_var is not None and os_var is not None:
            package_archs['default'] = self.d.getVar(arch_var, True).split()
            package_archs['default'].reverse()
            target_os['default'] = self.d.getVar(os_var, True).strip()
        else:
            package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split()
            # arch order is reversed.  This ensures the -best- match is
            # listed first!
            package_archs['default'].reverse()
            target_os['default'] = self.d.getVar("TARGET_OS", True).strip()
            multilibs = self.d.getVar('MULTILIBS', True) or ""
            for ext in multilibs.split():
                eext = ext.split(':')
                if len(eext) > 1 and eext[0] == 'multilib':
                    localdata = bb.data.createCopy(self.d)
                    default_tune_key = "DEFAULTTUNE_virtclass-multilib-" + eext[1]
                    default_tune = localdata.getVar(default_tune_key, False)
                    if default_tune is None:
                        default_tune_key = "DEFAULTTUNE_ML_" + eext[1]
                        default_tune = localdata.getVar(default_tune_key, False)
                    if default_tune:
                        localdata.setVar("DEFAULTTUNE", default_tune)
                        bb.data.update_data(localdata)
                        package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS',
                                                                  True).split()
                        package_archs[eext[1]].reverse()
                        target_os[eext[1]] = localdata.getVar("TARGET_OS",
                                                              True).strip()

        ml_prefix_list = dict()
        for mlib in package_archs:
            if mlib == 'default':
                ml_prefix_list[mlib] = package_archs[mlib]
            else:
                ml_prefix_list[mlib] = list()
                for arch in package_archs[mlib]:
                    if arch in ['all', 'noarch', 'any']:
                        ml_prefix_list[mlib].append(arch)
                    else:
                        ml_prefix_list[mlib].append(mlib + "_" + arch)

        return (ml_prefix_list, target_os)

    def write_index(self):
        sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
        all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()

        mlb_prefix_list = self.get_ml_prefix_and_os_list()[0]

        archs = set()
        for item in mlb_prefix_list:
            archs = archs.union(set(i.replace('-', '_') for i in mlb_prefix_list[item]))

        if len(archs) == 0:
            archs = archs.union(set(all_mlb_pkg_archs))

        archs = archs.union(set(sdk_pkg_archs))

        rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
        else:
            signer = None
        index_cmds = []
        repomd_files = []
        rpm_dirs_found = False
        for arch in archs:
            dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch)
            if os.path.exists(dbpath):
                bb.utils.remove(dbpath, True)
            arch_dir = os.path.join(self.deploy_dir, arch)
            if not os.path.isdir(arch_dir):
                continue

            index_cmds.append("%s --dbpath %s --update -q %s" % \
                              (rpm_createrepo, dbpath, arch_dir))
            repomd_files.append(os.path.join(arch_dir, 'repodata', 'repomd.xml'))

            rpm_dirs_found = True

        if not rpm_dirs_found:
            bb.note("There are no packages in %s" % self.deploy_dir)
            return

        # Create repodata
        result = oe.utils.multiprocess_exec(index_cmds, create_index)
        if result:
            bb.fatal('%s' % ('\n'.join(result)))
        # Sign repomd
        if signer:
            for repomd in repomd_files:
                feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
                is_ascii_sig = (feed_sig_type.upper() != "BIN")
                signer.detach_sign(repomd,
                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
                                   armor=is_ascii_sig)


class OpkgIndexer(Indexer):
    def write_index(self):
        arch_vars = ["ALL_MULTILIB_PACKAGE_ARCHS",
                     "SDK_PACKAGE_ARCHS",
                     "MULTILIB_ARCHS"]

        opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
        else:
            signer = None

        if not os.path.exists(os.path.join(self.deploy_dir, "Packages")):
            open(os.path.join(self.deploy_dir, "Packages"), "w").close()

        index_cmds = set()
        index_sign_files = set()
        for arch_var in arch_vars:
            archs = self.d.getVar(arch_var, True)
            if archs is None:
                continue

            for arch in archs.split():
                pkgs_dir = os.path.join(self.deploy_dir, arch)
                pkgs_file = os.path.join(pkgs_dir, "Packages")

                if not os.path.isdir(pkgs_dir):
                    continue

                if not os.path.exists(pkgs_file):
                    open(pkgs_file, "w").close()

                index_cmds.add('%s -r %s -p %s -m %s' %
                               (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))

                index_sign_files.add(pkgs_file)

        if len(index_cmds) == 0:
            bb.note("There are no packages in %s!" % self.deploy_dir)
            return

        result = oe.utils.multiprocess_exec(index_cmds, create_index)
        if result:
            bb.fatal('%s' % ('\n'.join(result)))

        if signer:
            feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
            is_ascii_sig = (feed_sig_type.upper() != "BIN")
            for f in index_sign_files:
                signer.detach_sign(f,
                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
                                   armor=is_ascii_sig)

class DpkgIndexer(Indexer):
    def _create_configs(self):
        bb.utils.mkdirhier(self.apt_conf_dir)
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
        bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))

        with open(os.path.join(self.apt_conf_dir, "preferences"),
                  "w") as prefs_file:
            pass
        with open(os.path.join(self.apt_conf_dir, "sources.list"),
                  "w+") as sources_file:
            pass

        with open(self.apt_conf_file, "w") as apt_conf:
            with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
                                   "apt", "apt.conf.sample")) as apt_conf_sample:
                for line in apt_conf_sample.read().split("\n"):
                    line = re.sub("#ROOTFS#", "/dev/null", line)
                    line = re.sub("#APTCONF#", self.apt_conf_dir, line)
                    apt_conf.write(line + "\n")

    def write_index(self):
        self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
                                         "apt-ftparchive")
        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
        self._create_configs()

        os.environ['APT_CONFIG'] = self.apt_conf_file

        pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
        if pkg_archs is not None:
            arch_list = pkg_archs.split()
        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
        if sdk_pkg_archs is not None:
            for a in sdk_pkg_archs.split():
                if a not in pkg_archs:
                    arch_list.append(a)

        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split()
        arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)

        apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
        gzip = bb.utils.which(os.getenv('PATH'), "gzip")

        index_cmds = []
        deb_dirs_found = False
        for arch in arch_list:
            arch_dir = os.path.join(self.deploy_dir, arch)
            if not os.path.isdir(arch_dir):
                continue

            cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)

            cmd += "%s -fc Packages > Packages.gz;" % gzip

            with open(os.path.join(arch_dir, "Release"), "w+") as release:
                release.write("Label: %s\n" % arch)

            cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive

            index_cmds.append(cmd)

            deb_dirs_found = True

        if not deb_dirs_found:
            bb.note("There are no packages in %s" % self.deploy_dir)
            return

        result = oe.utils.multiprocess_exec(index_cmds, create_index)
        if result:
            bb.fatal('%s' % ('\n'.join(result)))
        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
            raise NotImplementedError('Package feed signing not implemented for dpkg')


class PkgsList(object):
    __metaclass__ = ABCMeta

    def __init__(self, d, rootfs_dir):
        self.d = d
        self.rootfs_dir = rootfs_dir

    @abstractmethod
    def list_pkgs(self):
        pass


    """
    This method parses the output from the package manager
    and returns a dictionary with the information of the
    installed packages. This is used when the packages are
    in deb or ipk format.
    """
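    # Illustrative input/output for opkg_query() (field values are examples only):
    #   "Package: libc6\nArchitecture: core2-64\nVersion: 2.23-r0\nDepends: libgcc1 (>= 5.3)\n\n"
    # parses to:
    #   {'libc6': {'arch': 'core2-64', 'ver': '2.23-r0',
    #              'filename': 'libc6_2.23-r0_core2-64.ipk', 'deps': ['libgcc1']}}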
    def opkg_query(self, cmd_output):
        verregex = re.compile(' \([=<>]* [^ )]*\)')
        output = dict()
        filename = ""
        dep = []
        pkg = ""
        for line in cmd_output.splitlines():
            line = line.rstrip()
            if ':' in line:
                if line.startswith("Package: "):
                    pkg = line.split(": ")[1]
                elif line.startswith("Architecture: "):
                    arch = line.split(": ")[1]
                elif line.startswith("Version: "):
                    ver = line.split(": ")[1]
                elif line.startswith("File: "):
                    filename = line.split(": ")[1]
                elif line.startswith("Depends: "):
                    depends = verregex.sub('', line.split(": ")[1])
                    for depend in depends.split(", "):
                        dep.append(depend)
                elif line.startswith("Recommends: "):
                    recommends = verregex.sub('', line.split(": ")[1])
                    for recommend in recommends.split(", "):
                        dep.append("%s [REC]" % recommend)
            else:
                # IPK doesn't include the filename
                if not filename:
                    filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
                if pkg:
                    output[pkg] = {"arch":arch, "ver":ver,
                                   "filename":filename, "deps": dep }
                    pkg = ""
                    filename = ""
                    dep = []

        if pkg:
            if not filename:
                filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
            output[pkg] = {"arch":arch, "ver":ver,
                           "filename":filename, "deps": dep }

        return output


class RpmPkgsList(PkgsList):
    def __init__(self, d, rootfs_dir, arch_var=None, os_var=None):
        super(RpmPkgsList, self).__init__(d, rootfs_dir)

        self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
        self.image_rpmlib = os.path.join(self.rootfs_dir, 'var/lib/rpm')

        self.ml_prefix_list, self.ml_os_list = \
            RpmIndexer(d, rootfs_dir).get_ml_prefix_and_os_list(arch_var, os_var)

        # Determine rpm version
        cmd = "%s --version" % self.rpm_cmd
        try:
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            bb.fatal("Getting rpm version failed. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

    '''
    Translate the RPM/Smart format names to the OE multilib format names
    '''
    def _pkg_translate_smart_to_oe(self, pkg, arch):
        new_pkg = pkg
        new_arch = arch
        fixed_arch = arch.replace('_', '-')
        found = 0
        for mlib in self.ml_prefix_list:
            for cmp_arch in self.ml_prefix_list[mlib]:
                fixed_cmp_arch = cmp_arch.replace('_', '-')
                if fixed_arch == fixed_cmp_arch:
                    if mlib == 'default':
                        new_pkg = pkg
                        new_arch = cmp_arch
                    else:
                        new_pkg = mlib + '-' + pkg
                        # We need to strip off the ${mlib}_ prefix on the arch
                        new_arch = cmp_arch.replace(mlib + '_', '')

                    # Workaround for bug 3565. Simply look to see if we
                    # know of a package with that name, if not try again!
                    filename = os.path.join(self.d.getVar('PKGDATA_DIR', True),
                                            'runtime-reverse',
                                            new_pkg)
                    if os.path.exists(filename):
                        found = 1
                        break

            if found == 1 and fixed_arch == fixed_cmp_arch:
                break
        #bb.note('%s, %s -> %s, %s' % (pkg, arch, new_pkg, new_arch))
        return new_pkg, new_arch

    def _list_pkg_deps(self):
        cmd = [bb.utils.which(os.getenv('PATH'), "rpmresolve"),
               "-t", self.image_rpmlib]

        try:
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot get the package dependencies. Command '%s' "
                     "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))

        return output

    def list_pkgs(self):
        cmd = self.rpm_cmd + ' --root ' + self.rootfs_dir
        cmd += ' -D "_dbpath /var/lib/rpm" -qa'
        cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'"

        try:
            # bb.note(cmd)
            tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot get the installed packages list. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

        output = dict()
        deps = dict()
        dependencies = self._list_pkg_deps()

        # Populate deps dictionary for better manipulation
        for line in dependencies.splitlines():
            try:
                pkg, dep = line.split("|")
                if not pkg in deps:
                    deps[pkg] = list()
                if not dep in deps[pkg]:
                    deps[pkg].append(dep)
            except:
                # Ignore any other lines; they're debug output or errors
                pass

        for line in tmp_output.split('\n'):
            if len(line.strip()) == 0:
                continue
            pkg = line.split()[0]
            arch = line.split()[1]
            ver = line.split()[2]
            dep = deps.get(pkg, [])

            # Skip GPG keys
            if pkg == 'gpg-pubkey':
                continue

            pkgorigin = line.split()[3]
            new_pkg, new_arch = self._pkg_translate_smart_to_oe(pkg, arch)

            output[new_pkg] = {"arch":new_arch, "ver":ver,
                               "filename":pkgorigin, "deps":dep}

        return output


class OpkgPkgsList(PkgsList):
    def __init__(self, d, rootfs_dir, config_file):
        super(OpkgPkgsList, self).__init__(d, rootfs_dir)

        self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
        self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
        self.opkg_args += self.d.getVar("OPKG_ARGS", True)

    def list_pkgs(self, format=None):
        cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args)

        # opkg returns success even when it printed some
        # "Collected errors:" report to stderr. Mixing stderr into
        # stdout then leads to random failures later on when
        # parsing the output. To avoid this we need to collect both
        # output streams separately and check for empty stderr.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        cmd_output, cmd_stderr = p.communicate()
        if p.returncode or cmd_stderr:
            bb.fatal("Cannot get the installed packages list. Command '%s' "
                     "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr))

        return self.opkg_query(cmd_output)


class DpkgPkgsList(PkgsList):

    def list_pkgs(self):
        cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
               "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
               "-W"]

        cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n")

        try:
            cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot get the installed packages list. Command '%s' "
                     "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))

        return self.opkg_query(cmd_output)

class PackageManager(object):
    """
    This is an abstract class. Do not instantiate this directly.
    """
    __metaclass__ = ABCMeta

    def __init__(self, d):
        self.d = d
        self.deploy_dir = None
        self.deploy_lock = None
        self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or ""
        self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or ""
        self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True)

    """
    Update the package manager package database.
    """
    @abstractmethod
    def update(self):
        pass

    """
    Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
    True, installation failures are ignored.
    """
    @abstractmethod
    def install(self, pkgs, attempt_only=False):
        pass

    """
    Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
    is False, any dependencies are left in place.
    """
    @abstractmethod
    def remove(self, pkgs, with_dependencies=True):
        pass

    """
    This function creates the index files.
    """
    @abstractmethod
    def write_index(self):
        pass

    @abstractmethod
    def remove_packaging_data(self):
        pass

    @abstractmethod
    def list_installed(self):
        pass

    @abstractmethod
    def insert_feeds_uris(self):
        pass

559 """
560 Install complementary packages based upon the list of currently installed
561 packages e.g. locales, *-dev, *-dbg, etc. This will only attempt to install
562 these packages, if they don't exist then no error will occur. Note: every
563 backend needs to call this function explicitly after the normal package
564 installation
565 """
566 def install_complementary(self, globs=None):
567 # we need to write the list of installed packages to a file because the
568 # oe-pkgdata-util reads it from a file
569 installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
570 "installed_pkgs.txt")
571 with open(installed_pkgs_file, "w+") as installed_pkgs:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500572 pkgs = self.list_installed()
573 output = oe.utils.format_pkg_list(pkgs, "arch")
574 installed_pkgs.write(output)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500575
576 if globs is None:
577 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True)
578 split_linguas = set()
579
580 for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
581 split_linguas.add(translation)
582 split_linguas.add(translation.split('-')[0])
583
584 split_linguas = sorted(split_linguas)
585
586 for lang in split_linguas:
587 globs += " *-locale-%s" % lang
588
589 if globs is None:
590 return
591
592 cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
593 "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
594 globs]
595 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True)
596 if exclude:
597 cmd.extend(['-x', exclude])
598 try:
599 bb.note("Installing complementary packages ...")
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500600 bb.note('Running %s' % cmd)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500601 complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
602 except subprocess.CalledProcessError as e:
603 bb.fatal("Could not compute complementary packages list. Command "
604 "'%s' returned %d:\n%s" %
605 (' '.join(cmd), e.returncode, e.output))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500606 self.install(complementary_pkgs.split(), attempt_only=True)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500607 os.remove(installed_pkgs_file)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500608
    def deploy_dir_lock(self):
        if self.deploy_dir is None:
            raise RuntimeError("deploy_dir is not set!")

        lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")

        self.deploy_lock = bb.utils.lockfile(lock_file_name)

    def deploy_dir_unlock(self):
        if self.deploy_lock is None:
            return

        bb.utils.unlockfile(self.deploy_lock)

        self.deploy_lock = None

    """
    Construct URIs based on the pattern uri/base_path, where 'uri' and
    'base_path' correspond to each element of the corresponding array
    argument, leading to len(uris) x len(base_paths) elements in the
    returned array.
    """
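    # Illustrative example (hypothetical values):
    #   construct_uris(['http://example.com/feed'], ['rpm', 'ipk'])
    #   -> ['http://example.com/feed/rpm', 'http://example.com/feed/ipk']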
    def construct_uris(self, uris, base_paths):
        def _append(arr1, arr2, sep='/'):
            res = []
            narr1 = map(lambda a: string.rstrip(a, sep), arr1)
            narr2 = map(lambda a: string.lstrip(string.rstrip(a, sep), sep), arr2)
            for a1 in narr1:
                if arr2:
                    for a2 in narr2:
                        res.append("%s%s%s" % (a1, sep, a2))
                else:
                    res.append(a1)
            return res
        return _append(uris, base_paths)

class RpmPM(PackageManager):
    def __init__(self,
                 d,
                 target_rootfs,
                 target_vendor,
                 task_name='target',
                 providename=None,
                 arch_var=None,
                 os_var=None):
        super(RpmPM, self).__init__(d)
        self.target_rootfs = target_rootfs
        self.target_vendor = target_vendor
        self.task_name = task_name
        self.providename = providename
        self.fullpkglist = list()
        self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True)
        self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm")
        self.install_dir_name = "oe_install"
        self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name)
        self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
        self.smart_cmd = bb.utils.which(os.getenv('PATH'), "smart")
        # 0 = default, only warnings
        # 1 = --log-level=info (includes information about executing scriptlets and their output)
        # 2 = --log-level=debug
        # 3 = --log-level=debug plus dumps of scriptlet content and command invocation
        self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0")
        self.smart_opt = "--log-level=%s --data-dir=%s" % \
                         ("warning" if self.debug_level == 0 else
                          "info" if self.debug_level == 1 else
                          "debug",
                          os.path.join(target_rootfs, 'var/lib/smart'))
        self.scriptlet_wrapper = self.d.expand('${WORKDIR}/scriptlet_wrapper')
        self.solution_manifest = self.d.expand('${T}/saved/%s_solution' %
                                               self.task_name)
        self.saved_rpmlib = self.d.expand('${T}/saved/%s' % self.task_name)
        self.image_rpmlib = os.path.join(self.target_rootfs, 'var/lib/rpm')

        if not os.path.exists(self.d.expand('${T}/saved')):
            bb.utils.mkdirhier(self.d.expand('${T}/saved'))

        self.indexer = RpmIndexer(self.d, self.deploy_dir)
        self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)

        self.ml_prefix_list, self.ml_os_list = self.indexer.get_ml_prefix_and_os_list(arch_var, os_var)

    def insert_feeds_uris(self):
        if self.feed_uris == "":
            return

        arch_list = []
        if self.feed_archs is not None:
            # User-defined feed architectures
            arch_list = self.feed_archs.split()
        else:
            # List must be ordered from most preferred to least preferred
            default_platform_extra = set()
            platform_extra = set()
            bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
            for mlib in self.ml_os_list:
                for arch in self.ml_prefix_list[mlib]:
                    plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
                    if mlib == bbextendvariant:
                        default_platform_extra.add(plt)
                    else:
                        platform_extra.add(plt)

            platform_extra = platform_extra.union(default_platform_extra)

            for canonical_arch in platform_extra:
                arch = canonical_arch.split('-')[0]
                if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                    continue
                arch_list.append(arch)

        feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split())

        uri_iterator = 0
        channel_priority = 10 + 5 * len(feed_uris) * (len(arch_list) if arch_list else 1)

        for uri in feed_uris:
            if arch_list:
                for arch in arch_list:
                    bb.note('Note: adding Smart channel url%d%s (%s)' %
                            (uri_iterator, arch, channel_priority))
                    self._invoke_smart('channel --add url%d-%s type=rpm-md baseurl=%s/%s -y'
                                       % (uri_iterator, arch, uri, arch))
                    self._invoke_smart('channel --set url%d-%s priority=%d' %
                                       (uri_iterator, arch, channel_priority))
                    channel_priority -= 5
            else:
                bb.note('Note: adding Smart channel url%d (%s)' %
                        (uri_iterator, channel_priority))
                self._invoke_smart('channel --add url%d type=rpm-md baseurl=%s -y'
                                   % (uri_iterator, uri))
                self._invoke_smart('channel --set url%d priority=%d' %
                                   (uri_iterator, channel_priority))
                channel_priority -= 5

            uri_iterator += 1

    '''
    Create configs for rpm and smart; multilib is supported.
    '''
    def create_configs(self):
        target_arch = self.d.getVar('TARGET_ARCH', True)
        platform = '%s%s-%s' % (target_arch.replace('-', '_'),
                                self.target_vendor,
                                self.ml_os_list['default'])

        # List must be ordered from most preferred to least preferred
        default_platform_extra = list()
        platform_extra = list()
        bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
        for mlib in self.ml_os_list:
            for arch in self.ml_prefix_list[mlib]:
                plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
                if mlib == bbextendvariant:
                    if plt not in default_platform_extra:
                        default_platform_extra.append(plt)
                else:
                    if plt not in platform_extra:
                        platform_extra.append(plt)
        platform_extra = default_platform_extra + platform_extra

        self._create_configs(platform, platform_extra)

    def _invoke_smart(self, args):
        cmd = "%s %s %s" % (self.smart_cmd, self.smart_opt, args)
        # bb.note(cmd)
        try:
            complementary_pkgs = subprocess.check_output(cmd,
                                                         stderr=subprocess.STDOUT,
                                                         shell=True)
            # bb.note(complementary_pkgs)
            return complementary_pkgs
        except subprocess.CalledProcessError as e:
            bb.fatal("Could not invoke smart. Command "
                     "'%s' returned %d:\n%s" % (cmd, e.returncode, e.output))

    def _search_pkg_name_in_feeds(self, pkg, feed_archs):
        for arch in feed_archs:
            arch = arch.replace('-', '_')
            regex_match = re.compile(r"^%s-[^-]*-[^-]*@%s$" % \
                (re.escape(pkg), re.escape(arch)))
            for p in self.fullpkglist:
                if regex_match.match(p) is not None:
                    # First found is best match
                    # bb.note('%s -> %s' % (pkg, pkg + '@' + arch))
                    return pkg + '@' + arch

        # Search provides if not found by pkgname.
        bb.note('Not found %s by name, searching provides ...' % pkg)
        cmd = "%s %s query --provides %s --show-format='$name-$version'" % \
              (self.smart_cmd, self.smart_opt, pkg)
        cmd += " | sed -ne 's/ *Provides://p'"
        bb.note('cmd: %s' % cmd)
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        # Found a provider
        if output:
            bb.note('Found providers for %s: %s' % (pkg, output))
            for p in output.split():
                for arch in feed_archs:
                    arch = arch.replace('-', '_')
                    if p.rstrip().endswith('@' + arch):
                        return p

        return ""

    '''
    Translate the OE multilib format names to the RPM/Smart format names.
    It searches for the RPM/Smart format names in the probable multilib
    feeds first, and then searches the default base feed.
    '''
    def _pkg_translate_oe_to_smart(self, pkgs, attempt_only=False):
        new_pkgs = list()

        for pkg in pkgs:
            new_pkg = pkg
            # Search new_pkg in probable multilibs first
            for mlib in self.ml_prefix_list:
                # Skip the default archs
                if mlib == 'default':
                    continue

                subst = pkg.replace(mlib + '-', '')
                # if the pkg is in this multilib feed
                if subst != pkg:
                    feed_archs = self.ml_prefix_list[mlib]
                    new_pkg = self._search_pkg_name_in_feeds(subst, feed_archs)
                    if not new_pkg:
                        # Failed to translate, package not found!
                        err_msg = '%s not found in the %s feeds (%s).\n' % \
                                  (pkg, mlib, " ".join(feed_archs))
                        if not attempt_only:
                            err_msg += " ".join(self.fullpkglist)
                            bb.fatal(err_msg)
                        bb.warn(err_msg)
                    else:
                        new_pkgs.append(new_pkg)

                    break

            # Apparently not a multilib package...
            if pkg == new_pkg:
                # Search new_pkg in default archs
                default_archs = self.ml_prefix_list['default']
                new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs)
                if not new_pkg:
                    err_msg = '%s not found in the base feeds (%s).\n' % \
                              (pkg, ' '.join(default_archs))
                    if not attempt_only:
                        err_msg += " ".join(self.fullpkglist)
                        bb.fatal(err_msg)
                    bb.warn(err_msg)
                else:
                    new_pkgs.append(new_pkg)

        return new_pkgs

    def _create_configs(self, platform, platform_extra):
        # Setup base system configuration
        bb.note("configuring RPM platform settings")

        # Configure internal RPM environment when using Smart
        os.environ['RPM_ETCRPM'] = self.etcrpm_dir
        bb.utils.mkdirhier(self.etcrpm_dir)

        # Setup temporary directory -- install...
        if os.path.exists(self.install_dir_path):
            bb.utils.remove(self.install_dir_path, True)
        bb.utils.mkdirhier(os.path.join(self.install_dir_path, 'tmp'))

        channel_priority = 5
        platform_dir = os.path.join(self.etcrpm_dir, "platform")
        sdkos = self.d.getVar("SDK_OS", True)
        with open(platform_dir, "w+") as platform_fd:
            platform_fd.write(platform + '\n')
            for pt in platform_extra:
                channel_priority += 5
                if sdkos:
                    tmp = re.sub("-%s$" % sdkos, "-%s\n" % sdkos, pt)
                tmp = re.sub("-linux.*$", "-linux.*\n", tmp)
                platform_fd.write(tmp)

        # Tell RPM that the "/" directory exists and is available
        bb.note("configuring RPM system provides")
        sysinfo_dir = os.path.join(self.etcrpm_dir, "sysinfo")
        bb.utils.mkdirhier(sysinfo_dir)
        with open(os.path.join(sysinfo_dir, "Dirnames"), "w+") as dirnames:
            dirnames.write("/\n")

        if self.providename:
            providename_dir = os.path.join(sysinfo_dir, "Providename")
            if not os.path.exists(providename_dir):
                providename_content = '\n'.join(self.providename)
                providename_content += '\n'
                open(providename_dir, "w+").write(providename_content)

        # Configure RPM... we enforce these settings!
        bb.note("configuring RPM DB settings")
        # After changing the __db.* cache size, the log file will not be
        # generated automatically, which will raise some warnings, so touch
        # a bare log for rpm to write into.
        rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001')
        if not os.path.exists(rpmlib_log):
            bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log'))
            open(rpmlib_log, 'w+').close()

        DB_CONFIG_CONTENT = "# ================ Environment\n" \
            "set_data_dir .\n" \
            "set_create_dir .\n" \
            "set_lg_dir ./log\n" \
            "set_tmp_dir ./tmp\n" \
            "set_flags db_log_autoremove on\n" \
            "\n" \
            "# -- thread_count must be >= 8\n" \
            "set_thread_count 64\n" \
            "\n" \
            "# ================ Logging\n" \
            "\n" \
            "# ================ Memory Pool\n" \
            "set_cachesize 0 1048576 0\n" \
            "set_mp_mmapsize 268435456\n" \
            "\n" \
            "# ================ Locking\n" \
            "set_lk_max_locks 16384\n" \
            "set_lk_max_lockers 16384\n" \
            "set_lk_max_objects 16384\n" \
            "mutex_set_max 163840\n" \
            "\n" \
            "# ================ Replication\n"

        db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG')
        if not os.path.exists(db_config_dir):
            open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT)

        # Create database so that smart doesn't complain (lazy init)
        opt = "-qa"
        cmd = "%s --root %s --dbpath /var/lib/rpm %s > /dev/null" % (
              self.rpm_cmd, self.target_rootfs, opt)
        try:
            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            bb.fatal("Create rpm database failed. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))
        # Import GPG key to RPM database of the target system
        if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1':
            pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True)
            cmd = "%s --root %s --dbpath /var/lib/rpm --import %s > /dev/null" % (
                  self.rpm_cmd, self.target_rootfs, pubkey_path)
            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)

        # Configure smart
        bb.note("configuring Smart settings")
        bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
                        True)
        self._invoke_smart('config --set rpm-root=%s' % self.target_rootfs)
        self._invoke_smart('config --set rpm-dbpath=/var/lib/rpm')
        self._invoke_smart('config --set rpm-extra-macros._var=%s' %
                           self.d.getVar('localstatedir', True))
        cmd = "config --set rpm-extra-macros._tmppath=/%s/tmp" % (self.install_dir_name)

        prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True)
        if prefer_color:
            if prefer_color not in ['0', '1', '2', '4']:
                bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n"
                         "\t1: ELF32 wins\n"
                         "\t2: ELF64 wins\n"
                         "\t4: ELF64 N32 wins (mips64 or mips64el only)" %
                         prefer_color)
            if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \
                    ['mips64', 'mips64el']:
                bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el "
                         "only.")
            self._invoke_smart('config --set rpm-extra-macros._prefer_color=%s'
                               % prefer_color)

        self._invoke_smart(cmd)
        self._invoke_smart('config --set rpm-ignoresize=1')

        # Write common configuration for host and target usage
        self._invoke_smart('config --set rpm-nolinktos=1')
        self._invoke_smart('config --set rpm-noparentdirs=1')
        check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True)
        if check_signature and check_signature.strip() == "0":
            self._invoke_smart('config --set rpm-check-signatures=false')
        for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
            self._invoke_smart('flag --set ignore-recommends %s' % i)

        # Do the following configurations here, to avoid them being
        # saved for field upgrade
        if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1":
            self._invoke_smart('config --set ignore-all-recommends=1')
        pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
        for i in pkg_exclude.split():
            self._invoke_smart('flag --set exclude-packages %s' % i)

        # Optional debugging
        # self._invoke_smart('config --set rpm-log-level=debug')
        # cmd = 'config --set rpm-log-file=/tmp/smart-debug-logfile'
        # self._invoke_smart(cmd)
        ch_already_added = []
        for canonical_arch in platform_extra:
            arch = canonical_arch.split('-')[0]
            arch_channel = os.path.join(self.deploy_dir, arch)
            if os.path.exists(arch_channel) and not arch in ch_already_added:
                bb.note('Note: adding Smart channel %s (%s)' %
                        (arch, channel_priority))
                self._invoke_smart('channel --add %s type=rpm-md baseurl=%s -y'
                                   % (arch, arch_channel))
                self._invoke_smart('channel --set %s priority=%d' %
                                   (arch, channel_priority))
                channel_priority -= 5

                ch_already_added.append(arch)

        bb.note('adding Smart RPM DB channel')
        self._invoke_smart('channel --add rpmsys type=rpm-sys -y')

        # Construct install scriptlet wrapper.
        # Scripts need to be ordered when executed; this ensures numeric order.
        # If we ever run into needing more than 899 scripts, we'll have to
        # change num to start with 1000.
        #
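        # For example, a deferred postinstall for a package "foo" ends up as
        # $D/etc/rpm-postinsts/100-foo (hypothetical name); later deferred
        # scriptlets take 101-, 102-, and so on (see SCRIPTLET_FORMAT below).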
        scriptletcmd = "$2 $1/$3 $4\n"
        scriptpath = "$1/$3"

        # When self.debug_level >= 3, also dump the content of the
        # executed scriptlets and how they get invoked. We have to
        # replace "exit 1" and "ERR" because printing those as-is
        # would trigger a log analysis failure.
        if self.debug_level >= 3:
            dump_invocation = 'echo "Executing ${name} ${kind} with: ' + scriptletcmd + '"\n'
            dump_script = 'cat ' + scriptpath + '| sed -e "s/exit 1/exxxit 1/g" -e "s/ERR/IRR/g"; echo\n'
        else:
            dump_invocation = 'echo "Executing ${name} ${kind}"\n'
            dump_script = ''

        SCRIPTLET_FORMAT = "#!/bin/bash\n" \
            "\n" \
            "export PATH=%s\n" \
            "export D=%s\n" \
            'export OFFLINE_ROOT="$D"\n' \
            'export IPKG_OFFLINE_ROOT="$D"\n' \
            'export OPKG_OFFLINE_ROOT="$D"\n' \
            "export INTERCEPT_DIR=%s\n" \
            "export NATIVE_ROOT=%s\n" \
            "\n" \
            "name=`head -1 " + scriptpath + " | cut -d\' \' -f 2`\n" \
            "kind=`head -1 " + scriptpath + " | cut -d\' \' -f 4`\n" \
            + dump_invocation \
            + dump_script \
            + scriptletcmd + \
            "ret=$?\n" \
            "echo Result of ${name} ${kind}: ${ret}\n" \
            "if [ ${ret} -ne 0 ]; then\n" \
            "  if [ $4 -eq 1 ]; then\n" \
            "    mkdir -p $1/etc/rpm-postinsts\n" \
            "    num=100\n" \
            "    while [ -e $1/etc/rpm-postinsts/${num}-* ]; do num=$((num + 1)); done\n" \
            '    echo "#!$2" > $1/etc/rpm-postinsts/${num}-${name}\n' \
            '    echo "# Arg: $4" >> $1/etc/rpm-postinsts/${num}-${name}\n' \
            "    cat " + scriptpath + " >> $1/etc/rpm-postinsts/${num}-${name}\n" \
            "    chmod +x $1/etc/rpm-postinsts/${num}-${name}\n" \
            '    echo "Info: deferring ${name} ${kind} install scriptlet to first boot"\n' \
            "  else\n" \
            '    echo "Error: ${name} ${kind} remove scriptlet failed"\n' \
            "  fi\n" \
            "fi\n"

        intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts')
        native_root = self.d.getVar('STAGING_DIR_NATIVE', True)
        scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'],
                                                self.target_rootfs,
                                                intercept_dir,
                                                native_root)
        open(self.scriptlet_wrapper, 'w+').write(scriptlet_content)

        bb.note("Note: configuring RPM cross-install scriptlet_wrapper")
        os.chmod(self.scriptlet_wrapper, 0755)
        cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \
              self.scriptlet_wrapper
        self._invoke_smart(cmd)

        # Debug to show smart config info
        # bb.note(self._invoke_smart('config --show'))

    def update(self):
        self._invoke_smart('update rpmsys')

    def get_rdepends_recursively(self, pkgs):
        # pkgs will be changed during the loop, so use [:] to make a copy.
        for pkg in pkgs[:]:
            sub_data = oe.packagedata.read_subpkgdata(pkg, self.d)
            sub_rdep = sub_data.get("RDEPENDS_" + pkg)
            if not sub_rdep:
                continue
            done = bb.utils.explode_dep_versions2(sub_rdep).keys()
            next = done
            # Find all the rdepends on the dependency chain
            while next:
                new = []
                for sub_pkg in next:
                    sub_data = oe.packagedata.read_subpkgdata(sub_pkg, self.d)
                    sub_pkg_rdep = sub_data.get("RDEPENDS_" + sub_pkg)
                    if not sub_pkg_rdep:
                        continue
                    for p in bb.utils.explode_dep_versions2(sub_pkg_rdep):
                        # Already handled, skip it.
                        if p in done or p in pkgs:
                            continue
                        # It's a new dep
                        if oe.packagedata.has_subpkgdata(p, self.d):
                            done.append(p)
                            new.append(p)
                next = new
            pkgs.extend(done)
        return pkgs

Patrick Williamsc124f4f2015-09-15 14:41:29 -05001124 '''
1125 Install pkgs with smart, the pkg name is oe format
1126 '''
1127 def install(self, pkgs, attempt_only=False):
1128
1129 if not pkgs:
1130 bb.note("There are no packages to install")
1131 return
1132 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001133 if not attempt_only:
1134 # Pull in multilib requires since rpm may not pull in them
1135 # correctly, for example,
1136 # lib32-packagegroup-core-standalone-sdk-target requires
1137 # lib32-libc6, but rpm may pull in libc6 rather than lib32-libc6
1138 # since it doesn't know mlprefix (lib32-), bitbake knows it and
1139 # can handle it well, find out the RDEPENDS on the chain will
1140 # fix the problem. Both do_rootfs and do_populate_sdk have this
1141 # issue.
1142 # The attempt_only packages don't need this since they are
1143 # based on the installed ones.
1144 #
1145 # Separate pkgs into two lists, one is multilib, the other one
1146 # is non-multilib.
1147 ml_pkgs = []
1148 non_ml_pkgs = pkgs[:]
1149 for pkg in pkgs:
1150 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
1151 if pkg.startswith(mlib + '-'):
1152 ml_pkgs.append(pkg)
1153 non_ml_pkgs.remove(pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001154
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001155 if len(ml_pkgs) > 0 and len(non_ml_pkgs) > 0:
1156 # Found both foo and lib-foo
1157 ml_pkgs = self.get_rdepends_recursively(ml_pkgs)
1158 non_ml_pkgs = self.get_rdepends_recursively(non_ml_pkgs)
1159 # Longer list makes smart slower, so only keep the pkgs
1160 # which have the same BPN, and smart can handle others
1161 # correctly.
1162 pkgs_new = []
1163 for pkg in non_ml_pkgs:
1164 for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
1165 mlib_pkg = mlib + "-" + pkg
1166 if mlib_pkg in ml_pkgs:
1167 pkgs_new.append(pkg)
1168 pkgs_new.append(mlib_pkg)
1169 for pkg in pkgs:
1170 if pkg not in pkgs_new:
1171 pkgs_new.append(pkg)
1172 pkgs = pkgs_new
1173 new_depends = {}
1174 deps = bb.utils.explode_dep_versions2(" ".join(pkgs))
1175 for depend in deps:
1176 data = oe.packagedata.read_subpkgdata(depend, self.d)
1177 key = "PKG_%s" % depend
1178 if key in data:
1179 new_depend = data[key]
1180 else:
1181 new_depend = depend
1182 new_depends[new_depend] = deps[depend]
1183 pkgs = bb.utils.join_deps(new_depends, commasep=True).split(', ')
1184 pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001185 if not attempt_only:
1186 bb.note('to be installed: %s' % ' '.join(pkgs))
1187 cmd = "%s %s install -y %s" % \
1188 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
1189 bb.note(cmd)
1190 else:
1191 bb.note('installing attempt only packages...')
1192 bb.note('Attempting %s' % ' '.join(pkgs))
1193 cmd = "%s %s install --attempt -y %s" % \
1194 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
1195 try:
1196 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1197 bb.note(output)
1198 except subprocess.CalledProcessError as e:
1199 bb.fatal("Unable to install packages. Command '%s' "
1200 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1201
    '''
    Remove pkgs with smart; the pkg names are in smart/rpm format.
    '''
    def remove(self, pkgs, with_dependencies=True):
        bb.note('to be removed: ' + ' '.join(pkgs))

        if not with_dependencies:
            cmd = "%s -e --nodeps " % self.rpm_cmd
            cmd += "--root=%s " % self.target_rootfs
            cmd += "--dbpath=/var/lib/rpm "
            cmd += "--define='_cross_scriptlet_wrapper %s' " % \
                   self.scriptlet_wrapper
            cmd += "--define='_tmppath /%s/tmp' %s" % (self.install_dir_name, ' '.join(pkgs))
        else:
            # for pkg in pkgs:
            #     bb.note('Debug: What required: %s' % pkg)
            #     bb.note(self._invoke_smart('query %s --show-requiredby' % pkg))

            cmd = "%s %s remove -y %s" % (self.smart_cmd,
                                          self.smart_opt,
                                          ' '.join(pkgs))

        try:
            bb.note(cmd)
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            bb.note(output)
        except subprocess.CalledProcessError as e:
            bb.note("Unable to remove packages. Command '%s' "
                    "returned %d:\n%s" % (cmd, e.returncode, e.output))

    def upgrade(self):
        bb.note('smart upgrade')
        self._invoke_smart('upgrade')

    def write_index(self):
        result = self.indexer.write_index()

        if result is not None:
            bb.fatal(result)

    def remove_packaging_data(self):
        bb.utils.remove(self.image_rpmlib, True)
        bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
                        True)
        bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/opkg'), True)

        # remove temp directory
        bb.utils.remove(self.install_dir_path, True)

    def backup_packaging_data(self):
        # Save the rpmlib for incremental rpm image generation
        if os.path.exists(self.saved_rpmlib):
            bb.utils.remove(self.saved_rpmlib, True)
        shutil.copytree(self.image_rpmlib,
                        self.saved_rpmlib,
                        symlinks=True)

    def recovery_packaging_data(self):
        # Move the rpmlib back
        if os.path.exists(self.saved_rpmlib):
            if os.path.exists(self.image_rpmlib):
                bb.utils.remove(self.image_rpmlib, True)

            bb.note('Recovering packaging data')
            shutil.copytree(self.saved_rpmlib,
                            self.image_rpmlib,
                            symlinks=True)

    def list_installed(self):
        return self.pkgs_list.list_pkgs()

    '''
    For an incremental install, we need to determine what we've got,
    what we need to add, and what to remove...
    dump_install_solution will dump and save the new install
    solution.
    '''
    def dump_install_solution(self, pkgs):
        bb.note('creating new install solution for incremental install')
        if len(pkgs) == 0:
            return

        pkgs = self._pkg_translate_oe_to_smart(pkgs, False)
        install_pkgs = list()

        cmd = "%s %s install -y --dump %s 2>%s" % \
              (self.smart_cmd,
               self.smart_opt,
               ' '.join(pkgs),
               self.solution_manifest)
        try:
            # Disable rpmsys channel for the fake install
            self._invoke_smart('channel --disable rpmsys')

            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            with open(self.solution_manifest, 'r') as manifest:
                for pkg in manifest.read().split('\n'):
                    if '@' in pkg:
                        install_pkgs.append(pkg)
        except subprocess.CalledProcessError as e:
            bb.note("Unable to dump install packages. Command '%s' "
                    "returned %d:\n%s" % (cmd, e.returncode, e.output))
        # Re-enable the rpmsys channel
        self._invoke_smart('channel --enable rpmsys')
        return install_pkgs

    '''
    For an incremental install, we need to determine what we've got,
    what we need to add, and what to remove...
    load_old_install_solution will load the previous install
    solution.
    '''
    def load_old_install_solution(self):
        bb.note('load old install solution for incremental install')
        installed_pkgs = list()
        if not os.path.exists(self.solution_manifest):
            bb.note('old install solution does not exist')
            return installed_pkgs

        with open(self.solution_manifest, 'r') as manifest:
            for pkg in manifest.read().split('\n'):
                if '@' in pkg:
                    installed_pkgs.append(pkg.strip())

        return installed_pkgs

    '''
    Dump all available packages in the feeds; this should be invoked after
    the newest rpm index has been created.
    '''
    def dump_all_available_pkgs(self):
        available_manifest = self.d.expand('${T}/saved/available_pkgs.txt')
        available_pkgs = list()
        cmd = "%s %s query --output %s" % \
              (self.smart_cmd, self.smart_opt, available_manifest)
        try:
            subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            with open(available_manifest, 'r') as manifest:
                for pkg in manifest.read().split('\n'):
                    if '@' in pkg:
                        available_pkgs.append(pkg.strip())
        except subprocess.CalledProcessError as e:
            bb.note("Unable to list all available packages. Command '%s' "
                    "returned %d:\n%s" % (cmd, e.returncode, e.output))

        self.fullpkglist = available_pkgs

        return

    def save_rpmpostinst(self, pkg):
        mlibs = (self.d.getVar('MULTILIB_GLOBAL_VARIANTS', False) or "").split()

        new_pkg = pkg
        # Remove any multilib prefix from the package name
        for mlib in mlibs:
            if mlib in pkg:
                new_pkg = pkg.replace(mlib + '-', '')
                break

        bb.note(' * postponing %s' % new_pkg)
        saved_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/') + new_pkg

        cmd = self.rpm_cmd + ' -q --scripts --root ' + self.target_rootfs
        cmd += ' --dbpath=/var/lib/rpm ' + new_pkg
        cmd += ' | sed -n -e "/^postinstall scriptlet (using .*):$/,/^.* scriptlet (using .*):$/ {/.*/p}"'
        cmd += ' | sed -e "/postinstall scriptlet (using \(.*\)):$/d"'
        cmd += ' -e "/^.* scriptlet (using .*):$/d" > %s' % saved_dir

        try:
            bb.note(cmd)
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
            bb.note(output)
            os.chmod(saved_dir, 0755)
        except subprocess.CalledProcessError as e:
            bb.fatal("Invoke save_rpmpostinst failed. Command '%s' "
                     "returned %d:\n%s" % (cmd, e.returncode, e.output))

    '''Write common configuration for target usage'''
    def rpm_setup_smart_target_config(self):
        bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
                        True)

        self._invoke_smart('config --set rpm-nolinktos=1')
        self._invoke_smart('config --set rpm-noparentdirs=1')
        for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
            self._invoke_smart('flag --set ignore-recommends %s' % i)
        self._invoke_smart('channel --add rpmsys type=rpm-sys -y')

    '''
    The rpm db lock files are produced after invoking rpm to query on the
    build system, and they prevent rpm on the target from working, so we
    need to unlock the rpm db by removing the lock files.
    '''
    def unlock_rpm_db(self):
        # Remove rpm db lock files
        rpm_db_locks = glob.glob('%s/var/lib/rpm/__db.*' % self.target_rootfs)
        for f in rpm_db_locks:
            bb.utils.remove(f, True)


class OpkgPM(PackageManager):
    def __init__(self, d, target_rootfs, config_file, archs, task_name='target'):
        super(OpkgPM, self).__init__(d)

        self.target_rootfs = target_rootfs
        self.config_file = config_file
        self.pkg_archs = archs
        self.task_name = task_name

        self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True)
        self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
        self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
        self.opkg_args = "--volatile-cache -f %s -o %s " % (self.config_file, target_rootfs)
        self.opkg_args += self.d.getVar("OPKG_ARGS", True)

        opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True)
        if opkg_lib_dir[0] == "/":
            opkg_lib_dir = opkg_lib_dir[1:]

        self.opkg_dir = os.path.join(target_rootfs, opkg_lib_dir, "opkg")

        bb.utils.mkdirhier(self.opkg_dir)

        self.saved_opkg_dir = self.d.expand('${T}/saved/%s' % self.task_name)
        if not os.path.exists(self.d.expand('${T}/saved')):
            bb.utils.mkdirhier(self.d.expand('${T}/saved'))

        self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1"
        if self.from_feeds:
            self._create_custom_config()
        else:
            self._create_config()

        self.indexer = OpkgIndexer(self.d, self.deploy_dir)

    """
    This function will change a package's status in the /var/lib/opkg/status
    file. If 'packages' is None then the new status will be applied to all
    packages.
    """
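    # Illustrative call (hypothetical values): mark_packages("installed", ["base-files"])
    # rewrites the trailing "unpacked"/"installed" word on the Status line of the
    # base-files entry to "installed", leaving all other entries untouched.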
1442 def mark_packages(self, status_tag, packages=None):
1443 status_file = os.path.join(self.opkg_dir, "status")
1444
1445 with open(status_file, "r") as sf:
1446 with open(status_file + ".tmp", "w+") as tmp_sf:
1447 if packages is None:
1448 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1449 r"Package: \1\n\2Status: \3%s" % status_tag,
1450 sf.read()))
1451 else:
1452 if type(packages).__name__ != "list":
1453 raise TypeError("'packages' should be a list object")
1454
1455 status = sf.read()
1456 for pkg in packages:
1457 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1458 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1459 status)
1460
1461 tmp_sf.write(status)
1462
1463 os.rename(status_file + ".tmp", status_file)
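        # Illustrative example (package name hypothetical): calling
        # mark_packages("unpacked", ["foo"]) rewrites a status entry such as
        #   Status: install ok installed
        # into
        #   Status: install ok unpacked
        # for package "foo" only.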
1464
1465 def _create_custom_config(self):
1466 bb.note("Building from feeds activated!")
1467
1468 with open(self.config_file, "w+") as config_file:
1469 priority = 1
1470 for arch in self.pkg_archs.split():
1471 config_file.write("arch %s %d\n" % (arch, priority))
1472 priority += 5
1473
1474 for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split():
1475 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
1476
1477 if feed_match is not None:
1478 feed_name = feed_match.group(1)
1479 feed_uri = feed_match.group(2)
1480
1481 bb.note("Add %s feed with URL %s" % (feed_name, feed_uri))
1482
1483 config_file.write("src/gz %s %s\n" % (feed_name, feed_uri))
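            # Illustrative example (feed name and URL hypothetical): a value
            #   IPK_FEED_URIS = "remote##http://example.com/ipk"
            # produces the config entry
            #   src/gz remote http://example.com/ipk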
1484
1485 """
1486        Allow the package deploy directory contents to be used as a quick
1487        devel-testing feed. This creates individual feed configs for each arch
1488        subdir of those specified as compatible for the current machine.
1489 NOTE: Development-helper feature, NOT a full-fledged feed.
1490 """
1491 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "":
1492 for arch in self.pkg_archs.split():
1493 cfg_file_name = os.path.join(self.target_rootfs,
1494 self.d.getVar("sysconfdir", True),
1495 "opkg",
1496 "local-%s-feed.conf" % arch)
1497
1498 with open(cfg_file_name, "w+") as cfg_file:
1499                    cfg_file.write("src/gz local-%s %s/%s\n" %
1500 (arch,
1501 self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
1502 arch))
1503
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001504 if self.opkg_dir != '/var/lib/opkg':
1505 # There is no command line option for this anymore, we need to add
1506 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1507 # the default value of "/var/lib" as defined in opkg:
1508 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1509 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1510 cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
1511 cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001512
1513
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001514 def _create_config(self):
1515 with open(self.config_file, "w+") as config_file:
1516 priority = 1
1517 for arch in self.pkg_archs.split():
1518 config_file.write("arch %s %d\n" % (arch, priority))
1519 priority += 5
1520
1521 config_file.write("src oe file:%s\n" % self.deploy_dir)
1522
1523 for arch in self.pkg_archs.split():
1524 pkgs_dir = os.path.join(self.deploy_dir, arch)
1525 if os.path.isdir(pkgs_dir):
1526 config_file.write("src oe-%s file:%s\n" %
1527 (arch, pkgs_dir))
1528
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001529 if self.opkg_dir != '/var/lib/opkg':
1530 # There is no command line option for this anymore, we need to add
1531 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1532 # the default value of "/var/lib" as defined in opkg:
1533 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
1534 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
1535 config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
1536 config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
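        # A minimal sketch of the generated config (arch names and paths are
        # illustrative, not taken from a real build):
        #   arch all 1
        #   arch cortexa8hf-neon 6
        #   src oe file:/path/to/deploy/ipk
        #   src oe-cortexa8hf-neon file:/path/to/deploy/ipk/cortexa8hf-neon
        # plus the info_dir/status_file options above when OPKGLIBDIR is not
        # the default "/var/lib".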
1537
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001538 def insert_feeds_uris(self):
1539 if self.feed_uris == "":
1540 return
1541
1542 rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf'
1543 % self.target_rootfs)
1544
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001545 feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split())
1546 archs = self.pkg_archs.split() if self.feed_archs is None else self.feed_archs.split()
1547
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001548 with open(rootfs_config, "w+") as config_file:
1549 uri_iterator = 0
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001550 for uri in feed_uris:
1551 if archs:
1552 for arch in archs:
1553 if (self.feed_archs is None) and (not os.path.exists(os.path.join(self.deploy_dir, arch))):
1554 continue
1555                        bb.note('Note: adding opkg feed uri-%s-%d (%s)' %
1556 (arch, uri_iterator, uri))
1557 config_file.write("src/gz uri-%s-%d %s/%s\n" %
1558 (arch, uri_iterator, uri, arch))
1559 else:
1560                    bb.note('Note: adding opkg feed uri-%d (%s)' %
1561 (uri_iterator, uri))
1562 config_file.write("src/gz uri-%d %s\n" %
1563 (uri_iterator, uri))
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001564
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001565 uri_iterator += 1
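        # For a feed URI http://example.com/feed (hypothetical) and archs
        # "all armv7a", the generated base-feeds.conf would contain e.g.:
        #   src/gz uri-all-0 http://example.com/feed/all
        #   src/gz uri-armv7a-0 http://example.com/feed/armv7a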
1566
1567 def update(self):
1568 self.deploy_dir_lock()
1569
1570 cmd = "%s %s update" % (self.opkg_cmd, self.opkg_args)
1571
1572 try:
1573 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1574 except subprocess.CalledProcessError as e:
1575 self.deploy_dir_unlock()
1576 bb.fatal("Unable to update the package index files. Command '%s' "
1577 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1578
1579 self.deploy_dir_unlock()
1580
1581 def install(self, pkgs, attempt_only=False):
Patrick Williamsf1e5d692016-03-30 15:21:19 -05001582 if not pkgs:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001583 return
1584
1585 cmd = "%s %s install %s" % (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1586
1587 os.environ['D'] = self.target_rootfs
1588 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1589 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1590 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1591 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
1592 "intercept_scripts")
1593 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
1594
1595 try:
1596 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1597 bb.note(cmd)
1598 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1599 bb.note(output)
1600 except subprocess.CalledProcessError as e:
1601 (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
1602 "Command '%s' returned %d:\n%s" %
1603 (cmd, e.returncode, e.output))
1604
1605 def remove(self, pkgs, with_dependencies=True):
1606 if with_dependencies:
1607 cmd = "%s %s --force-depends --force-remove --force-removal-of-dependent-packages remove %s" % \
1608 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1609 else:
1610 cmd = "%s %s --force-depends remove %s" % \
1611 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1612
1613 try:
1614 bb.note(cmd)
1615 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1616 bb.note(output)
1617 except subprocess.CalledProcessError as e:
1618 bb.fatal("Unable to remove packages. Command '%s' "
1619 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1620
1621 def write_index(self):
1622 self.deploy_dir_lock()
1623
1624 result = self.indexer.write_index()
1625
1626 self.deploy_dir_unlock()
1627
1628 if result is not None:
1629 bb.fatal(result)
1630
1631 def remove_packaging_data(self):
1632 bb.utils.remove(self.opkg_dir, True)
1633        # Recreate the directory; it is needed by the PM lock
1634 bb.utils.mkdirhier(self.opkg_dir)
1635
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001636 def remove_lists(self):
1637 if not self.from_feeds:
1638 bb.utils.remove(os.path.join(self.opkg_dir, "lists"), True)
1639
1640 def list_installed(self):
1641 return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001642
1643 def handle_bad_recommendations(self):
1644 bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or ""
1645 if bad_recommendations.strip() == "":
1646 return
1647
1648 status_file = os.path.join(self.opkg_dir, "status")
1649
1650        # If the status file already exists, the bad recommendations have
1651        # already been handled
1652 if os.path.exists(status_file):
1653 return
1654
1655 cmd = "%s %s info " % (self.opkg_cmd, self.opkg_args)
1656
1657 with open(status_file, "w+") as status:
1658 for pkg in bad_recommendations.split():
1659 pkg_info = cmd + pkg
1660
1661 try:
1662 output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip()
1663 except subprocess.CalledProcessError as e:
1664 bb.fatal("Cannot get package info. Command '%s' "
1665 "returned %d:\n%s" % (pkg_info, e.returncode, e.output))
1666
1667 if output == "":
1668 bb.note("Ignored bad recommendation: '%s' is "
1669 "not a package" % pkg)
1670 continue
1671
1672 for line in output.split('\n'):
1673 if line.startswith("Status:"):
1674 status.write("Status: deinstall hold not-installed\n")
1675 else:
1676 status.write(line + "\n")
1677
1678 # Append a blank line after each package entry to ensure that it
1679 # is separated from the following entry
1680 status.write("\n")
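            # Each bad recommendation thus gets a status entry ending in
            #   Status: deinstall hold not-installed
            # so opkg treats the package as deliberately not installed.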
1681
1682 '''
1683    The following function performs a dummy (--noaction) install of pkgs and returns the output log.
1684 '''
1685 def dummy_install(self, pkgs):
1686 if len(pkgs) == 0:
1687 return
1688
1689        # Create a temp dir as the opkg root for the dummy installation
1690 temp_rootfs = self.d.expand('${T}/opkg')
1691 temp_opkg_dir = os.path.join(temp_rootfs, 'var/lib/opkg')
1692 bb.utils.mkdirhier(temp_opkg_dir)
1693
1694 opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
1695 opkg_args += self.d.getVar("OPKG_ARGS", True)
1696
1697 cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
1698 try:
1699 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1700 except subprocess.CalledProcessError as e:
1701 bb.fatal("Unable to update. Command '%s' "
1702 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1703
1704 # Dummy installation
1705 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
1706 opkg_args,
1707 ' '.join(pkgs))
1708 try:
1709 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1710 except subprocess.CalledProcessError as e:
1711 bb.fatal("Unable to dummy install packages. Command '%s' "
1712 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1713
1714 bb.utils.remove(temp_rootfs, True)
1715
1716 return output
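    # A usage sketch (package names hypothetical): callers can run
    #   output = pm.dummy_install(["foo", "bar"])
    # and inspect the returned opkg log to see what a real install would do,
    # without ever touching the target rootfs.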
1717
1718 def backup_packaging_data(self):
1719        # Save the opkg library directory for incremental ipk image generation
1720 if os.path.exists(self.saved_opkg_dir):
1721 bb.utils.remove(self.saved_opkg_dir, True)
1722 shutil.copytree(self.opkg_dir,
1723 self.saved_opkg_dir,
1724 symlinks=True)
1725
1726 def recover_packaging_data(self):
1727        # Copy the saved opkg library directory back
1728 if os.path.exists(self.saved_opkg_dir):
1729 if os.path.exists(self.opkg_dir):
1730 bb.utils.remove(self.opkg_dir, True)
1731
1732 bb.note('Recover packaging data')
1733 shutil.copytree(self.saved_opkg_dir,
1734 self.opkg_dir,
1735 symlinks=True)
1736
1737
1738class DpkgPM(PackageManager):
1739 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None):
1740 super(DpkgPM, self).__init__(d)
1741 self.target_rootfs = target_rootfs
1742 self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True)
1743 if apt_conf_dir is None:
1744 self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
1745 else:
1746 self.apt_conf_dir = apt_conf_dir
1747 self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
1748 self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
1749
1750 self.apt_args = d.getVar("APT_ARGS", True)
1751
1752 self.all_arch_list = archs.split()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001753 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001754 self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
1755
1756 self._create_configs(archs, base_archs)
1757
1758 self.indexer = DpkgIndexer(self.d, self.deploy_dir)
1759
1760 """
1761    This function changes a package's status in the /var/lib/dpkg/status file.
1762    If 'packages' is None then the status_tag will be applied to all
1763    packages.
1764 """
1765 def mark_packages(self, status_tag, packages=None):
1766 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1767
1768 with open(status_file, "r") as sf:
1769 with open(status_file + ".tmp", "w+") as tmp_sf:
1770 if packages is None:
1771 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1772 r"Package: \1\n\2Status: \3%s" % status_tag,
1773 sf.read()))
1774 else:
1775 if type(packages).__name__ != "list":
1776 raise TypeError("'packages' should be a list object")
1777
1778 status = sf.read()
1779 for pkg in packages:
1780 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1781 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1782 status)
1783
1784 tmp_sf.write(status)
1785
1786 os.rename(status_file + ".tmp", status_file)
1787
1788 """
1789 Run the pre/post installs for package "package_name". If package_name is
1790 None, then run all pre/post install scriptlets.
1791 """
1792 def run_pre_post_installs(self, package_name=None):
1793 info_dir = self.target_rootfs + "/var/lib/dpkg/info"
1794 suffixes = [(".preinst", "Preinstall"), (".postinst", "Postinstall")]
1795 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1796 installed_pkgs = []
1797
1798 with open(status_file, "r") as status:
1799 for line in status.read().split('\n'):
1800 m = re.match("^Package: (.*)", line)
1801 if m is not None:
1802 installed_pkgs.append(m.group(1))
1803
1804        if package_name is not None and package_name not in installed_pkgs:
1805 return
1806
1807 os.environ['D'] = self.target_rootfs
1808 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1809 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1810 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1811 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
1812 "intercept_scripts")
1813 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
1814
1815 failed_pkgs = []
1816 for pkg_name in installed_pkgs:
1817 for suffix in suffixes:
1818 p_full = os.path.join(info_dir, pkg_name + suffix[0])
1819 if os.path.exists(p_full):
1820 try:
1821 bb.note("Executing %s for package: %s ..." %
1822 (suffix[1].lower(), pkg_name))
1823 subprocess.check_output(p_full, stderr=subprocess.STDOUT)
1824 except subprocess.CalledProcessError as e:
1825 bb.note("%s for package %s failed with %d:\n%s" %
1826 (suffix[1], pkg_name, e.returncode, e.output))
1827 failed_pkgs.append(pkg_name)
1828 break
1829
1830 if len(failed_pkgs):
1831 self.mark_packages("unpacked", failed_pkgs)
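        # Scriptlets are looked up as <rootfs>/var/lib/dpkg/info/<pkg>.preinst
        # and <pkg>.postinst; any package whose scriptlet fails here is marked
        # "unpacked" so the failure remains visible in the dpkg status file.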
1832
1833 def update(self):
1834 os.environ['APT_CONFIG'] = self.apt_conf_file
1835
1836 self.deploy_dir_lock()
1837
1838 cmd = "%s update" % self.apt_get_cmd
1839
1840 try:
1841 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1842 except subprocess.CalledProcessError as e:
1843 bb.fatal("Unable to update the package index files. Command '%s' "
1844 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1845
1846 self.deploy_dir_unlock()
1847
1848 def install(self, pkgs, attempt_only=False):
1849 if attempt_only and len(pkgs) == 0:
1850 return
1851
1852 os.environ['APT_CONFIG'] = self.apt_conf_file
1853
1854 cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
1855 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
1856
1857 try:
1858 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1859 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1860 except subprocess.CalledProcessError as e:
1861 (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
1862 "Command '%s' returned %d:\n%s" %
1863 (cmd, e.returncode, e.output))
1864
1865 # rename *.dpkg-new files/dirs
1866 for root, dirs, files in os.walk(self.target_rootfs):
1867 for dir in dirs:
1868                new_dir = re.sub(r"\.dpkg-new", "", dir)
1869 if dir != new_dir:
1870 os.rename(os.path.join(root, dir),
1871 os.path.join(root, new_dir))
1872
1873 for file in files:
1874                new_file = re.sub(r"\.dpkg-new", "", file)
1875 if file != new_file:
1876 os.rename(os.path.join(root, file),
1877 os.path.join(root, new_file))
1878
1879
1880 def remove(self, pkgs, with_dependencies=True):
1881 if with_dependencies:
1882 os.environ['APT_CONFIG'] = self.apt_conf_file
1883 cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
1884 else:
1885 cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
1886 " -P --force-depends %s" % \
1887 (bb.utils.which(os.getenv('PATH'), "dpkg"),
1888 self.target_rootfs, self.target_rootfs, ' '.join(pkgs))
1889
1890 try:
1891 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1892 except subprocess.CalledProcessError as e:
1893 bb.fatal("Unable to remove packages. Command '%s' "
1894 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1895
1896 def write_index(self):
1897 self.deploy_dir_lock()
1898
1899 result = self.indexer.write_index()
1900
1901 self.deploy_dir_unlock()
1902
1903 if result is not None:
1904 bb.fatal(result)
1905
1906 def insert_feeds_uris(self):
1907 if self.feed_uris == "":
1908 return
1909
1910 sources_conf = os.path.join("%s/etc/apt/sources.list"
1911 % self.target_rootfs)
1912 arch_list = []
1913
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001914 if self.feed_archs is None:
1915 for arch in self.all_arch_list:
1916 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1917 continue
1918 arch_list.append(arch)
1919 else:
1920 arch_list = self.feed_archs.split()
1921
1922 feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001923
1924 with open(sources_conf, "w+") as sources_file:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001925 for uri in feed_uris:
1926 if arch_list:
1927 for arch in arch_list:
1928 bb.note('Note: adding dpkg channel at (%s)' % uri)
1929 sources_file.write("deb %s/%s ./\n" %
1930 (uri, arch))
1931 else:
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001932 bb.note('Note: adding dpkg channel at (%s)' % uri)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001933 sources_file.write("deb %s ./\n" % uri)
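        # With a feed URI http://example.com/deb (hypothetical) and archs
        # "amd64 all", the generated sources.list would contain e.g.:
        #   deb http://example.com/deb/amd64 ./
        #   deb http://example.com/deb/all ./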
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001934
1935 def _create_configs(self, archs, base_archs):
1936 base_archs = re.sub("_", "-", base_archs)
1937
1938 if os.path.exists(self.apt_conf_dir):
1939 bb.utils.remove(self.apt_conf_dir, True)
1940
1941 bb.utils.mkdirhier(self.apt_conf_dir)
1942 bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
1943 bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001944 bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/")
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001945
1946 arch_list = []
1947 for arch in self.all_arch_list:
1948 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1949 continue
1950 arch_list.append(arch)
1951
1952 with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
1953 priority = 801
1954 for arch in arch_list:
1955 prefs_file.write(
1956 "Package: *\n"
1957 "Pin: release l=%s\n"
1958 "Pin-Priority: %d\n\n" % (arch, priority))
1959
1960 priority += 5
1961
1962 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
1963 for pkg in pkg_exclude.split():
1964 prefs_file.write(
1965 "Package: %s\n"
1966 "Pin: release *\n"
1967 "Pin-Priority: -1\n\n" % pkg)
1968
1969 arch_list.reverse()
1970
1971 with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
1972 for arch in arch_list:
1973 sources_file.write("deb file:%s/ ./\n" %
1974 os.path.join(self.deploy_dir, arch))
1975
1976 base_arch_list = base_archs.split()
1977        multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True)
1978 for variant in multilib_variants.split():
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001979 localdata = bb.data.createCopy(self.d)
1980 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False)
1981 orig_arch = localdata.getVar("DPKG_ARCH", True)
1982 localdata.setVar("DEFAULTTUNE", variant_tune)
1983 bb.data.update_data(localdata)
1984 variant_arch = localdata.getVar("DPKG_ARCH", True)
1985 if variant_arch not in base_arch_list:
1986 base_arch_list.append(variant_arch)
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001987
1988 with open(self.apt_conf_file, "w+") as apt_conf:
1989 with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
1990 for line in apt_conf_sample.read().split("\n"):
1991 match_arch = re.match(" Architecture \".*\";$", line)
1992 architectures = ""
1993 if match_arch:
1994 for base_arch in base_arch_list:
1995 architectures += "\"%s\";" % base_arch
1996                        apt_conf.write(" Architectures {%s};\n" % architectures)
1997 apt_conf.write(" Architecture \"%s\";\n" % base_archs)
1998 else:
1999 line = re.sub("#ROOTFS#", self.target_rootfs, line)
2000 line = re.sub("#APTCONF#", self.apt_conf_dir, line)
2001 apt_conf.write(line + "\n")
2002
2003 target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
2004 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
2005
2006 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))
2007
2008 if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
2009 open(os.path.join(target_dpkg_dir, "status"), "w+").close()
2010 if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
2011 open(os.path.join(target_dpkg_dir, "available"), "w+").close()
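        # Sketch of the generated apt preferences (arch name illustrative):
        #   Package: *
        #   Pin: release l=amd64
        #   Pin-Priority: 801
        # Packages listed in PACKAGE_EXCLUDE are pinned to -1 so apt will
        # never select them, and apt.conf gets an Architectures {...}; list
        # built from base_archs plus the multilib variants.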
2012
2013 def remove_packaging_data(self):
2014 bb.utils.remove(os.path.join(self.target_rootfs,
2015 self.d.getVar('opkglibdir', True)), True)
2016 bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
2017
2018 def fix_broken_dependencies(self):
2019 os.environ['APT_CONFIG'] = self.apt_conf_file
2020
2021 cmd = "%s %s -f install" % (self.apt_get_cmd, self.apt_args)
2022
2023 try:
2024 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
2025 except subprocess.CalledProcessError as e:
2026 bb.fatal("Cannot fix broken dependencies. Command '%s' "
2027 "returned %d:\n%s" % (cmd, e.returncode, e.output))
2028
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05002029 def list_installed(self):
2030 return DpkgPkgsList(self.d, self.target_rootfs).list_pkgs()
Patrick Williamsc124f4f2015-09-15 14:41:29 -05002031
2032
2033def generate_index_files(d):
2034 classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split()
2035
2036 indexer_map = {
2037 "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)),
2038 "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)),
2039 "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True))
2040 }
2041
2042 result = None
2043
2044 for pkg_class in classes:
2045        if pkg_class not in indexer_map:
2046 continue
2047
2048 if os.path.exists(indexer_map[pkg_class][1]):
2049 result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index()
2050
2051 if result is not None:
2052 bb.fatal(result)
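# A minimal usage sketch from a bitbake python task (task name hypothetical;
# 'd' is the datastore bitbake passes to python tasks):
#   python do_package_index() {
#       from oe.package_manager import generate_index_files
#       generate_index_files(d)
#   }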
2053
2054if __name__ == "__main__":
2055 """
2056    We should be able to run this as a standalone script, from outside the
2057    bitbake environment.
2058
2059    TBD
2060
2061    """