#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

#
# Sanity check the user's setup for common misconfigurations
#

SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
    gzip gawk chrpath wget cpio perl file which"
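# Extra host tools can be required from a distro or site configuration by
# appending to this list; a minimal sketch (the "zstd" entry is only an
# illustrative assumption, not something this class mandates):
#
#   SANITY_REQUIRED_UTILITIES:append = " zstd"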

def bblayers_conf_file(d):
    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')

def sanity_conf_read(fn):
    with open(fn, 'r') as f:
        lines = f.readlines()
    return lines

def sanity_conf_find_line(pattern, lines):
    import re
    return next(((index, line)
        for index, line in enumerate(lines)
        if re.search(pattern, line)), (None, None))

def sanity_conf_update(fn, lines, version_var_name, new_version):
    index, line = sanity_conf_find_line(r"^%s" % version_var_name, lines)
    lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
    with open(fn, "w") as f:
        f.write(''.join(lines))

# Functions added to this variable MUST throw a NotImplementedError exception unless
# they successfully changed the config version in the config file. Exceptions
# are used since exec_func doesn't handle return values.
BBLAYERS_CONF_UPDATE_FUNCS += " \
    conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
    conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"

SANITY_DIFF_TOOL ?= "meld"
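# The diff tool is only referenced in the advisory messages below; it can be
# overridden from local.conf if meld is not available, e.g. (any visual or
# textual diff tool works, "diff -u" is just an assumption):
#   SANITY_DIFF_TOOL = "diff -u"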

SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
python oecore_update_localconf() {
    # Check we are using a valid local.conf
    current_conf = d.getVar('CONF_VERSION')
    conf_version = d.getVar('LOCALCONF_VERSION')

    failmsg = """Your version of local.conf was generated from an older/newer version of
local.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
python oecore_update_siteconf() {
    # If we have a site.conf, check it's valid
    current_sconf = d.getVar('SCONF_VERSION')
    sconf_version = d.getVar('SITE_CONF_VERSION')

    failmsg = """Your version of site.conf was generated from an older version of
site.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
python oecore_update_bblayers() {
    # bblayers.conf is out of date, so see if we can resolve that

    current_lconf = int(d.getVar('LCONF_VERSION'))
    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))

    failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
Please compare your file against bblayers.conf.sample and merge any changes before continuing.
"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    if not current_lconf:
        raise NotImplementedError(failmsg)

    lines = []

    if current_lconf < 4:
        raise NotImplementedError(failmsg)

    bblayers_fn = bblayers_conf_file(d)
    lines = sanity_conf_read(bblayers_fn)

    if current_lconf == 4 and lconf_version > 4:
        topdir_var = '$' + '{TOPDIR}'
        index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
        if bbpath_line:
            start = bbpath_line.find('"')
            if start != -1 and (len(bbpath_line) != (start + 1)):
                if bbpath_line[start + 1] == '"':
                    lines[index] = (bbpath_line[:start + 1] +
                                    topdir_var + bbpath_line[start + 1:])
                else:
                    if not topdir_var in bbpath_line:
                        lines[index] = (bbpath_line[:start + 1] +
                                        topdir_var + ':' + bbpath_line[start + 1:])
            else:
                raise NotImplementedError(failmsg)
        else:
            index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
            if bbfiles_line:
                lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
            else:
                raise NotImplementedError(failmsg)

        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 5 and lconf_version > 5:
        # Null update, to avoid issues with people switching between poky and other distros
        current_lconf = 6
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return
    elif current_lconf == 6 and lconf_version > 6:
        # Handle rename of meta-yocto -> meta-poky
        # This marks the start of separate version numbers but code is needed in OE-Core
        # for the migration, one last time.
        layers = d.getVar('BBLAYERS').split()
        layers = [ os.path.basename(path) for path in layers ]
        if 'meta-yocto' in layers:
            found = False
            while True:
                index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
                if meta_yocto_line:
                    lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
                    found = True
                else:
                    break
            if not found:
                raise NotImplementedError(failmsg)
            index, meta_yocto_line = sanity_conf_find_line('LCONF_VERSION.*\n', lines)
            if meta_yocto_line:
                lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
            else:
                raise NotImplementedError(failmsg)
            with open(bblayers_fn, "w") as f:
                f.write(''.join(lines))
            bb.note("Your conf/bblayers.conf has been automatically updated.")
            return
        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    raise NotImplementedError(failmsg)
}

def raise_sanity_error(msg, d, network_error=False):
    if d.getVar("SANITY_USE_EVENTS") == "1":
        try:
            bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
        except TypeError:
            bb.event.fire(bb.event.SanityCheckFailed(msg), d)
        return

    bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
    Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
    Following is the list of potential problems / advisories:

    %s""" % msg)

# Check a single tune for validity.
def check_toolchain_tune(data, tune, multilib):
    tune_errors = []
    if not tune:
        return "No tuning found for %s multilib." % multilib
    localdata = bb.data.createCopy(data)
    if multilib != "default":
        # Apply the overrides so we can look at the details.
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
        localdata.setVar("OVERRIDES", overrides)
    bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
    features = (localdata.getVar("TUNE_FEATURES:tune-%s" % tune) or "").split()
    if not features:
        return "Tuning '%s' has no defined features, and cannot be used." % tune
    valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
    conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
    # [doc] is the documentation for the variable, not a real feature
    if 'doc' in valid_tunes:
        del valid_tunes['doc']
    if 'doc' in conflicts:
        del conflicts['doc']
    for feature in features:
        if feature in conflicts:
            for conflict in conflicts[feature].split():
                if conflict in features:
                    tune_errors.append("Feature '%s' conflicts with '%s'." %
                        (feature, conflict))
        if feature in valid_tunes:
            bb.debug(2, "  %s: %s" % (feature, valid_tunes[feature]))
        else:
            tune_errors.append("Feature '%s' is not defined." % feature)
    if tune_errors:
        return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)

def check_toolchain(data):
    tune_error_set = []
    deftune = data.getVar("DEFAULTTUNE")
    tune_errors = check_toolchain_tune(data, deftune, 'default')
    if tune_errors:
        tune_error_set.append(tune_errors)

    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()

    if multilibs:
        seen_libs = []
        seen_tunes = []
        for lib in multilibs:
            if lib in seen_libs:
                tune_error_set.append("The multilib '%s' appears more than once." % lib)
            else:
                seen_libs.append(lib)
            if not lib in global_multilibs:
                tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
            tune = data.getVar("DEFAULTTUNE:virtclass-multilib-%s" % lib)
            if tune in seen_tunes:
                tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
            else:
                # Track seen tunes (not libs) so the duplicate-tune check above can fire
                seen_tunes.append(tune)
            if tune == deftune:
                tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
            else:
                tune_errors = check_toolchain_tune(data, tune, lib)
                if tune_errors:
                    tune_error_set.append(tune_errors)
    if tune_error_set:
        return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"

    return ""

def check_conf_exists(fn, data):
    bbpath = []
    fn = data.expand(fn)
    vbbpath = data.getVar("BBPATH", False)
    if vbbpath:
        bbpath += vbbpath.split(":")
    for p in bbpath:
        currname = os.path.join(data.expand(p), fn)
        if os.access(currname, os.R_OK):
            return True
    return False

def check_create_long_filename(filepath, pathname):
    import string, random
    testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
    try:
        if not os.path.exists(filepath):
            bb.utils.mkdirhier(filepath)
        f = open(testfile, "w")
        f.close()
        os.remove(testfile)
    except IOError as e:
        import errno
        err, strerror = e.args
        if err == errno.ENAMETOOLONG:
            return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
        else:
            return "Failed to create a file in %s: %s.\n" % (pathname, strerror)
    except OSError as e:
        errno, strerror = e.args
        return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, strerror)
    return ""

def check_path_length(filepath, pathname, limit):
    if len(filepath) > limit:
        return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
    return ""

def get_filesystem_id(path):
    import subprocess
    try:
        return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8').strip()
    except subprocess.CalledProcessError:
        bb.warn("Can't get filesystem id of: %s" % path)
        return None

# Check that the path isn't located on NFS.
def check_not_nfs(path, name):
    # The NFS filesystem id is 6969
    if get_filesystem_id(path) == "6969":
        return "The %s: %s can't be located on nfs.\n" % (name, path)
    return ""

# Check that the path is on a case-sensitive file system
def check_case_sensitive(path, name):
    import tempfile
    with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as tmp_file:
        if os.path.exists(tmp_file.name.lower()):
            return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
        return ""

# Check that path isn't a broken symlink
def check_symlink(lnk, data):
    if os.path.islink(lnk) and not os.path.exists(lnk):
        raise_sanity_error("%s is a broken symlink." % lnk, data)

def check_connectivity(d):
    # URIs to check can be set in the CONNECTIVITY_CHECK_URIS variable
    # using the same syntax as for SRC_URI. If the variable is not set
    # the check is skipped
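    #
    # A minimal local.conf sketch for enabling the check (the URI is only an
    # example; any fetchable SRC_URI-style URI works, and CONNECTIVITY_CHECK_MSG
    # optionally replaces the generic failure text built below):
    #
    #   CONNECTIVITY_CHECK_URIS = "https://www.example.com/"
    #   CONNECTIVITY_CHECK_MSG = "Network access failed, check your site proxy settings."
    #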
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    retval = ""

    bbn = d.getVar('BB_NO_NETWORK')
    if bbn not in (None, '0', '1'):
        return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn

    # Only check connectivity if network enabled and the
    # CONNECTIVITY_CHECK_URIS are set
    network_enabled = not (bbn == '1')
    check_enabled = len(test_uris)
    if check_enabled and network_enabled:
        # Take a copy of the data store and unset MIRRORS and PREMIRRORS
        data = bb.data.createCopy(d)
        data.delVar('PREMIRRORS')
        data.delVar('MIRRORS')
        try:
            fetcher = bb.fetch2.Fetch(test_uris, data)
            fetcher.checkstatus()
        except Exception as err:
            # Allow the message to be configured so that users can be
            # pointed to a support mechanism.
            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
            if len(msg) == 0:
                msg = "%s.\n" % err
                msg += "    Please ensure your host's network is configured correctly.\n"
                msg += "    Please ensure CONNECTIVITY_CHECK_URIS is correct and specified URIs are available.\n"
                msg += "    If your ISP or network is blocking the above URL,\n"
                msg += "    try with another domain name, for example by setting:\n"
                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\"\n"
                msg += "    You could also set BB_NO_NETWORK = \"1\" to disable network\n"
                msg += "    access if all required sources are on local disk.\n"
            retval = msg

    return retval

def check_supported_distro(sanity_data):
    from fnmatch import fnmatch

    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
    if not tested_distros:
        return

    try:
        distro = oe.lsb.distro_identifier()
    except Exception:
        distro = None

    if not distro:
        bb.warn('Host distribution could not be determined; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.')

    for supported in [x.strip() for x in tested_distros.split('\\n')]:
        if fnmatch(distro, supported):
            return

    bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.' % distro)

# Checks we should only make if MACHINE is set correctly
def check_sanity_validmachine(sanity_data):
    messages = ""

    # Check TUNE_ARCH is set
    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
        messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'

    # Check TARGET_OS is set
    if sanity_data.getVar('TARGET_OS') == 'INVALID':
        messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'

    # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
    defaulttune = sanity_data.getVar('DEFAULTTUNE')
    tunefound = False
    seen = {}
    dups = []

    for pa in pkgarchs.split():
        if seen.get(pa, 0) == 1:
            dups.append(pa)
        else:
            seen[pa] = 1
        if pa == tunepkg:
            tunefound = True

    if len(dups):
        messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)

    if tunefound == False:
        messages = messages + "Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg)

    return messages

# Patch before 2.7 can't handle all the features in git-style diffs. Some
# patches may incorrectly apply, and others won't apply at all.
def check_patch_version(sanity_data):
    import re, subprocess

    try:
        result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
        version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
        if bb.utils.vercmp_string_op(version, "2.7", "<"):
            return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
        else:
            return None
    except subprocess.CalledProcessError as e:
        return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)

# Glibc needs make 4.0 or later, we may as well match at this point
def check_make_version(sanity_data):
    import subprocess

    try:
        result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "4.0", "<"):
        return "Please install a make version of 4.0 or later.\n"

    if bb.utils.vercmp_string_op(version, "4.2.1", "=="):
        distro = oe.lsb.distro_identifier()
        if "ubuntu" in distro or "debian" in distro or "linuxmint" in distro:
            return None
        return "make version 4.2.1 is known to have issues on CentOS/OpenSUSE and other non-Ubuntu systems. Please use a buildtools-make-tarball or a newer version of make.\n"
    return None


# Check if we're running on WSL (Windows Subsystem for Linux).
# WSLv1 is known not to work but WSLv2 should work properly as
# long as the VHDX file is optimized often, let the user know
# upfront.
# More information on installing WSLv2 at:
# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
    with open("/proc/version", "r") as f:
        verdata = f.readlines()
    for l in verdata:
        if "Microsoft" in l:
            return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
        elif "microsoft" in l:
            bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
    return None

# Require at least gcc version 7.5.
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
#
# A less invasive fix is with scripts/install-buildtools (or with user
# built buildtools-extended-tarball)
#
def check_gcc_version(sanity_data):
    import subprocess

    build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
    if build_cc.strip() == "gcc":
        if bb.utils.vercmp_string_op(version, "7.5", "<"):
            return "Your version of gcc is older than 7.5 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
    return None

# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
# Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled
def check_tar_version(sanity_data):
    import subprocess
    try:
        result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[3]
    if bb.utils.vercmp_string_op(version, "1.28", "<"):
        return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
    return None

# We use git parameters and functionality only found in 1.7.8 or later
# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
def check_git_version(sanity_data):
    import subprocess
    try:
        result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "1.8.3.1", "<"):
        return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
    return None

# Check the required perl modules which may not be installed by default
def check_perl_modules(sanity_data):
    import subprocess
    ret = ""
    modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper" )
    errresult = ''
    for m in modules:
        try:
            subprocess.check_output(["perl", "-e", "use %s" % m])
        except subprocess.CalledProcessError as e:
            errresult += bytes.decode(e.output)
            ret += "%s " % m
    if ret:
        return "Required perl module(s) not found: %s\n\n%s\n" % (ret, errresult)
    return None

def sanity_check_conffiles(d):
    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
    for func in funcs:
        conffile, current_version, required_version, func = func.split(":")
        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
                d.getVar(current_version) != d.getVar(required_version):
            try:
                bb.build.exec_func(func, d)
            except NotImplementedError as e:
                bb.fatal(str(e))
            d.setVar("BB_INVALIDCONF", True)

def drop_v14_cross_builds(d):
    import glob
    indexes = glob.glob(d.expand("${SSTATE_MANIFESTS}/index-${BUILD_ARCH}_*"))
    for i in indexes:
        with open(i, "r") as f:
            lines = f.readlines()
            for l in reversed(lines):
                try:
                    (stamp, manifest, workdir) = l.split()
                except ValueError:
                    bb.fatal("Invalid line '%s' in sstate manifest '%s'" % (l, i))
                for m in glob.glob(manifest + ".*"):
                    if m.endswith(".postrm"):
                        continue
                    sstate_clean_manifest(m, d)
                bb.utils.remove(stamp + "*")
                bb.utils.remove(workdir, recurse = True)

def sanity_handle_abichanges(status, d):
    #
    # Check the 'ABI' of TMPDIR
    #
    import subprocess

    current_abi = d.getVar('OELAYOUT_ABI')
    abifile = d.getVar('SANITY_ABIFILE')
    if os.path.exists(abifile):
        with open(abifile, "r") as f:
            abi = f.read().strip()
        if not abi.isdigit():
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif int(abi) <= 11 and current_abi == "12":
            status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
        elif int(abi) <= 13 and current_abi == "14":
            status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))
        elif int(abi) == 14 and current_abi == "15":
            drop_v14_cross_builds(d)
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif (abi != current_abi):
            # Code to convert from one ABI to another could go here if possible.
            status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
    else:
        with open(abifile, "w") as f:
            f.write(current_abi)

def check_sanity_sstate_dir_change(sstate_dir, data):
    # Sanity checks to be done when the value of SSTATE_DIR changes

    # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    testmsg = ""
    if sstate_dir != "":
        testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
        # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
        try:
            err = testmsg.split(': ')[1].strip()
            if err == "Permission denied.":
                testmsg = testmsg + "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
        except IndexError:
            pass
    return testmsg

def check_sanity_version_change(status, d):
    # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
    # In other words, these tests run once in a given build directory and then
    # never again until the sanity version or host distribution id/version changes.

    # Check the python install is complete. Examples that are often removed in
    # minimal installations: glib-2.0-native requires xml.parsers.expat and icu
    # requires distutils.sysconfig.
    try:
        import xml.parsers.expat
        import distutils.sysconfig
    except ImportError as e:
        status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)

    status.addresult(check_gcc_version(d))
    status.addresult(check_make_version(d))
    status.addresult(check_patch_version(d))
    status.addresult(check_tar_version(d))
    status.addresult(check_git_version(d))
    status.addresult(check_perl_modules(d))
    status.addresult(check_wsl(d))

    missing = ""

    if not check_app_exists("${MAKE}", d):
        missing = missing + "GNU make,"

    if not check_app_exists('${BUILD_CC}', d):
        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")

    if not check_app_exists('${BUILD_CXX}', d):
        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")

    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')

    for util in required_utilities.split():
        if not check_app_exists(util, d):
            missing = missing + "%s," % util

    if missing:
        missing = missing.rstrip(',')
        status.addresult("Please install the following missing utilities: %s\n" % missing)

    assume_provided = d.getVar('ASSUME_PROVIDED').split()
    # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
    if "diffstat-native" not in assume_provided:
        status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')

    # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    import stat
    tmpdir = d.getVar('TMPDIR')
    status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
    tmpdirmode = os.stat(tmpdir).st_mode
    if (tmpdirmode & stat.S_ISGID):
        status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
    if (tmpdirmode & stat.S_ISUID):
        status.addresult("TMPDIR is setuid, please don't build in a setuid directory")

    # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    workdir = d.getVar('WORKDIR', expand=True)
    for i in pseudoignorepaths:
        if i and workdir.startswith(i):
            status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")

    # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
    pseudocontroldir = d.expand(pseudo_control_dir).split(",")
    for i in pseudoignorepaths:
        for j in pseudocontroldir:
            if i and j:
                if j.startswith(i):
                    status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")

    # Some third-party software apparently relies on chmod etc. being suid root (!!)
    import stat
    suid_check_bins = "chown chmod mknod".split()
    for bin_cmd in suid_check_bins:
        bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
        if bin_path:
            bin_stat = os.stat(bin_path)
            if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
                status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)

    # Check that we can fetch from various network transports
    netcheck = check_connectivity(d)
    status.addresult(netcheck)
    if netcheck:
        status.network_error = True

    nolibs = d.getVar('NO32LIBS')
    if not nolibs:
        lib32path = '/lib'
        if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
            lib32path = '/lib32'

        if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
            status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")

    bbpaths = d.getVar('BBPATH').split(":")
    if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
        status.addresult("BBPATH references the current directory, either through " \
                         "an empty entry, a './' or a '.'.\n\t This is unsafe and means your " \
                         "layer configuration is adding empty elements to BBPATH.\n\t " \
                         "Please check your layer.conf files and other BBPATH " \
                         "settings to remove the current working directory " \
                         "references.\n" \
                         "Parsed BBPATH is " + str(bbpaths))

    oes_bb_conf = d.getVar('OES_BITBAKE_CONF')
    if not oes_bb_conf:
        status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')

    # The length of TMPDIR can't be longer than 410
    status.addresult(check_path_length(tmpdir, "TMPDIR", 410))

    # Check that TMPDIR isn't located on nfs
    status.addresult(check_not_nfs(tmpdir, "TMPDIR"))

    # Check for case-insensitive file systems (such as Linux in Docker on
    # macOS with default HFS+ file system)
    status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))

def sanity_check_locale(d):
    """
    Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists.
    """
    import locale
    try:
        locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
    except locale.Error:
        raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)

def check_sanity_everybuild(status, d):
    import os, stat
    # Sanity tests which test the user's environment so need to run at each build (or are so cheap
    # it makes sense to always run them).

    if 0 == os.getuid():
        raise_sanity_error("Do not use Bitbake as root.", d)

    # Check the Python version, we now have a minimum of Python 3.6
    import sys
    if sys.hexversion < 0x030600F0:
        status.addresult('The system requires at least Python 3.6 to run. Please update your Python interpreter.\n')

    # Check the bitbake version meets minimum requirements
    minversion = d.getVar('BB_MIN_VERSION')
    if bb.utils.vercmp_string_op(bb.__version__, minversion, "<"):
        status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))

    sanity_check_locale(d)

    paths = d.getVar('PATH').split(":")
    if "." in paths or "./" in paths or "" in paths:
        status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")

    # Check if bitbake is present in PATH environment variable
    bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
    if not bb_check:
        bb.warn("bitbake binary is not found in PATH, did you source the script?")

    # Check whether the 'inherit' directive is found (used for a class to inherit);
    # in a conf file it's supposed to be uppercase INHERIT
    inherit = d.getVar('inherit')
    if inherit:
        status.addresult("Please don't use the inherit directive in your local.conf. The directive is supposed to be used in classes and recipes only to inherit bbclasses. Here INHERIT should be used.\n")

    # Check that the DISTRO is valid, if set
    # need to take into account DISTRO renaming DISTRO
    distro = d.getVar('DISTRO')
    if distro and distro != "nodistro":
        if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))

    # Check that these variables don't use tilde-expansion as we don't do that
    for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
        if d.getVar(v).startswith("~"):
            status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)

    # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
    # set, since so much relies on it being set.
    dldir = d.getVar('DL_DIR')
    if not dldir:
        status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
    if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
        status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
    check_symlink(dldir, d)

    # Check that the MACHINE is valid, if it is set
    machinevalid = True
    if d.getVar('MACHINE'):
        if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
            status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
            machinevalid = False
        else:
            status.addresult(check_sanity_validmachine(d))
    else:
        status.addresult('Please set a MACHINE in your local.conf or environment\n')
        machinevalid = False
    if machinevalid:
        status.addresult(check_toolchain(d))

    # Check that the SDKMACHINE is valid, if it is set
    if d.getVar('SDKMACHINE'):
        if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
            status.addresult('Specified SDKMACHINE value is not valid\n')
        elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
            status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g. in the distro configuration)\n')

    # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
    sdkvendor = d.getVar("SDK_VENDOR")
    if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
        status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)

    check_supported_distro(d)

    omask = os.umask(0o022)
    if omask & 0o755:
        status.addresult("Please use a umask which allows a+rx and u+rwx\n")
    os.umask(omask)

    if d.getVar('TARGET_ARCH') == "arm":
        # This path is no longer user-readable in modern (very recent) Linux
        try:
            if os.path.exists("/proc/sys/vm/mmap_min_addr"):
                f = open("/proc/sys/vm/mmap_min_addr", "r")
                try:
                    if (int(f.read().strip()) > 65536):
                        status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n")
                finally:
                    f.close()
        except:
            pass

    for checkdir in ['COREBASE', 'TMPDIR']:
        val = d.getVar(checkdir)
        if val.find('..') != -1:
            status.addresult("Error, you have '..' in your %s directory path. Please ensure the variable contains an absolute path as this can break some recipe builds in obtuse ways." % checkdir)
        if val.find('+') != -1:
            status.addresult("Error, you have an invalid character (+) in your %s directory path. Please move the installation to a directory which doesn't include any + characters." % checkdir)
        if val.find('@') != -1:
            status.addresult("Error, you have an invalid character (@) in your %s directory path. Please move the installation to a directory which doesn't include any @ characters." % checkdir)
        if val.find(' ') != -1:
            status.addresult("Error, you have a space in your %s directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this." % checkdir)
        if val.find('%') != -1:
            status.addresult("Error, you have an invalid character (%) in your %s directory path which causes problems with python string formatting. Please move the installation to a directory which doesn't include any % characters." % checkdir)

    # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
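    # Each of these variables holds whitespace-separated (regex, mirror URI)
    # pairs. A well-formed sketch, using a placeholder host, would be:
    #
    #   PREMIRRORS:prepend = "git://.*/.* http://downloads.example.com/mirror/sources/ "
    #
    # The checks below warn when the list has an odd number of members or when
    # either half of a pair does not use a recognised protocol.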
    import re
    mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
    protocols = ['http', 'ftp', 'file', 'https', \
                 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', 'az', 'ftps']
    for mirror_var in mirror_vars:
        mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()

        # Split into pairs
        if len(mirrors) % 2 != 0:
            bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, str(mirrors)))
            continue
        mirrors = list(zip(*[iter(mirrors)]*2))

        for mirror_entry in mirrors:
            pattern, mirror = mirror_entry

            decoded = bb.fetch2.decodeurl(pattern)
            try:
                pattern_scheme = re.compile(decoded[0])
            except re.error as exc:
                bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
                continue

            if not any(pattern_scheme.match(protocol) for protocol in protocols):
                bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
                continue

            if not any(mirror.startswith(protocol + '://') for protocol in protocols):
                bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
                continue

            if mirror.startswith('file://'):
                import urllib
                check_symlink(urllib.parse.urlparse(mirror).path, d)
                # SSTATE_MIRROR ends with a /PATH string
                if mirror.endswith('/PATH'):
                    # remove /PATH$ from SSTATE_MIRROR to get a working
                    # base directory path
                    mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path
                    check_symlink(mirror_base, d)

    # Check sstate mirrors aren't being used with a local hash server and no remote
    hashserv = d.getVar("BB_HASHSERVE")
    if d.getVar("SSTATE_MIRRORS") and hashserv and hashserv.startswith("unix://") and not d.getVar("BB_HASHSERVE_UPSTREAM"):
        bb.warn("You are using a local hash equivalence server but have configured an sstate mirror. This will likely mean no sstate will match from the mirror. You may wish to disable the hash equivalence use (BB_HASHSERVE), or use a hash equivalence server alongside the sstate mirror.")

    # Check that TMPDIR hasn't changed location since the last time we were run
    tmpdir = d.getVar('TMPDIR')
    checkfile = os.path.join(tmpdir, "saved_tmpdir")
    if os.path.exists(checkfile):
        with open(checkfile, "r") as f:
            saved_tmpdir = f.read().strip()
        if (saved_tmpdir != tmpdir):
            status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir)
    else:
        bb.utils.mkdirhier(tmpdir)
        # Remove setuid, setgid and sticky bits from TMPDIR
        try:
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
        except OSError as exc:
            bb.warn("Unable to chmod TMPDIR: %s" % exc)
        with open(checkfile, "w") as f:
            f.write(tmpdir)

    # If /bin/sh is a symlink, check that it points to dash or bash
    if os.path.islink('/bin/sh'):
        real_sh = os.path.realpath('/bin/sh')
        # Due to update-alternatives, the shell name may take various
        # forms, such as /bin/dash, bin/bash, /bin/bash.bash ...
        if '/dash' not in real_sh and '/bash' not in real_sh:
            status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)

def check_sanity(sanity_data):
    class SanityStatus(object):
        def __init__(self):
            self.messages = ""
            self.network_error = False

        def addresult(self, message):
            if message:
                self.messages = self.messages + message

    status = SanityStatus()

    tmpdir = sanity_data.getVar('TMPDIR')
    sstate_dir = sanity_data.getVar('SSTATE_DIR')

    check_symlink(sstate_dir, sanity_data)

    # Check saved sanity info
    last_sanity_version = 0
    last_tmpdir = ""
    last_sstate_dir = ""
    last_nativelsbstr = ""
    sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
    if os.path.exists(sanityverfile):
        with open(sanityverfile, 'r') as f:
            for line in f:
                if line.startswith('SANITY_VERSION'):
                    last_sanity_version = int(line.split()[1])
                if line.startswith('TMPDIR'):
                    last_tmpdir = line.split()[1]
                if line.startswith('SSTATE_DIR'):
                    last_sstate_dir = line.split()[1]
                if line.startswith('NATIVELSBSTRING'):
                    last_nativelsbstr = line.split()[1]

    check_sanity_everybuild(status, sanity_data)

    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
    network_error = False
    # NATIVELSBSTRING var may have been overridden with "universal", so
    # get actual host distribution id and version
    nativelsbstr = lsb_distro_identifier(sanity_data)
    if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
        check_sanity_version_change(status, sanity_data)
        status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
    else:
        if last_sstate_dir != sstate_dir:
            status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))

    if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
        with open(sanityverfile, 'w') as f:
            f.write("SANITY_VERSION %s\n" % sanity_version)
            f.write("TMPDIR %s\n" % tmpdir)
            f.write("SSTATE_DIR %s\n" % sstate_dir)
            f.write("NATIVELSBSTRING %s\n" % nativelsbstr)

    sanity_handle_abichanges(status, sanity_data)

    if status.messages != "":
        raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)

# Create a copy of the datastore and finalise it to ensure appends and
# overrides are set - the datastore has yet to be finalised at ConfigParsed
def copy_data(e):
    sanity_data = bb.data.createCopy(e.data)
    sanity_data.finalize()
    return sanity_data

addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
    sanity_check_conffiles(e.data)
}

addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
    if bb.event.getName(e) == "SanityCheck":
        sanity_data = copy_data(e)
        check_sanity(sanity_data)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
            bb.event.fire(bb.event.SanityCheckPassed(), e.data)
    elif bb.event.getName(e) == "NetworkTest":
        sanity_data = copy_data(e)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)

    return
}