# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake Utility Functions
"""

# Copyright (C) 2004 Michael Lauer
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import re, fcntl, os, string, stat, shutil, time
import sys
import errno
import logging
import bb
import bb.msg
import multiprocessing
import subprocess
import glob
import fnmatch
import traceback
import signal
import ast
from commands import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll


logger = logging.getLogger("BitBake.Util")
43
44def clean_context():
45 return {
46 "os": os,
47 "bb": bb,
48 "time": time,
49 }
50
51def get_context():
52 return _context
53
54
55def set_context(ctx):
56 _context = ctx
57
58# Context used in better_exec, eval
59_context = clean_context()
60
61class VersionStringException(Exception):
62 """Exception raised when an invalid version specification is found"""
63
64def explode_version(s):
65 r = []
66 alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
67 numeric_regexp = re.compile('^(\d+)(.*)$')
68 while (s != ''):
69 if s[0] in string.digits:
70 m = numeric_regexp.match(s)
71 r.append((0, int(m.group(1))))
72 s = m.group(2)
73 continue
74 if s[0] in string.letters:
75 m = alpha_regexp.match(s)
76 r.append((1, m.group(1)))
77 s = m.group(2)
78 continue
79 if s[0] == '~':
80 r.append((-1, s[0]))
81 else:
82 r.append((2, s[0]))
83 s = s[1:]
84 return r
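
# Informal example of the encoding produced above (derived from the code, not
# from upstream docs): explode_version("1.2rc3") yields
#   [(0, 1), (2, '.'), (0, 2), (1, 'rc'), (0, 3)]
# i.e. digit runs become (0, int), letter runs (1, str), '~' becomes (-1, '~')
# and any other separator (2, char); vercmp_part() compares these element-wise.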
85
86def split_version(s):
87 """Split a version string into its constituent parts (PE, PV, PR)"""
88 s = s.strip(" <>=")
89 e = 0
90 if s.count(':'):
91 e = int(s.split(":")[0])
92 s = s.split(":")[1]
93 r = ""
94 if s.count('-'):
95 r = s.rsplit("-", 1)[1]
96 s = s.rsplit("-", 1)[0]
97 v = s
98 return (e, v, r)
99
100def vercmp_part(a, b):
101 va = explode_version(a)
102 vb = explode_version(b)
103 while True:
104 if va == []:
105 (oa, ca) = (0, None)
106 else:
107 (oa, ca) = va.pop(0)
108 if vb == []:
109 (ob, cb) = (0, None)
110 else:
111 (ob, cb) = vb.pop(0)
112 if (oa, ca) == (0, None) and (ob, cb) == (0, None):
113 return 0
114 if oa < ob:
115 return -1
116 elif oa > ob:
117 return 1
118 elif ca < cb:
119 return -1
120 elif ca > cb:
121 return 1
122
123def vercmp(ta, tb):
124 (ea, va, ra) = ta
125 (eb, vb, rb) = tb
126
127 r = int(ea or 0) - int(eb or 0)
128 if (r == 0):
129 r = vercmp_part(va, vb)
130 if (r == 0):
131 r = vercmp_part(ra, rb)
132 return r
133
134def vercmp_string(a, b):
135 ta = split_version(a)
136 tb = split_version(b)
137 return vercmp(ta, tb)
138
139def vercmp_string_op(a, b, op):
140 """
141 Compare two versions and check if the specified comparison operator matches the result of the comparison.
142 This function is fairly liberal about what operators it will accept since there are a variety of styles
143 depending on the context.
144 """
145 res = vercmp_string(a, b)
146 if op in ('=', '=='):
147 return res == 0
148 elif op == '<=':
149 return res <= 0
150 elif op == '>=':
151 return res >= 0
152 elif op in ('>', '>>'):
153 return res > 0
154 elif op in ('<', '<<'):
155 return res < 0
156 elif op == '!=':
157 return res != 0
158 else:
159 raise VersionStringException('Unsupported comparison operator "%s"' % op)
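
# Illustrative use (values made up): vercmp_string_op("1.0", "1.1", "<")
# returns True, since vercmp_string("1.0", "1.1") is negative; an unrecognised
# operator such as "~=" raises VersionStringException.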
160
161def explode_deps(s):
162 """
163 Take an RDEPENDS style string of format:
164 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
165 and return a list of dependencies.
166 Version information is ignored.
167 """
168 r = []
169 l = s.split()
170 flag = False
171 for i in l:
172 if i[0] == '(':
173 flag = True
174 #j = []
175 if not flag:
176 r.append(i)
177 #else:
178 # j.append(i)
179 if flag and i.endswith(')'):
180 flag = False
181 # Ignore version
182 #r[-1] += ' ' + ' '.join(j)
183 return r
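
# Informal example: explode_deps("foo (>= 1.0) bar") returns ['foo', 'bar'];
# the parenthesised version constraint is dropped entirely.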
184
185def explode_dep_versions2(s):
186 """
187 Take an RDEPENDS style string of format:
188 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
189 and return a dictionary of dependencies and versions.
190 """
191 r = {}
192 l = s.replace(",", "").split()
193 lastdep = None
194 lastcmp = ""
195 lastver = ""
196 incmp = False
197 inversion = False
198 for i in l:
199 if i[0] == '(':
200 incmp = True
201 i = i[1:].strip()
202 if not i:
203 continue
204
205 if incmp:
206 incmp = False
207 inversion = True
208 # This list is based on behavior and supported comparisons from deb, opkg and rpm.
209 #
210 # Even though =<, <<, ==, !=, =>, and >> may not be supported,
211 # we list each possibly valid item.
212 # The build system is responsible for validation of what it supports.
213 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
214 lastcmp = i[0:2]
215 i = i[2:]
216 elif i.startswith(('<', '>', '=')):
217 lastcmp = i[0:1]
218 i = i[1:]
219 else:
220 # This is an unsupported case!
221 raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
222 lastcmp = (i or "")
223 i = ""
224 i.strip()
225 if not i:
226 continue
227
228 if inversion:
229 if i.endswith(')'):
230 i = i[:-1] or ""
231 inversion = False
232 if lastver and i:
233 lastver += " "
234 if i:
235 lastver += i
236 if lastdep not in r:
237 r[lastdep] = []
238 r[lastdep].append(lastcmp + " " + lastver)
239 continue
240
241 #if not inversion:
242 lastdep = i
243 lastver = ""
244 lastcmp = ""
245 if not (i in r and r[i]):
246 r[lastdep] = []
247
248 return r
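
# Informal example: explode_dep_versions2("foo (>= 1.0) bar") returns
# {'foo': ['>= 1.0'], 'bar': []} - every dependency maps to a (possibly empty)
# list of "operator version" strings.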
249
250def explode_dep_versions(s):
251 r = explode_dep_versions2(s)
252 for d in r:
253 if not r[d]:
254 r[d] = None
255 continue
256 if len(r[d]) > 1:
257 bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
258 r[d] = r[d][0]
259 return r
260
261def join_deps(deps, commasep=True):
262 """
263 Take the result from explode_dep_versions and generate a dependency string
264 """
265 result = []
266 for dep in deps:
267 if deps[dep]:
268 if isinstance(deps[dep], list):
269 for v in deps[dep]:
270 result.append(dep + " (" + v + ")")
271 else:
272 result.append(dep + " (" + deps[dep] + ")")
273 else:
274 result.append(dep)
275 if commasep:
276 return ", ".join(result)
277 else:
278 return " ".join(result)
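
# Informal round-trip example: join_deps({'foo': ['>= 1.0'], 'bar': []})
# produces something like "foo (>= 1.0), bar" (ordering follows dict
# iteration order), or space-separated output when commasep=False.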
279
280def _print_trace(body, line):
281 """
282 Print the Environment of a Text Body
283 """
284 error = []
285 # print the environment of the method
286 min_line = max(1, line-4)
287 max_line = min(line + 4, len(body))
288 for i in range(min_line, max_line + 1):
289 if line == i:
290 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
291 else:
292 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
293 return error
294
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.
    """
300 try:
        cache = bb.methodpool.compile_cache(text)
302 if cache:
303 return cache
304 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
305 text2 = "\n" * int(lineno) + text
306 code = compile(text2, realfile, mode)
307 bb.methodpool.compile_cache_add(text, code)
308 return code
    except Exception as e:
310 error = []
311 # split the text into lines again
312 body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
314 if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
316 error.extend(_print_trace(body, e.lineno))
317 else:
318 error.append("The function causing this error was:")
319 for line in body:
320 error.append(line)
321 error.append("%s: %s" % (e.__class__.__name__, str(e)))
322
323 logger.error("\n".join(error))
324
325 e = bb.BBHandledException(e)
326 raise e
327
328def _print_exception(t, value, tb, realfile, text, context):
329 error = []
330 try:
331 exception = traceback.format_exception_only(t, value)
332 error.append('Error executing a python function in %s:\n' % realfile)
333
        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')
340
341 linefailed = tb.tb_lineno
342
343 tbextract = traceback.extract_tb(tb)
344 tbformat = traceback.format_list(tbextract)
345 error.append("The stack trace of python calls that resulted in this exception/failure was:")
346 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
347 error.extend(_print_trace(textarray, linefailed))
348
349 # See if this is a function we constructed and has calls back into other functions in
350 # "text". If so, try and improve the context of the error by diving down the trace
351 level = 0
352 nexttb = tb.tb_next
353 while nexttb is not None and (level+1) < len(tbextract):
354 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
355 if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
356 # The code was possibly in the string we compiled ourselves
357 error.extend(_print_trace(textarray, tbextract[level+1][1]))
358 elif tbextract[level+1][0].startswith("/"):
359 # The code looks like it might be in a file, try and load it
360 try:
361 with open(tbextract[level+1][0], "r") as f:
362 text = f.readlines()
363 error.extend(_print_trace(text, tbextract[level+1][1]))
364 except:
365 error.append(tbformat[level+1])
            else:
367 error.append(tbformat[level+1])
368 nexttb = tb.tb_next
369 level = level + 1
370
371 error.append("Exception: %s" % ''.join(exception))
372 finally:
373 logger.error("\n".join(error))
374
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
381 import bb.parse
382 if not text:
383 text = code
384 if not hasattr(code, "co_filename"):
385 code = better_compile(code, realfile, realfile)
386 try:
387 exec(code, get_context(), context)
388 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
389 # Error already shown so passthrough, no need for traceback
390 raise
391 except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
395 try:
396 _print_exception(t, value, tb, realfile, text, context)
397 except Exception as e:
398 logger.error("Exception handler error: %s" % str(e))
399
400 e = bb.BBHandledException(e)
401 raise e
402
403def simple_exec(code, context):
404 exec(code, get_context(), context)
405
406def better_eval(source, locals):
407 return eval(source, get_context(), locals)
408
409@contextmanager
410def fileslocked(files):
411 """Context manager for locking and unlocking file locks."""
412 locks = []
413 if files:
414 for lockfile in files:
415 locks.append(bb.utils.lockfile(lockfile))
416
417 yield
418
419 for lock in locks:
420 bb.utils.unlockfile(lock)
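
# Hypothetical usage (the lock path is illustrative only):
#   with bb.utils.fileslocked(["/tmp/example.lock"]):
#       ...  # critical section; locks are released when the block exits normally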
421
422@contextmanager
423def timeout(seconds):
424 def timeout_handler(signum, frame):
425 pass
426
427 original_handler = signal.signal(signal.SIGALRM, timeout_handler)
428
429 try:
430 signal.alarm(seconds)
431 yield
432 finally:
433 signal.alarm(0)
434 signal.signal(signal.SIGALRM, original_handler)
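
# Hypothetical usage: run a block under an alarm, e.g.
#   with bb.utils.timeout(60):
#       some_blocking_call()
# Note the handler is a no-op, so the alarm mainly serves to interrupt blocking
# system calls rather than raising a dedicated timeout exception.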
435
436def lockfile(name, shared=False, retry=True, block=False):
437 """
438 Use the specified file as a lock file, return when the lock has
439 been acquired. Returns a variable to pass to unlockfile().
440 Parameters:
441 retry: True to re-try locking if it fails, False otherwise
442 block: True to block until the lock succeeds, False otherwise
443 The retry and block parameters are kind of equivalent unless you
444 consider the possibility of sending a signal to the process to break
445 out - at which point you want block=True rather than retry=True.
446 """
447 dirname = os.path.dirname(name)
448 mkdirhier(dirname)
449
450 if not os.access(dirname, os.W_OK):
451 logger.error("Unable to acquire lock '%s', directory is not writable",
452 name)
453 sys.exit(1)
454
455 op = fcntl.LOCK_EX
456 if shared:
457 op = fcntl.LOCK_SH
458 if not retry and not block:
459 op = op | fcntl.LOCK_NB
460
461 while True:
462 # If we leave the lockfiles lying around there is no problem
463 # but we should clean up after ourselves. This gives potential
464 # for races though. To work around this, when we acquire the lock
465 # we check the file we locked was still the lock file on disk.
466 # by comparing inode numbers. If they don't match or the lockfile
467 # no longer exists, we start again.
468
469 # This implementation is unfair since the last person to request the
470 # lock is the most likely to win it.
471
472 try:
473 lf = open(name, 'a+')
474 fileno = lf.fileno()
475 fcntl.flock(fileno, op)
476 statinfo = os.fstat(fileno)
477 if os.path.exists(lf.name):
478 statinfo2 = os.stat(lf.name)
479 if statinfo.st_ino == statinfo2.st_ino:
480 return lf
481 lf.close()
482 except Exception:
483 try:
484 lf.close()
485 except Exception:
486 pass
487 pass
488 if not retry:
489 return None
490
491def unlockfile(lf):
492 """
493 Unlock a file locked using lockfile()
494 """
495 try:
496 # If we had a shared lock, we need to promote to exclusive before
497 # removing the lockfile. Attempt this, ignore failures.
498 fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
499 os.unlink(lf.name)
500 except (IOError, OSError):
501 pass
502 fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
503 lf.close()
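
# Hypothetical usage pairing the two functions above (path is illustrative):
#   lock = bb.utils.lockfile("/tmp/example.lock")
#   try:
#       ...  # exclusive section
#   finally:
#       bb.utils.unlockfile(lock)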
504
505def md5_file(filename):
506 """
507 Return the hex string representation of the MD5 checksum of filename.
508 """
509 try:
510 import hashlib
511 m = hashlib.md5()
512 except ImportError:
513 import md5
514 m = md5.new()
515
516 with open(filename, "rb") as f:
517 for line in f:
518 m.update(line)
519 return m.hexdigest()
520
521def sha256_file(filename):
522 """
523 Return the hex string representation of the 256-bit SHA checksum of
524 filename. On Python 2.4 this will return None, so callers will need to
525 handle that by either skipping SHA checks, or running a standalone sha256sum
526 binary.
527 """
528 try:
529 import hashlib
530 except ImportError:
531 return None
532
533 s = hashlib.sha256()
534 with open(filename, "rb") as f:
535 for line in f:
536 s.update(line)
537 return s.hexdigest()
538
def sha1_file(filename):
540 """
541 Return the hex string representation of the SHA1 checksum of the filename
542 """
543 try:
544 import hashlib
545 except ImportError:
546 return None
547
548 s = hashlib.sha1()
549 with open(filename, "rb") as f:
550 for line in f:
551 s.update(line)
552 return s.hexdigest()
553
def preserved_envvars_exported():
555 """Variables which are taken from the environment and placed in and exported
556 from the metadata"""
557 return [
558 'BB_TASKHASH',
559 'HOME',
560 'LOGNAME',
561 'PATH',
562 'PWD',
563 'SHELL',
564 'TERM',
565 'USER',
566 ]
567
568def preserved_envvars():
569 """Variables which are taken from the environment and placed in the metadata"""
570 v = [
571 'BBPATH',
572 'BB_PRESERVE_ENV',
573 'BB_ENV_WHITELIST',
574 'BB_ENV_EXTRAWHITE',
575 ]
576 return v + preserved_envvars_exported()
577
578def filter_environment(good_vars):
579 """
580 Create a pristine environment for bitbake. This will remove variables that
581 are not known and may influence the build in a negative way.
582 """
583
584 removed_vars = {}
585 for key in os.environ.keys():
586 if key in good_vars:
587 continue
588
589 removed_vars[key] = os.environ[key]
590 os.unsetenv(key)
591 del os.environ[key]
592
593 if removed_vars:
594 logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
595
596 return removed_vars
597
598def approved_variables():
599 """
600 Determine and return the list of whitelisted variables which are approved
601 to remain in the environment.
602 """
603 if 'BB_PRESERVE_ENV' in os.environ:
604 return os.environ.keys()
605 approved = []
606 if 'BB_ENV_WHITELIST' in os.environ:
607 approved = os.environ['BB_ENV_WHITELIST'].split()
608 approved.extend(['BB_ENV_WHITELIST'])
609 else:
610 approved = preserved_envvars()
611 if 'BB_ENV_EXTRAWHITE' in os.environ:
612 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
613 if 'BB_ENV_EXTRAWHITE' not in approved:
614 approved.extend(['BB_ENV_EXTRAWHITE'])
615 return approved
616
617def clean_environment():
618 """
619 Clean up any spurious environment variables. This will remove any
620 variables the user hasn't chosen to preserve.
621 """
622 if 'BB_PRESERVE_ENV' not in os.environ:
623 good_vars = approved_variables()
624 return filter_environment(good_vars)
625
626 return {}
627
628def empty_environment():
629 """
630 Remove all variables from the environment.
631 """
632 for s in os.environ.keys():
633 os.unsetenv(s)
634 del os.environ[s]
635
636def build_environment(d):
637 """
638 Build an environment from all exported variables.
639 """
640 import bb.data
641 for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
644 os.environ[var] = d.getVar(var, True) or ""
645
646def _check_unsafe_delete_path(path):
647 """
648 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
649 the caller should raise an exception with an appropriate message.
650 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
651 with potentially disastrous results.
652 """
653 extra = ''
654 # HOME might not be /home/something, so in case we can get it, check against it
655 homedir = os.environ.get('HOME', '')
656 if homedir:
657 extra = '|%s' % homedir
658 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
659 return True
660 return False
661
662def remove(path, recurse=False):
663 """Equivalent to rm -f or rm -rf"""
664 if not path:
665 return
666 if recurse:
667 for name in glob.glob(path):
668 if _check_unsafe_delete_path(path):
669 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
670 # shutil.rmtree(name) would be ideal but its too slow
671 subprocess.call(['rm', '-rf'] + glob.glob(path))
672 return
673 for name in glob.glob(path):
674 try:
675 os.unlink(name)
676 except OSError as exc:
677 if exc.errno != errno.ENOENT:
678 raise
679
680def prunedir(topdir):
681 # Delete everything reachable from the directory named in 'topdir'.
682 # CAUTION: This is dangerous!
683 if _check_unsafe_delete_path(topdir):
684 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
685 for root, dirs, files in os.walk(topdir, topdown = False):
686 for name in files:
687 os.remove(os.path.join(root, name))
688 for name in dirs:
689 if os.path.islink(os.path.join(root, name)):
690 os.remove(os.path.join(root, name))
691 else:
692 os.rmdir(os.path.join(root, name))
693 os.rmdir(topdir)
694
695#
696# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
698#
699def prune_suffix(var, suffixes, d):
700 # See if var ends with any of the suffixes listed and
701 # remove it if found
702 for suffix in suffixes:
703 if var.endswith(suffix):
704 return var.replace(suffix, "")
705 return var
706
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if the
    directory already exists, unlike os.makedirs().
    """
711
712 try:
713 os.makedirs(directory)
714 except OSError as e:
715 if e.errno != errno.EEXIST:
716 raise e
717
718def movefile(src, dest, newmtime = None, sstat = None):
719 """Moves a file from src to dest, preserving all permissions and
720 attributes; mtime will be preserved even when moving across
721 filesystems. Returns true on success and false on failure. Move is
722 atomic.
723 """
724
725 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
726 try:
727 if not sstat:
728 sstat = os.lstat(src)
729 except Exception as e:
730 print("movefile: Stating source file failed...", e)
731 return None
732
733 destexists = 1
734 try:
735 dstat = os.lstat(dest)
736 except:
737 dstat = os.lstat(os.path.dirname(dest))
738 destexists = 0
739
740 if destexists:
741 if stat.S_ISLNK(dstat[stat.ST_MODE]):
742 try:
743 os.unlink(dest)
744 destexists = 0
745 except Exception as e:
746 pass
747
748 if stat.S_ISLNK(sstat[stat.ST_MODE]):
749 try:
750 target = os.readlink(src)
751 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
752 os.unlink(dest)
753 os.symlink(target, dest)
754 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
755 os.unlink(src)
756 return os.lstat(dest)
757 except Exception as e:
758 print("movefile: failed to properly create symlink:", dest, "->", target, e)
759 return None
760
761 renamefailed = 1
762 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
763 try:
764 # os.rename needs to know the dest path ending with file name
765 # so append the file name to a path only if it's a dir specified
766 srcfname = os.path.basename(src)
767 destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
768 else dest
769 os.rename(src, destpath)
770 renamefailed = 0
771 except Exception as e:
772 if e[0] != errno.EXDEV:
773 # Some random error.
774 print("movefile: Failed to move", src, "to", dest, e)
775 return None
776 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
777
778 if renamefailed:
779 didcopy = 0
780 if stat.S_ISREG(sstat[stat.ST_MODE]):
781 try: # For safety copy then move it over.
782 shutil.copyfile(src, dest + "#new")
783 os.rename(dest + "#new", dest)
784 didcopy = 1
785 except Exception as e:
786 print('movefile: copy', src, '->', dest, 'failed.', e)
787 return None
788 else:
789 #we don't yet handle special, so we need to fall back to /bin/mv
790 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
791 if a[0] != 0:
792 print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
793 return None # failure
794 try:
795 if didcopy:
796 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
797 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
798 os.unlink(src)
799 except Exception as e:
800 print("movefile: Failed to chown/chmod/unlink", dest, e)
801 return None
802
803 if newmtime:
804 os.utime(dest, (newmtime, newmtime))
805 else:
806 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
807 newmtime = sstat[stat.ST_MTIME]
808 return newmtime
809
810def copyfile(src, dest, newmtime = None, sstat = None):
811 """
812 Copies a file from src to dest, preserving all permissions and
813 attributes; mtime will be preserved even when moving across
814 filesystems. Returns true on success and false on failure.
815 """
816 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
817 try:
818 if not sstat:
819 sstat = os.lstat(src)
820 except Exception as e:
821 logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
822 return False
823
824 destexists = 1
825 try:
826 dstat = os.lstat(dest)
827 except:
828 dstat = os.lstat(os.path.dirname(dest))
829 destexists = 0
830
831 if destexists:
832 if stat.S_ISLNK(dstat[stat.ST_MODE]):
833 try:
834 os.unlink(dest)
835 destexists = 0
836 except Exception as e:
837 pass
838
839 if stat.S_ISLNK(sstat[stat.ST_MODE]):
840 try:
841 target = os.readlink(src)
842 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
843 os.unlink(dest)
844 os.symlink(target, dest)
845 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
846 return os.lstat(dest)
847 except Exception as e:
848 logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
849 return False
850
851 if stat.S_ISREG(sstat[stat.ST_MODE]):
852 try:
853 srcchown = False
854 if not os.access(src, os.R_OK):
855 # Make sure we can read it
856 srcchown = True
857 os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
858
859 # For safety copy then move it over.
860 shutil.copyfile(src, dest + "#new")
861 os.rename(dest + "#new", dest)
862 except Exception as e:
863 logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
864 return False
865 finally:
866 if srcchown:
867 os.chmod(src, sstat[stat.ST_MODE])
868 os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
869
870 else:
871 #we don't yet handle special, so we need to fall back to /bin/mv
872 a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
873 if a[0] != 0:
874 logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
875 return False # failure
876 try:
877 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
878 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
879 except Exception as e:
880 logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
881 return False
882
883 if newmtime:
884 os.utime(dest, (newmtime, newmtime))
885 else:
886 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
887 newmtime = sstat[stat.ST_MTIME]
888 return newmtime
889
890def which(path, item, direction = 0, history = False):
891 """
892 Locate a file in a PATH
893 """
894
895 hist = []
896 paths = (path or "").split(':')
897 if direction != 0:
898 paths.reverse()
899
900 for p in paths:
901 next = os.path.join(p, item)
902 hist.append(next)
903 if os.path.exists(next):
904 if not os.path.isabs(next):
905 next = os.path.abspath(next)
906 if history:
907 return next, hist
908 return next
909
910 if history:
911 return "", hist
912 return ""
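
# Illustrative example: which("/usr/sbin:/usr/bin", "ip") returns the first
# existing candidate (e.g. "/usr/sbin/ip"), or "" if none exists; with
# history=True a (result, candidates-tried) tuple is returned instead.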
913
914def to_boolean(string, default=None):
915 if not string:
916 return default
917
918 normalized = string.lower()
919 if normalized in ("y", "yes", "1", "true"):
920 return True
921 elif normalized in ("n", "no", "0", "false"):
922 return False
923 else:
924 raise ValueError("Invalid value for to_boolean: %s" % string)
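
# Informal examples: to_boolean("Yes") -> True, to_boolean("0") -> False,
# to_boolean("", default=True) -> True, and anything unrecognised raises
# ValueError.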
925
926def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.
928
929 Arguments:
930
931 variable -- the variable name. This will be fetched and expanded (using
932 d.getVar(variable, True)) and then split into a set().
933
934 checkvalues -- if this is a string it is split on whitespace into a set(),
935 otherwise coerced directly into a set().
936
937 truevalue -- the value to return if checkvalues is a subset of variable.
938
939 falsevalue -- the value to return if variable is empty or if checkvalues is
940 not a subset of variable.
941
942 d -- the data store.
943 """
944
    val = d.getVar(variable, True)
946 if not val:
947 return falsevalue
948 val = set(val.split())
949 if isinstance(checkvalues, basestring):
950 checkvalues = set(checkvalues.split())
951 else:
952 checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
955 return falsevalue
956
957def contains_any(variable, checkvalues, truevalue, falsevalue, d):
958 val = d.getVar(variable, True)
959 if not val:
960 return falsevalue
961 val = set(val.split())
962 if isinstance(checkvalues, basestring):
963 checkvalues = set(checkvalues.split())
964 else:
965 checkvalues = set(checkvalues)
966 if checkvalues & val:
967 return truevalue
968 return falsevalue
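
# Illustrative use, assuming d is a BitBake datastore:
#   contains('DISTRO_FEATURES', 'x11 opengl', 'yes', 'no', d)
# returns 'yes' only if *both* x11 and opengl are present, whereas
# contains_any() returns the true value if *either* is present.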
969
970def cpu_count():
971 return multiprocessing.cpu_count()
972
973def nonblockingfd(fd):
974 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
975
976def process_profilelog(fn, pout = None):
977 # Either call with a list of filenames and set pout or a filename and optionally pout.
978 if not pout:
979 pout = fn + '.processed'
980 pout = open(pout, 'w')
981
982 import pstats
983 if isinstance(fn, list):
984 p = pstats.Stats(*fn, stream=pout)
985 else:
986 p = pstats.Stats(fn, stream=pout)
987 p.sort_stats('time')
988 p.print_stats()
989 p.print_callers()
990 p.sort_stats('cumulative')
991 p.print_stats()
992
993 pout.flush()
994 pout.close()
995
996#
997# Was present to work around multiprocessing pool bugs in python < 2.7.3
998#
999def multiprocessingpool(*args, **kwargs):
1000
1001 import multiprocessing.pool
1002 #import multiprocessing.util
1003 #multiprocessing.util.log_to_stderr(10)
1004 # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
1005 # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
1006 def wrapper(func):
1007 def wrap(self, timeout=None):
1008 return func(self, timeout=timeout if timeout is not None else 1e100)
1009 return wrap
1010 multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
1011
1012 return multiprocessing.Pool(*args, **kwargs)
1013
1014def exec_flat_python_func(func, *args, **kwargs):
1015 """Execute a flat python function (defined with def funcname(args):...)"""
1016 # Prepare a small piece of python code which calls the requested function
1017 # To do this we need to prepare two things - a set of variables we can use to pass
1018 # the values of arguments into the calling function, and the list of arguments for
1019 # the function being called
1020 context = {}
1021 funcargs = []
1022 # Handle unnamed arguments
1023 aidx = 1
1024 for arg in args:
1025 argname = 'arg_%s' % aidx
1026 context[argname] = arg
1027 funcargs.append(argname)
1028 aidx += 1
1029 # Handle keyword arguments
1030 context.update(kwargs)
1031 funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
1032 code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
1033 comp = bb.utils.better_compile(code, '<string>', '<string>')
1034 bb.utils.better_exec(comp, context, code, '<string>')
1035 return context['retval']
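
# Informal example: exec_flat_python_func('max', 3, 5) builds and runs
# "retval = max(arg_1, arg_2)" and therefore returns 5; keyword arguments are
# passed through by name in the same way.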
1036
1037def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1038 """Edit lines from a recipe or config file and modify one or more
1039 specified variable values set in the file using a specified callback
1040 function. Lines are expected to have trailing newlines.
1041 Parameters:
1042 meta_lines: lines from the file; can be a list or an iterable
1043 (e.g. file pointer)
1044 variables: a list of variable names to look for. Functions
1045 may also be specified, but must be specified with '()' at
1046 the end of the name. Note that the function doesn't have
1047 any intrinsic understanding of _append, _prepend, _remove,
1048 or overrides, so these are considered as part of the name.
1049 These values go into a regular expression, so regular
1050 expression syntax is allowed.
1051 varfunc: callback function called for every variable matching
1052 one of the entries in the variables parameter. The function
1053 should take four arguments:
1054 varname: name of variable matched
1055 origvalue: current value in file
1056 op: the operator (e.g. '+=')
1057 newlines: list of lines up to this point. You can use
1058 this to prepend lines before this variable setting
1059 if you wish.
            and should return a four-element tuple:
1061 newvalue: new value to substitute in, or None to drop
1062 the variable setting entirely. (If the removal
1063 results in two consecutive blank lines, one of the
1064 blank lines will also be dropped).
1065 newop: the operator to use - if you specify None here,
1066 the original operation will be used.
1067 indent: number of spaces to indent multi-line entries,
1068 or -1 to indent up to the level of the assignment
1069 and opening quote, or a string to use as the indent.
1070 minbreak: True to allow the first element of a
1071 multi-line value to continue on the same line as
1072 the assignment, False to indent before the first
1073 element.
1074 match_overrides: True to match items with _overrides on the end,
1075 False otherwise
1076 Returns a tuple:
1077 updated:
1078 True if changes were made, False otherwise.
1079 newlines:
1080 Lines after processing
1081 """
1082
1083 var_res = {}
1084 if match_overrides:
1085 override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
1086 else:
1087 override_re = ''
1088 for var in variables:
1089 if var.endswith('()'):
1090 var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
1091 else:
1092 var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
1093
1094 updated = False
1095 varset_start = ''
1096 varlines = []
1097 newlines = []
1098 in_var = None
1099 full_value = ''
1100 var_end = ''
1101
1102 def handle_var_end():
1103 prerun_newlines = newlines[:]
1104 op = varset_start[len(in_var):].strip()
1105 (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
1106 changed = (prerun_newlines != newlines)
1107
1108 if newvalue is None:
1109 # Drop the value
1110 return True
1111 elif newvalue != full_value or (newop not in [None, op]):
1112 if newop not in [None, op]:
1113 # Callback changed the operator
1114 varset_new = "%s %s" % (in_var, newop)
1115 else:
1116 varset_new = varset_start
1117
1118 if isinstance(indent, (int, long)):
1119 if indent == -1:
1120 indentspc = ' ' * (len(varset_new) + 2)
1121 else:
1122 indentspc = ' ' * indent
1123 else:
1124 indentspc = indent
1125 if in_var.endswith('()'):
1126 # A function definition
1127 if isinstance(newvalue, list):
1128 newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
1129 else:
1130 if not newvalue.startswith('\n'):
1131 newvalue = '\n' + newvalue
1132 if not newvalue.endswith('\n'):
1133 newvalue = newvalue + '\n'
1134 newlines.append('%s {%s}\n' % (varset_new, newvalue))
1135 else:
1136 # Normal variable
1137 if isinstance(newvalue, list):
1138 if not newvalue:
1139 # Empty list -> empty string
1140 newlines.append('%s ""\n' % varset_new)
1141 elif minbreak:
1142 # First item on first line
1143 if len(newvalue) == 1:
1144 newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
1145 else:
1146 newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
1147 for item in newvalue[1:]:
1148 newlines.append('%s%s \\\n' % (indentspc, item))
1149 newlines.append('%s"\n' % indentspc)
1150 else:
1151 # No item on first line
1152 newlines.append('%s " \\\n' % varset_new)
1153 for item in newvalue:
1154 newlines.append('%s%s \\\n' % (indentspc, item))
1155 newlines.append('%s"\n' % indentspc)
1156 else:
1157 newlines.append('%s "%s"\n' % (varset_new, newvalue))
1158 return True
1159 else:
1160 # Put the old lines back where they were
1161 newlines.extend(varlines)
1162 # If newlines was touched by the function, we'll need to return True
1163 return changed
1164
1165 checkspc = False
1166
1167 for line in meta_lines:
1168 if in_var:
1169 value = line.rstrip()
1170 varlines.append(line)
1171 if in_var.endswith('()'):
1172 full_value += '\n' + value
1173 else:
1174 full_value += value[:-1]
1175 if value.endswith(var_end):
1176 if in_var.endswith('()'):
1177 if full_value.count('{') - full_value.count('}') >= 0:
1178 continue
                    full_value = full_value[:-1]
                if handle_var_end():
1181 updated = True
1182 checkspc = True
1183 in_var = None
1184 else:
1185 skip = False
1186 for (varname, var_re) in var_res.iteritems():
1187 res = var_re.match(line)
1188 if res:
1189 isfunc = varname.endswith('()')
1190 if isfunc:
1191 splitvalue = line.split('{', 1)
1192 var_end = '}'
1193 else:
1194 var_end = res.groups()[-1]
1195 splitvalue = line.split(var_end, 1)
1196 varset_start = splitvalue[0].rstrip()
1197 value = splitvalue[1].rstrip()
1198 if not isfunc and value.endswith('\\'):
1199 value = value[:-1]
1200 full_value = value
1201 varlines = [line]
1202 in_var = res.group(1)
1203 if isfunc:
1204 in_var += '()'
1205 if value.endswith(var_end):
1206 full_value = full_value[:-1]
1207 if handle_var_end():
1208 updated = True
1209 checkspc = True
1210 in_var = None
1211 skip = True
1212 break
1213 if not skip:
1214 if checkspc:
1215 checkspc = False
                if newlines and newlines[-1] == '\n' and line == '\n':
                    # Squash blank line if there are two consecutive blanks after a removal
1218 continue
1219 newlines.append(line)
1220 return (updated, newlines)
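
# Hypothetical callback for the function above: force every PR assignment to
# "r1" (variable choice is illustrative):
#   def bump_pr(varname, origvalue, op, newlines):
#       return ('r1', None, 4, False)
#   updated, newlines = bb.utils.edit_metadata(lines, ['PR'], bump_pr)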
1221
1222
1223def edit_metadata_file(meta_file, variables, varfunc):
1224 """Edit a recipe or config file and modify one or more specified
1225 variable values set in the file using a specified callback function.
1226 The file is only written to if the value(s) actually change.
1227 This is basically the file version of edit_metadata(), see that
1228 function's description for parameter/usage information.
1229 Returns True if the file was written to, False otherwise.
1230 """
1231 with open(meta_file, 'r') as f:
1232 (updated, newlines) = edit_metadata(f, variables, varfunc)
1233 if updated:
1234 with open(meta_file, 'w') as f:
1235 f.writelines(newlines)
1236 return updated
1237
1238
1239def edit_bblayers_conf(bblayers_conf, add, remove):
    """Edit bblayers.conf, adding and/or removing layers
1241 Parameters:
1242 bblayers_conf: path to bblayers.conf file to edit
1243 add: layer path (or list of layer paths) to add; None or empty
1244 list to add nothing
1245 remove: layer path (or list of layer paths) to remove; None or
1246 empty list to remove nothing
1247 Returns a tuple:
1248 notadded: list of layers specified to be added but weren't
1249 (because they were already in the list)
1250 notremoved: list of layers that were specified to be removed
1251 but weren't (because they weren't in the list)
1252 """

    import fnmatch
1255
1256 def remove_trailing_sep(pth):
1257 if pth and pth[-1] == os.sep:
1258 pth = pth[:-1]
1259 return pth
1260
    approved = bb.utils.approved_variables()
1262 def canonicalise_path(pth):
1263 pth = remove_trailing_sep(pth)
1264 if 'HOME' in approved and '~' in pth:
1265 pth = os.path.expanduser(pth)
1266 return pth
1267
    def layerlist_param(value):
1269 if not value:
1270 return []
1271 elif isinstance(value, list):
1272 return [remove_trailing_sep(x) for x in value]
1273 else:
1274 return [remove_trailing_sep(value)]
1275
    addlayers = layerlist_param(add)
1277 removelayers = layerlist_param(remove)
1278
1279 # Need to use a list here because we can't set non-local variables from a callback in python 2.x
1280 bblayercalls = []
    removed = []
1282 plusequals = False
1283 orig_bblayers = []
1284
1285 def handle_bblayers_firstpass(varname, origvalue, op, newlines):
1286 bblayercalls.append(op)
1287 if op == '=':
1288 del orig_bblayers[:]
1289 orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
1290 return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
1294 bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
1295 if removelayers:
1296 for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
1305 if addlayer not in bblayers:
1306 updated = True
1307 bblayers.append(addlayer)
            del addlayers[:]

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
1314 else:
1315 return (origvalue, None, 2, False)
1316
    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

1320 if not bblayercalls:
1321 raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
1322
    # Try to do the "smart" thing depending on how the user has laid out
1324 # their bblayers.conf file
1325 if bblayercalls.count('+=') > 1:
1326 plusequals = True
1327
1328 removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
1329 notadded = []
1330 for layer in addlayers:
1331 layer_canon = canonicalise_path(layer)
1332 if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
1333 notadded.append(layer)
1334 notadded_canon = [canonicalise_path(layer) for layer in notadded]
1335 addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]
1336
1337 (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
1338 if addlayers:
1339 # Still need to add these
1340 for addlayer in addlayers:
1341 newlines.append('BBLAYERS += "%s"\n' % addlayer)
1342 updated = True
1343
1344 if updated:
1345 with open(bblayers_conf, 'w') as f:
1346 f.writelines(newlines)
1347
1348 notremoved = list(set(removelayers) - set(removed))
1349
    return (notadded, notremoved)
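
# Hypothetical usage (paths are illustrative): add one layer, remove none:
#   notadded, notremoved = bb.utils.edit_bblayers_conf(
#       'conf/bblayers.conf', '/path/to/meta-example', None)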
1351
1352
1353def get_file_layer(filename, d):
1354 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
1355 collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
1356 collection_res = {}
1357 for collection in collections:
1358 collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''
1359
1360 def path_to_layer(path):
1361 # Use longest path so we handle nested layers
1362 matchlen = 0
1363 match = None
1364 for collection, regex in collection_res.iteritems():
1365 if len(regex) > matchlen and re.match(regex, path):
1366 matchlen = len(regex)
1367 match = collection
1368 return match
1369
1370 result = None
1371 bbfiles = (d.getVar('BBFILES', True) or '').split()
1372 bbfilesmatch = False
1373 for bbfilesentry in bbfiles:
1374 if fnmatch.fnmatch(filename, bbfilesentry):
1375 bbfilesmatch = True
1376 result = path_to_layer(bbfilesentry)
1377
1378 if not bbfilesmatch:
1379 # Probably a bbclass
1380 result = path_to_layer(filename)
1381
1382 return result
1383
1384
1385# Constant taken from http://linux.die.net/include/linux/prctl.h
1386PR_SET_PDEATHSIG = 1
1387
1388class PrCtlError(Exception):
1389 pass
1390
1391def signal_on_parent_exit(signame):
1392 """
1393 Trigger signame to be sent when the parent process dies
1394 """
1395 signum = getattr(signal, signame)
1396 # http://linux.die.net/man/2/prctl
1397 result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
1398 if result != 0:
1399 raise PrCtlError('prctl failed with error code %s' % result)

#
1402# Manually call the ioprio syscall. We could depend on other libs like psutil
1403# however this gets us enough of what we need to bitbake for now without the
1404# dependency
1405#
1406_unamearch = os.uname()[4]
1407IOPRIO_WHO_PROCESS = 1
1408IOPRIO_CLASS_SHIFT = 13
1409
1410def ioprio_set(who, cls, value):
1411 NR_ioprio_set = None
1412 if _unamearch == "x86_64":
1413 NR_ioprio_set = 251
1414 elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
1415 NR_ioprio_set = 289
1416
1417 if NR_ioprio_set:
1418 ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
1419 rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
1420 if rc != 0:
1421 raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
1422 else:
1423 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)

def set_process_name(name):
1426 from ctypes import cdll, byref, create_string_buffer
1427 # This is nice to have for debugging, not essential
1428 try:
1429 libc = cdll.LoadLibrary('libc.so.6')
1430 buff = create_string_buffer(len(name)+1)
1431 buff.value = name
1432 libc.prctl(15, byref(buff), 0, 0, 0)
1433 except:
1434 pass
1435
1436# export common proxies variables from datastore to environment
1437def export_proxies(d):
1438 import os
1439
1440 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
1441 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
1442 exported = False
1443
1444 for v in variables:
1445 if v in os.environ.keys():
1446 exported = True
1447 else:
1448 v_proxy = d.getVar(v, True)
1449 if v_proxy is not None:
1450 os.environ[v] = v_proxy
1451 exported = True
1452
1453 return exported