blob: 91faa494ca5adb84e6997c5570ab3e88d2b8558b [file] [log] [blame]
Patrick Williamsc124f4f2015-09-15 14:41:29 -05001# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import subprocess
31import glob
32import fnmatch
33import traceback
34import errno
35import signal
36from commands import getstatusoutput
37from contextlib import contextmanager
38from ctypes import cdll
39
40
# Module-level logger used by all helpers in this file.
logger = logging.getLogger("BitBake.Util")
42
def clean_context():
    """Return a fresh, minimal globals dict for executing metadata python
    code; only the os, bb and time modules are exposed."""
    return dict(os=os, bb=bb, time=time)
49
def get_context():
    """Return the shared context dict used as globals by better_exec/better_eval."""
    return _context
52
53
def set_context(ctx):
    """Replace the shared context dict used by better_exec/better_eval.

    Without the ``global`` declaration the assignment would only create a
    function-local name and the call would be a silent no-op.
    """
    global _context
    _context = ctx
56
# Context used in better_exec, eval: the shared globals dict, readable via
# get_context() and replaceable via set_context().
_context = clean_context()
59
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found
    (e.g. an unsupported or missing comparison operator)."""
62
def explode_version(s):
    """Split a version string into a list of typed components.

    Each component is a (weight, value) tuple: (0, int) for digit runs,
    (1, str) for letter runs, (-1, '~') for tildes (which sort before
    anything) and (2, char) for any other separator character. The weights
    give vercmp_part() a total ordering across component types.
    """
    r = []
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        # string.ascii_letters instead of the Python-2-only, locale-dependent
        # string.letters: the regexp below only matches ASCII letters anyway.
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r
84
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)."""
    s = s.strip(" <>=")
    pe = 0
    if ':' in s:
        fields = s.split(":")
        pe = int(fields[0])
        s = fields[1]
    pr = ""
    if '-' in s:
        s, pr = s.rsplit("-", 1)
    return (pe, s, pr)
98
def vercmp_part(a, b):
    """Compare two version fragments component-wise.

    Returns -1, 0 or 1. Components come from explode_version(); when one
    fragment is exhausted it contributes the sentinel (0, None), which sorts
    before any real component of the same weight.
    """
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        # Explicit sentinel ordering: Python 2 happened to compare
        # None < int, but mixed-type comparison raises TypeError on
        # Python 3, so spell out "exhausted sorts first".
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1
121
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples; return <0, 0 or >0."""
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epoch dominates; fall through to version, then revision, on ties.
    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
132
def vercmp_string(a, b):
    """Compare two full version strings; return <0, 0 or >0."""
    return vercmp(split_version(a), split_version(b))
137
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Table of accepted operator spellings -> predicate over the cmp result.
    checks = {
        '=':  lambda r: r == 0,
        '==': lambda r: r == 0,
        '<=': lambda r: r <= 0,
        '>=': lambda r: r >= 0,
        '>':  lambda r: r > 0,
        '>>': lambda r: r > 0,
        '<':  lambda r: r < 0,
        '<<': lambda r: r < 0,
        '!=': lambda r: r != 0,
    }
    if op not in checks:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return checks[op](res)
159
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        # A token ending in ')' closes the (possibly multi-token) version.
        if in_version and token.endswith(')'):
            in_version = False
    return deps
183
def explode_dep_versions2(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each value is a list of "<op> <version>" strings (empty list when the
    dependency has no constraint). Commas between items are ignored.
    """
    r = {}
    l = s.replace(",", "").split()
    lastdep = None       # dependency name the current constraint applies to
    lastcmp = ""         # comparison operator of the constraint being parsed
    lastver = ""         # version text accumulated so far (may span tokens)
    incmp = False        # just saw '(': next token carries the operator
    inversion = False    # currently inside a '(...)' version constraint
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                # NOTE(review): the two statements below are unreachable -
                # they follow an unconditional raise.
                lastcmp = (i or "")
                i = ""
            # NOTE(review): bare str.strip() is a no-op - the new string it
            # returns is discarded.
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        # Only reset an existing entry if it has no recorded constraints.
        if not (i in r and r[i]):
            r[lastdep] = []

    return r
248
def explode_dep_versions(s):
    """Compatibility wrapper around explode_dep_versions2() that keeps at
    most one version constraint per dependency (None when there is none)."""
    result = explode_dep_versions2(s)
    for dep in result:
        if not result[dep]:
            result[dep] = None
            continue
        if len(result[dep]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (dep, s))
        result[dep] = result[dep][0]
    return result
259
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    pieces = []
    for dep, constraints in deps.items():
        if not constraints:
            pieces.append(dep)
        elif isinstance(constraints, list):
            pieces.extend("%s (%s)" % (dep, c) for c in constraints)
        else:
            pieces.append("%s (%s)" % (dep, constraints))
    separator = ", " if commasep else " "
    return separator.join(pieces)
278
def _print_trace(body, line):
    """
    Print the Environment of a Text Body

    Returns a list of ' NNNN:<source>' strings covering up to four lines
    either side of *line* (1-based) within *body*, marking the failing
    line itself with ' *** '.
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error
293
def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        return compile(text, file, mode)
    except Exception as e:
        # Build a readable report around the failing line(s) before
        # re-raising as a handled exception.
        report = []
        body = text.split('\n')
        report.append("Error in compiling python function in %s:\n" % realfile)
        if e.lineno:
            report.append("The code lines resulting in this error were:")
            report.extend(_print_trace(body, e.lineno))
        else:
            report.append("The function causing this error was:")
            report.extend(body)
        report.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(report))

        raise bb.BBHandledException(e)
319
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised inside better_exec().

    t/value/tb are the sys.exc_info() triple, realfile the name reported to
    the user, text the source that was executed and context its locals dict.
    Always logs via logger.error, even if report construction itself fails.
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call)
        tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            elif "d" in context and tbextract[level+1][2]:
                # Try and find the code in the datastore based on the functionname
                d = context["d"]
                functionname = tbextract[level+1][2]
                text = d.getVar(functionname, True)
                if text:
                    error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
                else:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))
    finally:
        # Emit whatever was collected even if the report building raised.
        logger.error("\n".join(error))
373
def better_exec(code, context, text = None, realfile = "<code>"):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code may be a string (compiled on the fly via better_compile) or an
    already-compiled code object; context is used as the locals dict.
    Raises bb.BBHandledException after logging any failure.
    """
    import bb.parse
    if not text:
        text = code
    # A code object already has co_filename; only compile raw strings.
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            # Never let the error reporter itself take the process down.
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e
399
def simple_exec(code, context):
    """Execute *code* with the shared context as globals and *context* as
    locals, without better_exec's error reporting."""
    exec(code, get_context(), context)
402
def better_eval(source, locals):
    """Evaluate *source* with the shared context as globals and *locals*
    as the local namespace; returns the expression's value."""
    return eval(source, get_context(), locals)
405
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks.

    Acquires a lock for every path in *files* (may be empty/None) before
    yielding, and always releases them afterwards - including when the
    managed block raises, which the previous version did not handle and so
    leaked the locks.
    """
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
418
@contextmanager
def timeout(seconds):
    """Arm a SIGALRM for the duration of the managed block.

    The installed handler is a no-op; the alarm's visible effect is to
    interrupt blocking system calls inside the block. On exit the pending
    alarm is cancelled and the previous SIGALRM handler restored.
    """
    def noop_handler(signum, frame):
        pass

    previous = signal.signal(signal.SIGALRM, noop_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)
432
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    # Only when neither retrying nor blocking can flock() be allowed to
    # fail immediately (LOCK_NB).
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            # The file we locked was deleted/replaced on disk - retry.
            lf.close()
        except Exception:
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
487
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    fd = lf.fileno()
    try:
        # If we only held a shared lock it is not safe to delete the file
        # while others may still hold it; attempt a non-blocking promotion
        # to exclusive first and silently leave the file behind on failure.
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(fd, fcntl.LOCK_UN)
    lf.close()
501
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    try:
        import hashlib
        checksum = hashlib.md5()
    except ImportError:
        # Ancient-Python fallback (hashlib arrived in 2.5).
        import md5
        checksum = md5.new()

    with open(filename, "rb") as f:
        for chunk in f:
            checksum.update(chunk)
    return checksum.hexdigest()
517
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename. On Python 2.4 this will return None, so callers will need to
    handle that by either skipping SHA checks, or running a standalone sha256sum
    binary.
    """
    try:
        import hashlib
    except ImportError:
        return None

    checksum = hashlib.sha256()
    with open(filename, "rb") as f:
        for chunk in f:
            checksum.update(chunk)
    return checksum.hexdigest()
535
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return 'BB_TASKHASH HOME LOGNAME PATH PWD SHELL TERM USER'.split()
549
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    # BitBake's own control variables, plus everything that is also exported.
    own = ['BBPATH', 'BB_PRESERVE_ENV', 'BB_ENV_WHITELIST', 'BB_ENV_EXTRAWHITE']
    return own + preserved_envvars_exported()
559
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict mapping each removed variable to its previous value.
    """
    removed_vars = {}
    # Iterate over a snapshot of the keys: we delete from os.environ inside
    # the loop, and mutating a mapping while iterating its live key view
    # raises RuntimeError on Python 3 (on Python 2 keys() was already a
    # list, so this is a no-op there).
    for key in list(os.environ.keys()):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        os.unsetenv(key)
        del os.environ[key]

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
579
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    # BB_PRESERVE_ENV short-circuits everything: keep the lot.
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved
598
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
609
def empty_environment():
    """
    Remove all variables from the environment.
    """
    # Snapshot the keys first: deleting from os.environ while iterating its
    # live key view raises RuntimeError on Python 3.
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]
617
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every variable in datastore *d* whose "export" flag is set is written
    into os.environ (empty string when its value expands to None).
    """
    import bb.data
    for var in bb.data.keys(d):
        if not d.getVarFlag(var, "export"):
            continue
        os.environ[var] = d.getVar(var, True) or ""
627
628def _check_unsafe_delete_path(path):
629 """
630 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
631 the caller should raise an exception with an appropriate message.
632 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
633 with potentially disastrous results.
634 """
635 extra = ''
636 # HOME might not be /home/something, so in case we can get it, check against it
637 homedir = os.environ.get('HOME', '')
638 if homedir:
639 extra = '|%s' % homedir
640 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
641 return True
642 return False
643
def remove(path, recurse=False):
    """Equivalent to rm -f (or rm -rf with recurse=True) on a glob pattern."""
    if not path:
        return
    matches = glob.glob(path)
    if recurse:
        for _ in matches:
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree() would be ideal but its too slow
        subprocess.call(['rm', '-rf'] + glob.glob(path))
        return
    for name in matches:
        try:
            os.unlink(name)
        except OSError as exc:
            # rm -f semantics: a file vanishing underneath us is fine.
            if exc.errno != errno.ENOENT:
                raise
661
def prunedir(topdir):
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION: This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    # Walk bottom-up so every directory is empty by the time we rmdir it;
    # symlinked directories are unlinked, not descended into.
    for root, dirs, files in os.walk(topdir, topdown=False):
        for entry in files:
            os.remove(os.path.join(root, entry))
        for entry in dirs:
            full = os.path.join(root, entry)
            if os.path.islink(full):
                os.remove(full)
            else:
                os.rmdir(full)
    os.rmdir(topdir)
676
677#
678# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
679# but thats possibly insane and suffixes is probably going to be small
680#
def prune_suffix(var, suffixes, d):
    """Return var with the first matching trailing suffix removed.

    Only the trailing occurrence is stripped: slicing is used instead of
    str.replace(), which would also delete any earlier occurrences of the
    suffix inside the string. d is unused but kept for call compatibility.
    """
    for suffix in suffixes:
        # Guard against the empty suffix: var[:-0] would return "".
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
688
def mkdirhier(directory):
    """Create *directory* and any missing parents, like 'mkdir -p'.

    Unlike plain os.makedirs, an already-existing directory is not an
    error; every other OSError is re-raised.
    """
    try:
        os.makedirs(directory)
    except OSError as exc:
        # Only swallow "already exists"; permission problems etc. propagate.
        if exc.errno != errno.EEXIST:
            raise exc
699
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    Returns the resulting mtime on success (os.lstat result for symlinks),
    or None on any failure. sstat may be passed to avoid re-stating src.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest missing: stat its directory instead (used for the same-device
        # check below).
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at dest so we replace, not follow, it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest rather than copying
        # the target.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        # Same device: a plain rename is atomic and preserves everything.
        try:
            # os.rename needs to know the dest path ending with file name
            # so append the file name to a path only if it's a dir specified
            srcfname = os.path.basename(src)
            destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                        else dest
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): e[0] relies on Python 2 exception indexing;
            # on Python 3 this would need e.errno.
            if e[0] != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        # Cross-device move: copy to a temp name then rename into place so
        # readers never see a half-written dest.
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, dest + "#new")
                os.rename(dest + "#new", dest)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
791
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.

    Returns the resulting mtime on success (os.lstat result for symlinks)
    and False on failure. sstat may be passed to avoid re-stating src.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest missing: stat its directory instead.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at dest so we replace, not follow, it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link rather than copying its target.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the original mode/times if we had to chmod src readable.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
871
def which(path, item, direction = 0, history = False):
    """Locate *item* within the colon-separated *path*.

    direction 0 searches front-to-back, any other value back-to-front.
    Returns the first existing candidate (made absolute if found via a
    relative entry) or "" when not found; with history=True a (result,
    candidates-tried) tuple is returned instead.
    """
    attempted = []
    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    for entry in entries:
        candidate = os.path.join(entry, item)
        attempted.append(candidate)
        if not os.path.exists(candidate):
            continue
        if not os.path.isabs(candidate):
            candidate = os.path.abspath(candidate)
        return (candidate, attempted) if history else candidate

    return ("", attempted) if history else ""
895
def to_boolean(string, default=None):
    """Map common yes/no spellings to a bool.

    Falsy input (None, "") yields *default*; unrecognised text raises
    ValueError.
    """
    if not string:
        return default

    value = string.lower()
    if value in ("y", "yes", "1", "true"):
        return True
    if value in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
907
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue if ALL of *checkvalues* occur in the space-separated
    value of *variable* in datastore *d*, else falsevalue.

    checkvalues may be a string (split on whitespace) or any iterable.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    present = set(val.split())
    if isinstance(checkvalues, basestring):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    return truevalue if wanted.issubset(present) else falsevalue
920
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue if ANY of *checkvalues* occurs in the space-separated
    value of *variable* in datastore *d*, else falsevalue.

    checkvalues may be a string (split on whitespace) or any iterable.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    present = set(val.split())
    if isinstance(checkvalues, basestring):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    return truevalue if wanted & present else falsevalue
933
def cpu_count():
    """Return the number of CPUs reported by the host."""
    return multiprocessing.cpu_count()
936
def nonblockingfd(fd):
    """Switch file descriptor *fd* to non-blocking mode, preserving its
    other status flags."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
939
def process_profilelog(fn, pout = None):
    """Post-process cProfile dump(s) into a readable text report.

    fn is a single stats filename (report written to fn + '.processed'
    unless pout is given) or a list of filenames (pout must then be
    supplied by the caller).
    """
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')

    import pstats
    if isinstance(fn, list):
        stats = pstats.Stats(*fn, stream=pout)
    else:
        stats = pstats.Stats(fn, stream=pout)
    # Hot-spot view first, then callers, then the cumulative view.
    stats.sort_stats('time')
    stats.print_stats()
    stats.print_callers()
    stats.sort_stats('cumulative')
    stats.print_stats()

    pout.flush()
    pout.close()
958 pout.close()
959
960#
961# Was present to work around multiprocessing pool bugs in python < 2.7.3
962#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose IMapIterator.next() uses a huge
    default timeout, so pending signals (SIGINT/SIGTERM) get processed
    instead of being deferred until the work completes.
    """

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # NOTE: this rebinding is process-global, not limited to the pool
    # created below.
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
977
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)

    func is the function's name as visible in the execution context;
    positional and keyword arguments are forwarded and the function's
    return value is returned.
    """
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    # Iterate the dict directly rather than via the Python-2-only
    # iterkeys(); behaviour is identical on Python 2 and it also runs on
    # Python 3.
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1000
1001def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1002 """Edit lines from a recipe or config file and modify one or more
1003 specified variable values set in the file using a specified callback
1004 function. Lines are expected to have trailing newlines.
1005 Parameters:
1006 meta_lines: lines from the file; can be a list or an iterable
1007 (e.g. file pointer)
1008 variables: a list of variable names to look for. Functions
1009 may also be specified, but must be specified with '()' at
1010 the end of the name. Note that the function doesn't have
1011 any intrinsic understanding of _append, _prepend, _remove,
1012 or overrides, so these are considered as part of the name.
1013 These values go into a regular expression, so regular
1014 expression syntax is allowed.
1015 varfunc: callback function called for every variable matching
1016 one of the entries in the variables parameter. The function
1017 should take four arguments:
1018 varname: name of variable matched
1019 origvalue: current value in file
1020 op: the operator (e.g. '+=')
1021 newlines: list of lines up to this point. You can use
1022 this to prepend lines before this variable setting
1023 if you wish.
1024 and should return a three-element tuple:
1025 newvalue: new value to substitute in, or None to drop
1026 the variable setting entirely. (If the removal
1027 results in two consecutive blank lines, one of the
1028 blank lines will also be dropped).
1029 newop: the operator to use - if you specify None here,
1030 the original operation will be used.
1031 indent: number of spaces to indent multi-line entries,
1032 or -1 to indent up to the level of the assignment
1033 and opening quote, or a string to use as the indent.
1034 minbreak: True to allow the first element of a
1035 multi-line value to continue on the same line as
1036 the assignment, False to indent before the first
1037 element.
1038 match_overrides: True to match items with _overrides on the end,
1039 False otherwise
1040 Returns a tuple:
1041 updated:
1042 True if changes were made, False otherwise.
1043 newlines:
1044 Lines after processing
1045 """
1046
1047 var_res = {}
1048 if match_overrides:
1049 override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
1050 else:
1051 override_re = ''
1052 for var in variables:
1053 if var.endswith('()'):
1054 var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
1055 else:
1056 var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
1057
1058 updated = False
1059 varset_start = ''
1060 varlines = []
1061 newlines = []
1062 in_var = None
1063 full_value = ''
1064 var_end = ''
1065
    def handle_var_end():
        """Invoke the user callback for the variable/function that just ended
        and append the (possibly rewritten) setting to newlines.
        Returns True if the output differs from the input lines, i.e. the
        caller should mark the file as updated."""
        # Snapshot so we can detect if the callback itself appended lines
        prerun_newlines = newlines[:]
        # The operator is whatever sits between the variable name and the value
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            # indent may be a count of spaces, -1 (align under the opening
            # quote), or a literal string to use verbatim
            if isinstance(indent, (int, long)):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    # Ensure the braces end up on their own lines
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    # Single-string value: emit as a plain one-line assignment
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed
1128
1129 checkspc = False
1130
1131 for line in meta_lines:
1132 if in_var:
1133 value = line.rstrip()
1134 varlines.append(line)
1135 if in_var.endswith('()'):
1136 full_value += '\n' + value
1137 else:
1138 full_value += value[:-1]
1139 if value.endswith(var_end):
1140 if in_var.endswith('()'):
1141 if full_value.count('{') - full_value.count('}') >= 0:
1142 continue
1143 full_value = full_value[:-1]
1144 if handle_var_end():
1145 updated = True
1146 checkspc = True
1147 in_var = None
1148 else:
1149 skip = False
1150 for (varname, var_re) in var_res.iteritems():
1151 res = var_re.match(line)
1152 if res:
1153 isfunc = varname.endswith('()')
1154 if isfunc:
1155 splitvalue = line.split('{', 1)
1156 var_end = '}'
1157 else:
1158 var_end = res.groups()[-1]
1159 splitvalue = line.split(var_end, 1)
1160 varset_start = splitvalue[0].rstrip()
1161 value = splitvalue[1].rstrip()
1162 if not isfunc and value.endswith('\\'):
1163 value = value[:-1]
1164 full_value = value
1165 varlines = [line]
1166 in_var = res.group(1)
1167 if isfunc:
1168 in_var += '()'
1169 if value.endswith(var_end):
1170 full_value = full_value[:-1]
1171 if handle_var_end():
1172 updated = True
1173 checkspc = True
1174 in_var = None
1175 skip = True
1176 break
1177 if not skip:
1178 if checkspc:
1179 checkspc = False
1180 if newlines[-1] == '\n' and line == '\n':
1181 # Squash blank line if there are two consecutive blanks after a removal
1182 continue
1183 newlines.append(line)
1184 return (updated, newlines)
1185
1186
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file, modifying one or more of the specified
    variable values via a callback function.

    This is the file-based counterpart of edit_metadata() - see that
    function's description for the parameter/usage details. The file is
    only rewritten when at least one value actually changed.
    Returns True if the file was written to, False otherwise.
    """
    meta_fd = open(meta_file, 'r')
    try:
        updated, newlines = edit_metadata(meta_fd, variables, varfunc)
    finally:
        meta_fd.close()
    if not updated:
        return updated
    with open(meta_file, 'w') as outf:
        outf.writelines(newlines)
    return updated
1201
1202
def edit_bblayers_conf(bblayers_conf, add, remove):
    """Edit bblayers.conf, adding and/or removing layers.

    Arguments:
        bblayers_conf: path to the bblayers.conf file to edit
        add: layer path to add (a string), a list of paths, or None/empty
        remove: layer path/pattern to remove (fnmatch-style patterns are
            accepted), a list of these, or None/empty

    Returns a tuple (notadded, notremoved):
        notadded: layers requested for addition that were already present
        notremoved: layers/patterns requested for removal that did not match
    Raises Exception if no BBLAYERS setting could be found in the file.
    """
    # (fnmatch is imported at module level; the previous function-local
    # "import fnmatch" here was redundant and has been removed)

    def remove_trailing_sep(pth):
        # Normalise 'path/' -> 'path' so layer comparisons are reliable
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    def layerlist_param(value):
        # Accept None, a single path string, or a list of paths
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    notadded = []
    notremoved = []

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []

    def handle_bblayers(varname, origvalue, op, newlines):
        # Callback for edit_metadata_file(): rewrites the BBLAYERS value,
        # recording which requested additions/removals had no effect
        bblayercalls.append(varname)
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                matched = False
                for layer in bblayers:
                    # Patterns remove only the first matching layer entry
                    if fnmatch.fnmatch(layer, removelayer):
                        updated = True
                        matched = True
                        bblayers.remove(layer)
                        break
                if not matched:
                    notremoved.append(removelayer)
        if addlayers:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
                else:
                    notadded.append(addlayer)

        if updated:
            # Keep the operator as-is, indent entries by 2, one per line
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    return (notadded, notremoved)
1264
1265
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file)
    containing the specified file.

    Arguments:
        filename: path of the file to look up (recipe, bbclass, etc.)
        d: datastore to read BBFILE_COLLECTIONS, BBFILE_PATTERN_* and
           BBFILES from

    Returns the collection name, or None if no layer pattern matched.
    """
    collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        # items() rather than iteritems(): identical iteration behaviour on
        # Python 2 and also works on Python 3 (iteritems was removed there)
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES', True) or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatch(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result
1296
1297
# Constant taken from http://linux.die.net/include/linux/prctl.h
# prctl() option: request that the kernel deliver a signal to this process
# when its parent process dies (used by signal_on_parent_exit below)
PR_SET_PDEATHSIG = 1
1300
class PrCtlError(Exception):
    """Raised when the libc prctl() call reports a failure."""
1303
def signal_on_parent_exit(signame):
    """
    Arrange for the named signal (e.g. 'SIGTERM') to be sent to this
    process when its parent process dies.
    Linux-specific: implemented via the prctl(PR_SET_PDEATHSIG) libc call.
    Raises PrCtlError if the prctl() call fails.
    """
    # Translate the signal name into its numeric value
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    rc = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if rc:
        raise PrCtlError('prctl failed with error code %s' % rc)