# Local file checksum cache implementation
#
# Copyright (C) 2012 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import glob
import operator
import os
import stat
import pickle
import bb.utils
import logging
from bb.cache import MultiProcessCache

logger = logging.getLogger("BitBake.Cache")

# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
    cache = {}

    def cached_mtime(self, f):
        if f not in self.cache:
            self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def cached_mtime_noerror(self, f):
        if f not in self.cache:
            try:
                self.cache[f] = os.stat(f)[stat.ST_MTIME]
            except OSError:
                return 0
        return self.cache[f]

    def update_mtime(self, f):
        self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def clear(self):
        self.cache.clear()

# Checksum + mtime cache (persistent)
class FileChecksumCache(MultiProcessCache):
    cache_file_name = "local_file_checksum_cache.dat"
    CACHE_VERSION = 1

    def __init__(self):
        self.mtime_cache = FileMtimeCache()
        MultiProcessCache.__init__(self)

    def get_checksum(self, f):
        """Return the checksum of f, reusing the cached value if its mtime is unchanged"""
        entry = self.cachedata[0].get(f)
        cmtime = self.mtime_cache.cached_mtime(f)
        if entry:
            (mtime, hashval) = entry
            if cmtime == mtime:
                return hashval
            else:
                bb.debug(2, "file %s changed mtime, recompute checksum" % f)

        hashval = bb.utils.md5_file(f)
        self.cachedata_extras[0][f] = (cmtime, hashval)
        return hashval

    def merge_data(self, source, dest):
        for h in source[0]:
            # The cache data is a list containing a single dict, so membership
            # must be checked against dest[0] rather than the outer list
            if h in dest[0]:
                (smtime, _) = source[0][h]
                (dmtime, _) = dest[0][h]
                if smtime > dmtime:
                    dest[0][h] = source[0][h]
            else:
                dest[0][h] = source[0][h]

    def get_checksums(self, filelist, pn):
        """Get checksums for a list of files

        filelist is a space-separated list of "path:exists" entries; entries
        whose exists flag is "False" are skipped. Globs are expanded and
        directories are walked recursively. Returns a list of
        (path, checksum) tuples sorted by checksum.
        """

        def checksum_file(f):
            try:
                checksum = self.get_checksum(f)
            except OSError as e:
                bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
                return None
            return checksum

        def checksum_dir(pth):
            # Handle directories recursively
            if pth == "/":
                bb.fatal("Refusing to checksum /")
            dirchecksums = []
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        dirchecksums.append((fullpth, checksum))
            return dirchecksums

        checksums = []
        for pth in filelist.split():
            exist = pth.split(":")[1]
            if exist == "False":
                continue
            pth = pth.split(":")[0]
            if '*' in pth:
                # Handle globs
                for f in glob.glob(pth):
                    if os.path.isdir(f):
                        if not os.path.islink(f):
                            checksums.extend(checksum_dir(f))
                    else:
                        checksum = checksum_file(f)
                        if checksum:
                            checksums.append((f, checksum))
            elif os.path.isdir(pth):
                if not os.path.islink(pth):
                    checksums.extend(checksum_dir(pth))
            else:
                checksum = checksum_file(pth)
                if checksum:
                    checksums.append((pth, checksum))

        checksums.sort(key=operator.itemgetter(1))
        return checksums
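
# A minimal usage sketch, not part of the original module. It assumes this
# file is importable from a BitBake checkout and that FileChecksumCache is
# usable standalone right after construction, without any extra cache setup.
# The paths and recipe name below are hypothetical; get_checksums() takes a
# space-separated "path:exists" list and returns (path, checksum) tuples
# sorted by checksum.
if __name__ == "__main__":
    cache = FileChecksumCache()
    # Entries flagged ":False" are skipped without being stat'd or hashed
    filelist = "/tmp/example.patch:True /tmp/missing.patch:False"
    for path, checksum in cache.get_checksums(filelist, "example-recipe"):
        print(path, checksum)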