blob: c4ff9d8de166099d6a50cac264c2396f35d36ded [file] [log] [blame]
Brad Bishopc342db32019-05-15 21:57:59 -04001#
Patrick Williams92b42cb2022-09-03 06:53:57 -05002# Copyright BitBake Contributors
3#
Brad Bishopc342db32019-05-15 21:57:59 -04004# SPDX-License-Identifier: GPL-2.0-only
5#
6
Patrick Williamsc124f4f2015-09-15 14:41:29 -05007import hashlib
8import logging
9import os
10import re
11import tempfile
Patrick Williamsc0f7c042017-02-23 20:41:17 -060012import pickle
Patrick Williamsc124f4f2015-09-15 14:41:29 -050013import bb.data
Brad Bishop6e60e8b2018-02-01 10:27:11 -050014import difflib
15import simplediff
Andrew Geisslereff27472021-10-29 15:35:00 -050016import json
Andrew Geissler517393d2023-01-13 08:55:19 -060017import types
Andrew Geisslereff27472021-10-29 15:35:00 -050018import bb.compress.zstd
Patrick Williamsd8c66bc2016-06-20 12:57:21 -050019from bb.checksum import FileChecksumCache
Brad Bishop08902b02019-08-20 09:16:51 -040020from bb import runqueue
Brad Bishopa34c0302019-09-23 22:34:48 -040021import hashserv
Andrew Geissler475cb722020-07-10 16:00:51 -050022import hashserv.client
Patrick Williamsc124f4f2015-09-15 14:41:29 -050023
24logger = logging.getLogger('BitBake.SigGen')
Andrew Geissler82c905d2020-04-13 13:39:40 -050025hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
Patrick Williamsc124f4f2015-09-15 14:41:29 -050026
class SetEncoder(json.JSONEncoder):
    """JSON encoder that can serialize set and frozenset values.

    A set is written as a tagged object {"_set_object": [members...]} so
    that SetDecoder can reconstruct it on load; the members are sorted so
    the serialized form is deterministic.
    """
    def default(self, obj):
        if isinstance(obj, (set, frozenset)):
            return dict(_set_object=sorted(obj))
        return json.JSONEncoder.default(self, obj)
32
def SetDecoder(dct):
    """JSON object_hook that reverses SetEncoder.

    Objects tagged with the "_set_object" key are revived as frozensets;
    anything else is returned unchanged.
    """
    try:
        return frozenset(dct['_set_object'])
    except KeyError:
        return dct
37
def init(d):
    """Instantiate the signature generator named by BB_SIGNATURE_HANDLER.

    Scans this module's globals for SignatureGenerator subclasses and
    returns an instance of the one whose .name matches the configured
    handler. Falls back to the no-op base generator (with an error
    message) when no generator matches.
    """
    siggens = [obj for obj in globals().values()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
    for candidate in siggens:
        if candidate.name == desired:
            return candidate(d)

    logger.error("Invalid signature generator '%s', using default 'noop'\n"
                 "Available generators: %s", desired,
                 ', '.join(obj.name for obj in siggens))
    return SignatureGenerator(d)
51
class SignatureGenerator(object):
    """
    Base "noop" signature generator.

    Defines the interface the rest of BitBake (runqueue, cooker) uses to
    compute and persist task signatures. Most methods are no-ops here;
    subclasses such as SignatureGeneratorBasic provide real hashing.
    """
    name = "noop"

    def __init__(self, data):
        # Per-tid hash state. Keys are task identifiers ("mcfn:taskname").
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.unitaskhashes = {}
        # tid -> recipe name (PN) mapping
        self.tidtopn = {}
        self.setscenetasks = set()

    def finalise(self, fn, d, varient):
        # No hash computation in the noop generator.
        return

    def postparsing_clean_cache(self):
        return

    def setup_datacache(self, datacaches):
        self.datacaches = datacaches

    def setup_datacache_from_datastore(self, mcfn, d):
        # In task context we have no cache so setup internal data structures
        # from the fully parsed data store provided

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        tasks = d.getVar('__BBTASKS', False)

        # Build a minimal stand-in for the cooker's datacache containing
        # only the stamp information this generator needs.
        self.datacaches = {}
        self.datacaches[mc] = types.SimpleNamespace()
        setattr(self.datacaches[mc], "stamp", {})
        self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
        setattr(self.datacaches[mc], "stamp_extrainfo", {})
        self.datacaches[mc].stamp_extrainfo[mcfn] = {}
        for t in tasks:
            flag = d.getVarFlag(t, "stamp-extra-info")
            if flag:
                self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag

    def get_unihash(self, tid):
        # Without a hash equivalence mechanism the unique hash is just the
        # task hash.
        return self.taskhash[tid]

    def prep_taskhash(self, tid, deps, dataCaches):
        return

    def get_taskhash(self, tid, deps, dataCaches):
        # Derive a deterministic placeholder hash from the tid alone.
        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
        return self.taskhash[tid]

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        return

    def stampfile_base(self, mcfn):
        mc = bb.runqueue.mc_from_tid(mcfn)
        return self.datacaches[mc].stamp[mcfn]

    def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
        """Return the stamp file path for taskname in mcfn, or None when
        no stamp base is recorded."""
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        if not stamp:
            return

        stamp_extrainfo = ""
        if extrainfo:
            # The stamp-extra-info flag is recorded against the real task
            # name, not its _setscene variant.
            taskflagname = taskname
            if taskname.endswith("_setscene"):
                taskflagname = taskname.replace("_setscene", "")
            stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""

        return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        # rstrip('.') tidies the name when extrainfo is empty.
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def stampcleanmask_mcfn(self, taskname, mcfn):
        """Return a glob-style mask matching stamps to clean for taskname,
        or [] when no stamp base is recorded."""
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        if not stamp:
            return []

        taskflagname = taskname
        if taskname.endswith("_setscene"):
            taskflagname = taskname.replace("_setscene", "")
        stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""

        return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def dump_sigtask(self, mcfn, task, stampbase, runtime):
        # Nothing to dump for the noop generator.
        return

    def invalidate_task(self, task, mcfn):
        # Removing the stamp forces the task to rerun.
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        bb.utils.remove(stamp)

    def dump_sigs(self, dataCache, options):
        return

    def get_taskdata(self):
        # Tuple order must match set_taskdata() below.
        return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data

    def reset(self, data):
        # Re-running __init__ clears all accumulated hash state.
        self.__init__(data)

    def get_taskhashes(self):
        # Tuple order must match set_taskhashes() below.
        return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn

    def set_taskhashes(self, hashes):
        self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes

    def save_unitaskhashes(self):
        return

    def copy_unitaskhashes(self, targetdir):
        return

    def set_setscene_tasks(self, setscene_tasks):
        return

    def exit(self):
        return
184
class SignatureGeneratorBasic(SignatureGenerator):
    """
    The "basic" signature generator: computes real per-task sha256 hashes
    from variable dependencies, runtime task dependencies, file checksums
    and taints, and can dump the data backing each signature to disk.
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.setscenetasks = set()
        # Variables excluded from base hash computation (BB_BASEHASH_IGNORE_VARS)
        self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
        self.taskhash_ignore_tasks = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

        self.unihash_cache = bb.cache.SimpleCache("3")
        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
        # Directory names skipped when checksumming local file trees
        self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
        self.tidtopn = {}

    def init_rundepcheck(self, data):
        # Compile BB_TASKHASH_IGNORE_TASKS into a regex used by
        # rundep_check(); self.twl is None when the feature is unused.
        self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
        if self.taskhash_ignore_tasks:
            self.twl = re.compile(self.taskhash_ignore_tasks)
        else:
            self.twl = None

    def _build_data(self, mcfn, d):
        """Compute per-task base hashes for mcfn from the datastore d,
        erroring (unless BB_HASH_IGNORE_MISMATCH) if a reparse produced a
        different base hash than previously recorded."""

        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)

        taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)

        for task in tasklist:
            tid = mcfn + ":" + task
            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
                bb.error("The following commands may help:")
                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
                # Make sure sigdata is dumped before run printdiff
                bb.error("%s -Snone" % cmd)
                bb.error("Then:")
                bb.error("%s -Sprintdiff\n" % cmd)
            self.basehash[tid] = basehash[tid]

        return taskdeps, gendeps, lookupcache

    def set_setscene_tasks(self, setscene_tasks):
        self.setscenetasks = set(setscene_tasks)

    def finalise(self, fn, d, variant):
        """Compute base hashes for a fully parsed recipe and stash the
        signature inputs back into the datastore (__siggen_* variables)."""

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        mcfn = fn
        if variant or mc:
            mcfn = bb.cache.realfn2virtual(fn, variant, mc)

        try:
            taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
        except bb.parse.SkipRecipe:
            raise
        except:
            bb.warn("Error during finalise of %s" % mcfn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[mcfn]:
        #    self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)

        basehashes = {}
        for task in taskdeps:
            basehashes[task] = self.basehash[mcfn + ":" + task]

        d.setVar("__siggen_basehashes", basehashes)
        d.setVar("__siggen_gendeps", gendeps)
        d.setVar("__siggen_varvals", lookupcache)
        d.setVar("__siggen_taskdeps", taskdeps)

    def setup_datacache_from_datastore(self, mcfn, d):
        super().setup_datacache_from_datastore(mcfn, d)

        # Copy the __siggen_* data recorded by finalise() into the
        # synthetic datacache so dump_sigtask() can use it in task context.
        mc = bb.runqueue.mc_from_tid(mcfn)
        for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
            if not hasattr(self.datacaches[mc], attr):
                setattr(self.datacaches[mc], attr, {})
        self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
        self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
        self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")

    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the ignore
        # list
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        """Return the contents of the task's .taint file, or None if it
        does not exist (a missing taint file is the normal case)."""
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint

    def prep_taskhash(self, tid, deps, dataCaches):
        """Collect everything get_taskhash() needs for tid: base hash,
        filtered runtime dependencies, file checksums and taints. Must be
        called before get_taskhash()."""

        (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)

        self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
        self.runtaskdeps[tid] = []
        self.file_checksum_values[tid] = []
        recipename = dataCaches[mc].pkg_fn[mcfn]

        self.tidtopn[tid] = recipename

        # Sorted iteration keeps the dependency order (and hence the hash)
        # deterministic.
        for dep in sorted(deps, key=clean_basepath):
            (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
            depname = dataCaches[depmc].pkg_fn[depmcfn]
            if not self.rundep_check(mcfn, recipename, task, dep, depname, dataCaches):
                continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            self.runtaskdeps[tid].append(dep)

        if task in dataCaches[mc].file_checksums[mcfn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
            for (f,cs) in checksums:
                self.file_checksum_values[tid].append((f,cs))

        taskdep = dataCaches[mc].task_deps[mcfn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            if tid in self.taints and self.taints[tid].startswith("nostamp:"):
                # Don't reset taint value upon every call
                pass
            else:
                import uuid
                taint = str(uuid.uuid4())
                self.taints[tid] = "nostamp:" + taint

        taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
        if taint:
            self.taints[tid] = taint
            logger.warning("%s is tainted from a forced run" % tid)

        return

    def get_taskhash(self, tid, deps, dataCaches):
        """Compute and record the task hash for tid.

        The hash input is the base hash, each runtime dependency's unihash,
        the collected file checksums and any taint, concatenated in a fixed
        order — do not reorder these additions, it would change every
        signature.
        """

        data = self.basehash[tid]
        for dep in self.runtaskdeps[tid]:
            data += self.get_unihash(dep)

        for (f, cs) in self.file_checksum_values[tid]:
            if cs:
                # "/./" marks a path prefix to strip so hashes are
                # location-independent; only the suffix is hashed.
                if "/./" in f:
                    data += "./" + f.split("/./")[1]
                data += cs

        if tid in self.taints:
            if self.taints[tid].startswith("nostamp:"):
                # Strip the "nostamp:" prefix before hashing
                data += self.taints[tid][8:]
            else:
                data += self.taints[tid]

        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
        self.taskhash[tid] = h
        #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
        return h

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()

    def save_unitaskhashes(self):
        self.unihash_cache.save(self.unitaskhashes)

    def copy_unitaskhashes(self, targetdir):
        self.unihash_cache.copyfile(targetdir)

    def dump_sigtask(self, mcfn, task, stampbase, runtime):
        """Write the signature input data for one task to a sigdata (or
        sigbasedata) file next to its stamp, cross-checking that the
        recorded hashes can be recomputed from the dumped data."""
        tid = mcfn + ":" + task
        mc = bb.runqueue.mc_from_tid(mcfn)
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
            # "customfile:<stamp>" — dump to the given file, read taints
            # relative to the embedded stamp path.
            sigfile = stampbase
            referencestamp = runtime[11:]
        elif runtime and tid in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]

        with bb.utils.umask(0o002):
            bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basehash_ignore_vars'] = self.basehash_ignore_vars
        data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
        data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
        data['basehash'] = self.basehash[tid]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
        for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
            if dep in self.basehash_ignore_vars:
                continue
            data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
            data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]

        if runtime and tid in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[tid]
            data['file_checksum_values'] = []
            for f,cs in self.file_checksum_values[tid]:
                # Store relative/base names only, matching what
                # get_taskhash() feeds into the hash.
                if "/./" in f:
                    data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
                else:
                    data['file_checksum_values'].append((os.path.basename(f), cs))
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.get_unihash(dep)
            data['taskhash'] = self.taskhash[tid]
            data['unihash'] = self.get_unihash(tid)

        taint = self.read_taint(mcfn, task, referencestamp)
        if taint:
            data['taint'] = taint

        if runtime and tid in self.taints:
            if 'nostamp:' in self.taints[tid]:
                data['taint'] = self.taints[tid]

        # Sanity check: the dumped data must reproduce the recorded hashes.
        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[tid]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
        if runtime and tid in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[tid]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)

        # Write atomically: temp file in the target directory, then rename.
        fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
                f.flush()
            os.chmod(tmpfile, 0o664)
            bb.utils.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err
460
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    """Signature generator that embeds the task hash in stamp file names."""
    name = "basichash"

    def get_stampfile_hash(self, tid):
        # Prefer the computed task hash; fall back to the base hash when
        # the task hash has not been computed (a missing base hash is an
        # error and raises KeyError).
        try:
            return self.taskhash[tid]
        except KeyError:
            return self.basehash[tid]

    def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
        """Return the stamp file name for the task, including its hash
        (or "*" when building a clean mask)."""
        # Hash lookups are keyed on the real task name, without the
        # "_setscene" suffix.
        realtask = taskname[:-9] if taskname.endswith("_setscene") else taskname
        tid = mcfn + ":" + realtask
        h = "*" if clean else self.get_stampfile_hash(tid)
        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
        """Return a glob pattern matching the task's stamps for any hash."""
        return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)

    def invalidate_task(self, task, mcfn):
        """Force the task to rerun by writing a random taint file."""
        import uuid

        bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))

        mc = bb.runqueue.mc_from_tid(mcfn)
        taintfn = self.datacaches[mc].stamp[mcfn] + '.' + task + '.taint'

        bb.utils.mkdirhier(os.path.dirname(taintfn))
        # The specific content of the taint file is not really important,
        # we just need it to be random, so a random UUID is used
        with open(taintfn, 'w') as taintf:
            taintf.write(str(uuid.uuid4()))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500500
Brad Bishop08902b02019-08-20 09:16:51 -0400501class SignatureGeneratorUniHashMixIn(object):
    def __init__(self, data):
        # Optional per-tid suffix appended to the hashserv method name
        # when querying/reporting unihashes (see get_unihash).
        self.extramethod = {}
        super().__init__(data)
505
    def get_taskdata(self):
        # Prepend the hash-equivalence state to the base class's tuple;
        # order must match set_taskdata() below.
        return (self.server, self.method, self.extramethod) + super().get_taskdata()
Brad Bishop08902b02019-08-20 09:16:51 -0400508
    def set_taskdata(self, data):
        # Mirror of get_taskdata(): the first three entries are ours, the
        # rest belongs to the base class.
        self.server, self.method, self.extramethod = data[:3]
        super().set_taskdata(data[3:])
Brad Bishop08902b02019-08-20 09:16:51 -0400512
    def client(self):
        # Lazily create and cache the hash equivalence server connection.
        if getattr(self, '_client', None) is None:
            self._client = hashserv.create_client(self.server)
        return self._client
517
    def reset(self, data):
        # Close any open hashserv connection before resetting state.
        if getattr(self, '_client', None) is not None:
            self._client.close()
            self._client = None
        return super().reset(data)
523
    def exit(self):
        # Close any open hashserv connection on shutdown.
        if getattr(self, '_client', None) is not None:
            self._client.close()
            self._client = None
        return super().exit()
529
    def get_stampfile_hash(self, tid):
        if tid in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash. This
            # ensures that if a task won't be re-run if the taskhash changes,
            # but it would result in the same output hash
            unihash = self._get_unihash(tid)
            if unihash is not None:
                return unihash

        # No cached unihash — fall back to the base class behaviour.
        return super().get_stampfile_hash(tid)
540
    def set_unihash(self, tid, unihash):
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        # Keyed by "mc:pn:taskname" so the cache survives recipe filename
        # changes; the stored taskhash lets _get_unihash() check staleness.
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        self.unitaskhashes[key] = (self.taskhash[tid], unihash)
        self.unihash[tid] = unihash
546
    def _get_unihash(self, tid, checkkey=None):
        """Return the cached unihash for tid, or None when there is no
        cache entry or it was recorded against a different taskhash
        (checkkey overrides the taskhash used for that comparison)."""
        if tid not in self.tidtopn:
            return None
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        if key not in self.unitaskhashes:
            return None
        if not checkkey:
            checkkey = self.taskhash[tid]
        # Entry is (taskhash-it-was-recorded-for, unihash)
        (key, unihash) = self.unitaskhashes[key]
        if key != checkkey:
            return None
        return unihash
Brad Bishop08902b02019-08-20 09:16:51 -0400560
    def get_unihash(self, tid):
        """Return the unique (equivalence) hash for tid, consulting the
        local cache and then the hash equivalence server; falls back to
        the taskhash itself when no equivalent hash is known."""
        taskhash = self.taskhash[tid]

        # If its not a setscene task we can return
        if self.setscenetasks and tid not in self.setscenetasks:
            self.unihash[tid] = None
            return taskhash

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # for each task
        unihash = self._get_unihash(tid)
        if unihash is not None:
            self.unihash[tid] = unihash
            return unihash

        # In the absence of being able to discover a unique hash from the
        # server, make it be equivalent to the taskhash. The unique "hash" only
        # really needs to be a unique string (not even necessarily a hash), but
        # making it match the taskhash has a few advantages:
        #
        # 1) All of the sstate code that assumes hashes can be the same
        # 2) It provides maximal compatibility with builders that don't use
        #    an equivalency server
        # 3) The value is easy for multiple independent builders to derive the
        #    same unique hash from the same input. This means that if the
        #    independent builders find the same taskhash, but it isn't reported
        #    to the server, there is a better chance that they will agree on
        #    the unique hash.
        unihash = taskhash

        try:
            method = self.method
            if tid in self.extramethod:
                method = method + self.extramethod[tid]
            data = self.client().get_unihash(method, self.taskhash[tid])
            if data:
                unihash = data
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported it at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
            else:
                hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
        except ConnectionError as e:
            # Best effort: a dead server means we just use the taskhash.
            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

        self.set_unihash(tid, unihash)
        self.unihash[tid] = unihash
        return unihash
610
    def report_unihash(self, path, task, d):
        """Compute the output hash of the completed *task* and report it to
        the hash equivalence server.

        The output hash is produced by running the configured self.method
        (optionally a dotted "module.function" name) over the sstate output
        at *path*; the signature data the method writes is kept in a
        depsig.do_<task> file under T for debugging. If the server responds
        with a different unihash for this output, the new unihash is adopted:
        BB_UNIHASH is updated and a taskUniHashUpdate event is fired.

        Returns nothing; errors contacting the server are reported as
        warnings, while cache inconsistencies are fatal.
        """
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        mcfn = d.getVar('BB_FILENAME')
        tid = mcfn + ':do_' + task
        key = tid + ':' + taskhash

        # Only setscene-capable tasks participate in hash equivalence
        if self.setscenetasks and tid not in self.setscenetasks:
            return

        # This can happen if locked sigs are in action. Detect and just exit
        if taskhash != self.taskhash[tid]:
            return

        # Sanity checks
        cache_unihash = self._get_unihash(tid, checkkey=taskhash)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        # PID-qualified file plus a stable symlink pointing at the latest run
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            # Dotted method names are imported; bare names are resolved by
            # better_eval in the usual bitbake context.
            if "." in self.method:
                (module, method) = self.method.rsplit('.', 1)
                locs['method'] = getattr(importlib.import_module(module), method)
                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
            else:
                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)

            try:
                extra_data = {}

                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
                if owner:
                    extra_data['owner'] = owner

                # Optionally attach the full task metadata/siginfo to the report
                if report_taskdata:
                    sigfile.seek(0)

                    extra_data['PN'] = d.getVar('PN')
                    extra_data['PV'] = d.getVar('PV')
                    extra_data['PR'] = d.getVar('PR')
                    extra_data['task'] = task
                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                method = self.method
                if tid in self.extramethod:
                    method = method + self.extramethod[tid]

                data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
                new_unihash = data['unihash']

                if new_unihash != unihash:
                    # Server equated our output with an existing unihash; adopt it
                    hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                    bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
                    self.set_unihash(tid, new_unihash)
                    d.setVar('BB_UNIHASH', new_unihash)
                else:
                    hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except ConnectionError as e:
                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

            # Refresh the stable symlink to point at this run's sigfile
            sigfile_link_path = os.path.join(tempdir, sigfile_link)
            bb.utils.remove(sigfile_link_path)

            try:
                os.symlink(sigfile_name, sigfile_link_path)
            except OSError:
                pass
696
Andrew Geissler82c905d2020-04-13 13:39:40 -0500697 def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches):
698 try:
699 extra_data = {}
700 method = self.method
701 if tid in self.extramethod:
702 method = method + self.extramethod[tid]
703
704 data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
705 hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
706
707 if data is None:
708 bb.warn("Server unable to handle unihash report")
709 return False
710
711 finalunihash = data['unihash']
712
713 if finalunihash == current_unihash:
714 hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash))
715 elif finalunihash == wanted_unihash:
716 hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash))
717 self.set_unihash(tid, finalunihash)
718 return True
719 else:
720 # TODO: What to do here?
721 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
722
Andrew Geisslerc926e172021-05-07 16:11:35 -0500723 except ConnectionError as e:
Andrew Geissler82c905d2020-04-13 13:39:40 -0500724 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
725
726 return False
Brad Bishop08902b02019-08-20 09:16:51 -0400727
#
# Dummy class used for bitbake-selftest
#
class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
    """Test-only generator combining the unihash mixin with the basic hash
    generator, wired to the server given by BB_HASHSERVE."""
    name = "TestEquivHash"

    def init_rundepcheck(self, data):
        # Standard setup first, then configure the equivalence server/method.
        super().init_rundepcheck(data)
        self.method = "sstate_output_hash"
        self.server = data.getVar('BB_HASHSERVE')
737
def dump_this_task(outfile, d):
    """Dump the signature data for the currently executing task to *outfile*."""
    import bb.parse
    recipe = d.getVar("BB_FILENAME")
    taskname = "do_" + d.getVar("BB_CURRENTTASK")
    stampbase = bb.parse.siggen.stampfile_base(recipe)
    bb.parse.siggen.dump_sigtask(recipe, taskname, outfile, "customfile:" + stampbase)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500744
def init_colors(enable_color):
    """Initialise colour dict for passing to compare_sigfiles().

    Every key is always present; when *enable_color* is false the values are
    empty strings so format strings still work without emitting ANSI codes.
    """
    escape_codes = {
        'color_title': '\033[1m',
        'color_default': '\033[0m',
        'color_add': '\033[0;32m',
        'color_remove': '\033[0;31m',
    }
    if enable_color:
        return escape_codes
    return {key: '' for key in escape_codes}
758
def worddiff_str(oldstr, newstr, colors=None):
    """Produce an inline word-level diff between *oldstr* and *newstr*.

    Unchanged words pass through, additions are shown as {+word+} and
    removals as [-word-], colourised via *colors*. If the two strings differ
    only in whitespace, a note saying so is appended.
    """
    if not colors:
        colors = init_colors(False)
    pieces = []
    for op, words in simplediff.diff(oldstr.split(' '), newstr.split(' ')):
        text = ' '.join(words)
        if op == '=':
            pieces.append(text)
        elif op == '+':
            pieces.append('{color_add}{{+{value}+}}{color_default}'.format(value=text, **colors))
        elif op == '-':
            pieces.append('{color_remove}[-{value}-]{color_default}'.format(value=text, **colors))
    whitespace_note = ''
    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
        whitespace_note = ' (whitespace changed)'
    return '"%s"%s' % (' '.join(pieces), whitespace_note)
778
def list_inline_diff(oldlist, newlist, colors=None):
    """Render an inline diff of two lists of strings as one bracketed string.

    Unchanged items are quoted, additions are prefixed with '+' and removals
    with '-', colourised via *colors*.
    """
    if not colors:
        colors = init_colors(False)
    rendered = []
    for op, words in simplediff.diff(oldlist, newlist):
        text = ' '.join(words)
        if op == '=':
            rendered.append("'%s'" % text)
        elif op == '+':
            rendered.append('{color_add}+{value}{color_default}'.format(value=text, **colors))
        elif op == '-':
            rendered.append('{color_remove}-{value}{color_default}'.format(value=text, **colors))
    return '[%s]' % (', '.join(rendered))
795
def clean_basepath(basepath):
    """Reduce a signature basepath to 'recipedir/recipe_task' plus suffixes.

    The absolute filesystem prefix is dropped. Any virtual class extension
    prefix ('virtual:a[:b[:c]]:/path...') is re-appended as a ':...' suffix,
    and a multiconfig prefix ('mc:<name>:...') becomes a trailing ':mc:<name>'.
    """
    prefix, recipedir, recipe_task = basepath.rsplit("/", 2)
    cleaned = recipedir + '/' + recipe_task

    # Plain absolute path: nothing else to preserve.
    if prefix[0] == '/':
        return cleaned

    if prefix.startswith("mc:") and prefix.count(':') >= 2:
        _, mc_name, prefix = prefix.split(":", 2)
        mc_suffix = ':mc:' + mc_name
    else:
        mc_suffix = ''

    # mc stuff now removed from prefix. Whatever was next, if present will be
    # the first suffix. ':/', recipe path start, marks the end of this.
    # Something like 'virtual:a[:b[:c]]:/path...' (b and c being optional)
    if prefix[0] != '/':
        cleaned += ':' + prefix.split(':/', 1)[0]

    return cleaned + mc_suffix
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500816
def clean_basepaths(a):
    """Return a copy of dict *a* with every key passed through clean_basepath()."""
    return {clean_basepath(key): value for key, value in a.items()}
822
def clean_basepaths_list(a):
    """Return the items of *a* with clean_basepath() applied, preserving order."""
    return [clean_basepath(item) for item in a]
828
# Handled renamed fields
def handle_renames(data):
    """Migrate legacy key names in a loaded signature dict to their current
    names, in place (old 'whitelist' keys to the 'ignore' spellings)."""
    renames = {
        'basewhitelist': 'basehash_ignore_vars',
        'taskwhitelist': 'taskhash_ignore_tasks',
    }
    for old_name, new_name in renames.items():
        if old_name in data:
            data[new_name] = data.pop(old_name)
838
def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
    """Compare two zstd-compressed JSON signature files and describe the
    differences.

    Arguments:
        a, b      -- paths to the two signature files
        recursecb -- optional callback recursecb(dep, hash_a, hash_b) used to
                     recurse into a changed task dependency; returns a list of
                     output lines or None
        color     -- if True, decorate output with ANSI colour codes
        collapsed -- if True, suppress detail that is redundant when the
                     caller is summarising (basehash line, per-dependency
                     hash-change lines, runtaskdeps listing)

    Returns a list of human-readable strings describing the differences.
    """
    output = []

    colors = init_colors(color)
    def color_format(formatstr, **values):
        """
        Return colour formatted string.
        NOTE: call with the format string, not an already formatted string
        containing values (otherwise you could have trouble with { and }
        characters)
        """
        if not formatstr.endswith('{color_default}'):
            formatstr += '{color_default}'
        # In newer python 3 versions you can pass both of these directly,
        # but we only require 3.4 at the moment
        formatparams = {}
        formatparams.update(colors)
        formatparams.update(values)
        return formatstr.format(**formatparams)

    # Load both signature files; sets were serialised as {'_set_object': [...]}
    # and are restored by SetDecoder.
    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
        a_data = json.load(f, object_hook=SetDecoder)
    with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
        b_data = json.load(f, object_hook=SetDecoder)

    # Normalise legacy key names so old and new siginfo files compare cleanly
    for data in [a_data, b_data]:
        handle_renames(data)

    def dict_diff(a, b, ignored_vars=set()):
        # Returns (changed, added, removed) key sets between two dicts,
        # skipping changes to keys listed in ignored_vars.
        # (The shared default set() is safe: it is only read, never mutated.)
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in ignored_vars:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        # Diff two lists of (filename, checksum) pairs, pairing up same-name
        # entries as "changed" and reporting the rest as added/removed.
        from collections import Counter

        # Convert lists back to tuples
        a = [(f[0], f[1]) for f in a]
        b = [(f[0], f[1]) for f in b]

        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                # for-else: if no added entry shares the filename, it was removed
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    # Ignore-variable and ignore-task configuration differences
    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))

    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash'] and not collapsed:
        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))

    # Variable dependency graph differences (which variables each depends on)
    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
    if changed:
        for dep in sorted(changed):
            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in sorted(added):
            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
    if removed:
        for dep in sorted(removed):
            output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))


    # Variable value differences, rendered as unified diff / word diff /
    # plain old-vs-new depending on the shape of the values
    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in sorted(changed):
            oldval = a_data['varvals'][dep]
            newval = b_data['varvals'][dep]
            if newval and oldval and ('\n' in oldval or '\n' in newval):
                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
                # Cut off the first two lines, since we aren't interested in
                # the old/new filename (they are blank anyway in this case)
                difflines = list(diff)[2:]
                if color:
                    # Add colour to diff output
                    for i, line in enumerate(difflines):
                        if line.startswith('+'):
                            line = color_format('{color_add}{line}', line=line)
                            difflines[i] = line
                        elif line.startswith('-'):
                            line = color_format('{color_remove}{line}', line=line)
                            difflines[i] = line
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
            elif newval and oldval and (' ' in oldval or ' ' in newval):
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
            else:
                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))

    # Older siginfo files may lack these keys entirely
    if not 'file_checksum_values' in a_data:
        a_data['file_checksum_values'] = []
    if not 'file_checksum_values' in b_data:
        b_data['file_checksum_values'] = []

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
    if added:
        for f in added:
            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
    if removed:
        for f in removed:
            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))

    if not 'runtaskdeps' in a_data:
        a_data['runtaskdeps'] = {}
    if not 'runtaskdeps' in b_data:
        b_data['runtaskdeps'] = {}

    # Positional comparison of runtime task dependencies (skipped when
    # collapsed, as the per-hash detail below covers it)
    if not collapsed:
        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
            changed = ["Number of task dependencies changed"]
        else:
            changed = []
            for idx, task in enumerate(a_data['runtaskdeps']):
                a = a_data['runtaskdeps'][idx]
                b = b_data['runtaskdeps'][idx]
                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))

        if changed:
            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
            if clean_a != clean_b:
                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
            else:
                output.append(color_format("{color_title}runtaskdeps changed:"))
            output.append("\n".join(changed))


    # Per-dependency hash comparison, recursing via recursecb when provided
    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = clean_basepaths(a_data['runtaskhashes'])
        b = clean_basepaths(b_data['runtaskhashes'])
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in sorted(added):
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
        if removed:
            for dep in sorted(removed):
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
        if changed:
            for dep in sorted(changed):
                if not collapsed:
                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
                if callable(recursecb):
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        if collapsed:
                            output.extend(recout)
                        else:
                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
                            # that hash since in all likelyhood, they're the same changes this task also saw.
                            output = [output[-1]] + recout
                            break

    # Taint differences (forced or nostamp tasks); nostamp taints carry a
    # random uuid4, which is annotated to make that obvious in the output
    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        if a_taint and a_taint.startswith('nostamp:'):
            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
        if b_taint and b_taint.startswith('nostamp:'):
            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))

    return output
1058
1059
def calc_basehash(sigdata):
    """Recompute the base hash from a loaded siginfo dict.

    Concatenates the task's own variable value with each sorted dependency
    name and its value (None values contribute only the name), then returns
    the sha256 hex digest of the result.
    """
    task = sigdata['task']
    own_val = sigdata['varvals'][task]
    parts = ['' if own_val is None else own_val]

    for dep in sorted(sigdata['taskdeps']):
        parts.append(dep)
        val = sigdata['varvals'][dep]
        if val is not None:
            parts.append(str(val))

    return hashlib.sha256(''.join(parts).encode("utf-8")).hexdigest()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001075
def calc_taskhash(sigdata):
    """Recompute the task hash from a loaded siginfo dict.

    Folds the basehash, each runtime dependency's hash, the file checksums
    (prefixing the filename for relative './' paths) and any taint into a
    sha256 hex digest. A 'nostamp:' taint contributes only its payload
    (everything after the 8-character prefix).
    """
    parts = [sigdata['basehash']]

    for dep in sigdata['runtaskdeps']:
        parts.append(sigdata['runtaskhashes'][dep])

    for entry in sigdata['file_checksum_values']:
        if entry[1]:
            if "./" in entry[0]:
                parts.append(entry[0])
            parts.append(entry[1])

    if 'taint' in sigdata:
        taint = sigdata['taint']
        if 'nostamp:' in taint:
            parts.append(taint[8:])
        else:
            parts.append(taint)

    return hashlib.sha256(''.join(parts).encode("utf-8")).hexdigest()
Patrick Williamsd8c66bc2016-06-20 12:57:21 -05001095
1096
def dump_sigfile(a):
    """Render the contents of signature file *a* as a list of readable lines.

    The file is zstd-compressed JSON; legacy key names are migrated before
    dumping. The base hash and task hash are also recomputed from the loaded
    data so they can be cross-checked against the stored values.
    """
    output = []
    add = output.append

    with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
        a_data = json.load(f, object_hook=SetDecoder)

    handle_renames(a_data)

    add("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
    add("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
    add("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
    add("basehash: %s" % (a_data['basehash']))

    for dep in sorted(a_data['gendeps']):
        add("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))

    for dep in sorted(a_data['varvals']):
        add("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        add("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))

    if 'file_checksum_values' in a_data:
        add("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))

    if 'runtaskhashes' in a_data:
        for dep in sorted(a_data['runtaskhashes']):
            add("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        # nostamp taints embed a random uuid4; annotate that in the output
        msg = a_data['taint']
        if msg.startswith('nostamp:'):
            msg = msg.replace('nostamp:', 'nostamp(uuid4):')
        add("Tainted (by forced/invalidated task): %s" % msg)

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        add("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        add("Unable to compute base hash")

    computed_taskhash = calc_taskhash(a_data)
    add("Computed task hash is %s" % computed_taskhash)

    return output