#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import bb.siggen
import oe

def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(fn):
        return "/image.bbclass" in " ".join(dataCache.inherits[fn])

    # (Almost) always include our own inter-task dependencies.
    # The exception is the special do_kernel_configme->do_unpack_and_patch
    # dependency from archiver.bbclass.
    if recipename == depname:
        if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"):
            return False
        return True

    # Exclude well-defined recipe->dependency relationships
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for the special wildcard form, which matches any recipe
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well-behaved names which don't change between architectures/tunes
    if isPackageGroup(fn) and isAllArch(fn) and not isNative(depname):
        return False

    # Exclude well-defined machine-specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    depfn = dep.rsplit(".", 1)[0]
    if dataCache and isKernel(depfn) and not isKernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False
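    # For example (hypothetical module name): a target recipe with
    #   RRECOMMENDS_${PN} = "kernel-module-ath9k"
    # is not re-signed when the machine-specific kernel checksum changes.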

    # Default to keep dependencies
    return True

def sstate_lockedsigs(d):
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs
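# Each SIGGEN_LOCKEDSIGS_<type> entry is a "pn:task:hash" triple, e.g. (with a
# hypothetical hash) "zlib:do_fetch:a94a8fe5...", which parses into
# sigs['zlib']['do_fetch'] = ['a94a8fe5...', 'SIGGEN_LOCKEDSIGS_<type>'].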

class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
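        # SIGGEN_UNLOCKED_RECIPES lists recipes whose signatures are allowed
        # to float; kept as a dict so recipes found to depend on an unlocked
        # recipe can be added on the fly in get_taskhash() below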
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split()
        self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache=None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

    def get_taskdata(self):
        data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
        return (data, self.lockedpnmap, self.lockedhashfn)

    def set_taskdata(self, data):
        coredata, self.lockedpnmap, self.lockedhashfn = data
        super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def get_taskhash(self, fn, task, deps, dataCache):
        h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)

        recipename = dataCache.pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCache.hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def get_mc(tid):
                tid = tid.rsplit('.', 1)[0]
                if tid.startswith('mc:'):
                    elems = tid.split(':')
                    return elems[1]
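                # Implicitly returns None for tids without an 'mc:' prefix,
                # so all non-multiconfig tasks compare equal here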
            def recipename_from_dep(dep):
                # The dep entry will look something like
                # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
                # ...

                fn = dep.rsplit('.', 1)[0]
                return dataCache.pkg_fn[fn]

            mc = get_mc(fn)
            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [recipename_from_dep(x) for x in deps if mc == get_mc(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                k = fn + "." + task
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[k] = h_locked
                self.taskhash[k] = h_locked
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                          % (recipename, task, h, h_locked, var))

                return h_locked
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def dump_sigtask(self, fn, task, stampbase, runtime):
        k = fn + "." + task
        if k in self.lockedhashes:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
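        # Writes an include file of the form (hypothetical names and hashes):
        #   SIGGEN_LOCKEDSIGS_t-core2-64 = "\
        #       zlib:do_fetch:a94a8fe5... \
        #       "
        #   SIGGEN_LOCKEDSIGS_TYPES_qemux86-64 = "t-core2-64"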
        types = {}
        for k in self.runtaskdeps:
            if taskfilter:
                if k not in taskfilter:
                    continue
            fn = k.rsplit(".",1)[0]
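            # hashfn is assumed to look like
            # "<spec> sstate:<pn>:<arch>:<pv>:<pr>:<sstate-pkgarch>:..."; field 5
            # of the second word is the sstate package architecture (tune),
            # used to group the locked sigs by type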
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(k)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
                for k in sortedk:
                    fn = k.rsplit(".",1)[0]
                    task = k.rsplit(".",1)[1]
                    if k not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(".", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for task in range(len(sq_fn)):
            if task not in ret:
                for pn in self.lockedsigs:
                    # lockedsigs values are [hash, varname] pairs; compare
                    # against the hash element
                    if sq_hash[task] in (v[0] for v in self.lockedsigs[pn].values()):
                        if sq_task[task] == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                               % (pn, sq_task[task], sq_hash[task]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))

class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('SSTATE_HASHEQUIV_SERVER')
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        self.unihashes = bb.persist_data.persist('SSTATESIG_UNIHASH_CACHE_v1_' + self.method.replace('.', '_'), data)
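        # SSTATE_HASHEQUIV_METHOD is assumed to name a fully-qualified outhash
        # function such as 'oe.sstatesig.OEOuthashBasic' (defined below);
        # report_unihash() resolves it via importlib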

    def get_taskdata(self):
        return (self.server, self.method) + super().get_taskdata()

    def set_taskdata(self, data):
        self.server, self.method = data[:2]
        super().set_taskdata(data[2:])

    def __get_task_unihash_key(self, task):
        # TODO: The key only *needs* to be the taskhash, the task is just
        # convenient
        return '%s:%s' % (task, self.taskhash[task])

    def get_stampfile_hash(self, task):
        if task in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash. This
            # ensures that a task won't be re-run if the taskhash changes but
            # would still produce the same output hash
            unihash = self.unihashes.get(self.__get_task_unihash_key(task))
            if unihash is not None:
                return unihash

        return super().get_stampfile_hash(task)

    def get_unihash(self, task):
        import urllib.error
        import urllib.parse
        import urllib.request
        import json

        taskhash = self.taskhash[task]

        key = self.__get_task_unihash_key(task)

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # the most recent unihash for each task
        unihash = self.unihashes.get(key)
        if unihash is not None:
            return unihash

        # In the absence of being able to discover a unique hash from the
        # server, make it be equivalent to the taskhash. The unique "hash" only
        # really needs to be a unique string (not even necessarily a hash), but
        # making it match the taskhash has a few advantages:
        #
        # 1) All of the sstate code that assumes hashes can be the same
        #    continues to work unmodified
        # 2) It provides maximal compatibility with builders that don't use
        #    an equivalency server
        # 3) Multiple independent builders can easily derive the same unique
        #    hash from the same input. This means that if independent builders
        #    find the same taskhash, but it isn't reported to the server, there
        #    is a better chance that they will agree on the unique hash.
        unihash = taskhash

        try:
            url = '%s/v1/equivalent?%s' % (self.server,
                    urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[task]}))

            request = urllib.request.Request(url)
            response = urllib.request.urlopen(request)
            data = response.read().decode('utf-8')

            json_data = json.loads(data)
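            # Only the 'unihash' field of the reply is consumed; a hypothetical
            # response body looks like {"unihash": "8f3c...", ...}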

            if json_data:
                unihash = json_data['unihash']
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, task, self.server))
            else:
                bb.debug(2, 'No reported unihash for %s:%s from %s' % (task, taskhash, self.server))
        except urllib.error.URLError as e:
            bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        except (KeyError, json.JSONDecodeError) as e:
            bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))

        self.unihashes[key] = unihash
        return unihash

    def report_unihash(self, path, task, d):
        import urllib.error
        import urllib.request
        import json
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        fn = d.getVar('BB_FILENAME')
        key = fn + '.do_' + task + ':' + taskhash

        # Sanity checks
        cache_unihash = self.unihashes.get(key)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            (module, method) = self.method.rsplit('.', 1)
            locs['method'] = getattr(importlib.import_module(module), method)

            outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)

            try:
                url = '%s/v1/equivalent' % self.server
                task_data = {
                    'taskhash': taskhash,
                    'method': self.method,
                    'outhash': outhash,
                    'unihash': unihash,
                    'owner': d.getVar('SSTATE_HASHEQUIV_OWNER')
                    }

                if report_taskdata:
                    sigfile.seek(0)

                    task_data['PN'] = d.getVar('PN')
                    task_data['PV'] = d.getVar('PV')
                    task_data['PR'] = d.getVar('PR')
                    task_data['task'] = task
                    task_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                headers = {'content-type': 'application/json'}

                request = urllib.request.Request(url, json.dumps(task_data).encode('utf-8'), headers)
                response = urllib.request.urlopen(request)
                data = response.read().decode('utf-8')

                json_data = json.loads(data)
                new_unihash = json_data['unihash']

                if new_unihash != unihash:
                    bb.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                else:
                    bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except urllib.error.URLError as e:
                bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
            except (KeyError, json.JSONDecodeError) as e:
                bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

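                # Leave a stable 'depsig.do_<task>' symlink pointing at the
                # most recent per-PID signature file for easy inspection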
                sigfile_link_path = os.path.join(tempdir, sigfile_link)
                bb.utils.remove(sigfile_link_path)

                try:
                    os.symlink(sigfile_name, sigfile_link_path)
                except OSError:
                    pass

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash


def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb.')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'
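        # e.g. a key of 'virtual:native:/p/foo_1.0.bb.do_compile' yields
        # pn 'foo-native' and taskname 'do_compile'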

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]
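    # i.e. sstate-cache '...:<hash>_<task>.tgz.siginfo' paths are assumed to
    # yield the '<hash>' field, while stamps-dir '...sigdata.<hash>' files
    # yield the trailing hash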

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
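        # '?' * 64 is a glob pattern matching any 64-character (sha256) taskhash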
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo


def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2
def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

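            # Each entry contributes one ls -l style record to the hash:
            #   <type+perms> [owner group] <major.minor|size> <content sha256> <path> [-> target]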
            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                    update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents (fobj avoids shadowing the datastore 'd')
                    with open(path, 'rb') as fobj:
                        for chunk in iter(lambda: fobj.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()