import bb.siggen
import oe
import os

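# Filter the runtime dependencies that feed into task signatures. BitBake
# calls this for every recipe dependency; returning False drops the
# dependency from the signature so it cannot cause unnecessary rebuilds.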
def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    # Return True if we should keep the dependency, False to drop it
    def isNative(x):
        return x.endswith("-native")
    def isCross(x):
        return "-cross-" in x
    def isNativeSDK(x):
        return x.startswith("nativesdk-")
    def isKernel(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return inherits.find("/module-base.bbclass") != -1 or inherits.find("/linux-kernel-base.bbclass") != -1
    def isPackageGroup(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/packagegroup.bbclass" in inherits
    def isAllArch(fn):
        inherits = " ".join(dataCache.inherits[fn])
        return "/allarch.bbclass" in inherits
    def isImage(fn):
        return "/image.bbclass" in " ".join(dataCache.inherits[fn])

    # (Almost) always include our own inter-task dependencies.
    # The exception is the special do_kernel_configme->do_unpack_and_patch
    # dependency from archiver.bbclass.
    if recipename == depname:
        if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"):
            return False
        return True

    # Exclude well defined recipe->dependency
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for special wildcard
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if isNative(recipename) or isCross(recipename) or isNativeSDK(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well behaved names which don't change between architectures/tunes
    if isPackageGroup(fn) and isAllArch(fn) and not isNative(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not isImage(fn):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
    # is machine specific.
    # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
    # and we recommend a kernel-module, we exclude the dependency.
    depfn = dep.rsplit(".", 1)[0]
    if dataCache and isKernel(depfn) and not isKernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
                return False

    # Default to keep dependencies
    return True

def sstate_lockedsigs(d):
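    """
    Parse the SIGGEN_LOCKEDSIGS_<type> variables into a dict of the form
    {pn: {task: [hash, varname]}}. Each entry in a locked-sigs variable is
    expected to look like "pn:task:hash", e.g. (illustrative values only):
        SIGGEN_LOCKEDSIGS_t-core2-64 = "gawk:do_fetch:<sig>"
    """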
    sigs = {}
    types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split()
    for t in types:
        siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t
        lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split()
        for ls in lockedsigs:
            pn, task, h = ls.split(":", 2)
            if pn not in sigs:
                sigs[pn] = {}
            sigs[pn][task] = [h, siggen_lockedsigs_var]
    return sigs

class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    name = "OEBasic"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

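# Like OEBasic, but additionally includes the task hash in stamp files so
# that metadata changes cause tasks to re-run; this is the default
# BB_SIGNATURE_HANDLER in OE-Core.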
class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
    name = "OEBasicHash"
    def init_rundepcheck(self, data):
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        self.lockedsigs = sstate_lockedsigs(data)
        self.lockedhashes = {}
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        self.mismatch_msgs = []
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                "").split()
        self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

    def get_taskdata(self):
        data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
        return (data, self.lockedpnmap, self.lockedhashfn)

    def set_taskdata(self, data):
        coredata, self.lockedpnmap, self.lockedhashfn = data
        super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)

    def dump_sigs(self, dataCache, options):
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def get_taskhash(self, fn, task, deps, dataCache):
        h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)

        recipename = dataCache.pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCache.hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def get_mc(tid):
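                # Map a task id to its multiconfig name, e.g. (the "mymc"
                # name is illustrative only):
                #   "multiconfig:mymc:/path/foo.bb.do_compile" -> "mymc"
                # Non-multiconfig task ids fall through and return None.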
                tid = tid.rsplit('.', 1)[0]
                if tid.startswith('multiconfig:'):
                    elems = tid.split(':')
                    return elems[1]
            def recipename_from_dep(dep):
                # The dep entry will look something like
                # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
                # ...

                fn = dep.rsplit('.', 1)[0]
                return dataCache.pkg_fn[fn]

            mc = get_mc(fn)
            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            depnames = [ recipename_from_dep(x) for x in deps if mc == get_mc(x)]
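            # Note this is a substring match, so an entry in
            # SIGGEN_UNLOCKED_RECIPES unlocks every recipe whose name
            # contains it.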
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                k = fn + "." + task
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[k] = h_locked
                self.taskhash[k] = h_locked
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                if h != h_locked:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                          % (recipename, task, h, h_locked, var))

                return h_locked
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def dump_sigtask(self, fn, task, stampbase, runtime):
        k = fn + "." + task
        if k in self.lockedhashes:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        types = {}
        for k in self.runtaskdeps:
            if taskfilter:
                if not k in taskfilter:
                    continue
            fn = k.rsplit(".",1)[0]
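            # lockedhashfn comes from BB_HASHFILENAME; its second
            # space-separated word is assumed to be SSTATE_PKGSPEC, whose
            # sixth colon-separated field is the sstate package architecture.
            # Locked sigs are grouped into types by that value.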
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(k)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
                for k in sortedk:
                    fn = k.rsplit(".",1)[0]
                    task = k.rsplit(".",1)[1]
                    if k not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))

    def dump_siglist(self, sigfile):
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(".", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for task in range(len(sq_fn)):
            if task not in ret:
                for pn in self.lockedsigs:
                    # Each lockedsigs entry is a [hash, varname] pair, so
                    # compare against the hash element
                    if sq_hash[task] in (v[0] for v in self.lockedsigs[pn].values()):
                        if sq_task[task] == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                               % (pn, sq_task[task], sq_hash[task]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))

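# Signature generator which additionally consults a hash equivalence server
# so that tasks whose output is identical can share a unified hash
# ("unihash") and therefore reuse sstate even when their taskhashes differ.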
class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHash):
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('SSTATE_HASHEQUIV_SERVER')
        self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
        self.unihashes = bb.persist_data.persist('SSTATESIG_UNIHASH_CACHE_v1_' + self.method.replace('.', '_'), data)

    def get_taskdata(self):
        return (self.server, self.method) + super().get_taskdata()

    def set_taskdata(self, data):
        self.server, self.method = data[:2]
        super().set_taskdata(data[2:])

    def __get_task_unihash_key(self, task):
        # TODO: The key only *needs* to be the taskhash, the task is just
        # convenient
        return '%s:%s' % (task, self.taskhash[task])

    def get_stampfile_hash(self, task):
        if task in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash.
            # This ensures that a task won't be re-run if the taskhash
            # changes but still results in the same output hash.
            unihash = self.unihashes.get(self.__get_task_unihash_key(task))
            if unihash is not None:
                return unihash

        return super().get_stampfile_hash(task)

    def get_unihash(self, task):
        import urllib.error
        import urllib.parse
        import urllib.request
        import json

        taskhash = self.taskhash[task]

        key = self.__get_task_unihash_key(task)

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # the most recent entry for each task
        unihash = self.unihashes.get(key)
        if unihash is not None:
            return unihash

        # In the absence of being able to discover a unique hash from the
        # server, make it be equivalent to the taskhash. The unique "hash" only
        # really needs to be a unique string (not even necessarily a hash), but
        # making it match the taskhash has a few advantages:
        #
        # 1) All of the sstate code that assumes hashes can be the same
        #    continues to work unchanged
        # 2) It provides maximal compatibility with builders that don't use
        #    an equivalency server
        # 3) The value is easy for multiple independent builders to derive the
        #    same unique hash from the same input. This means that if the
        #    independent builders find the same taskhash, but it isn't reported
        #    to the server, there is a better chance that they will agree on
        #    the unique hash.
        unihash = taskhash

        try:
            url = '%s/v1/equivalent?%s' % (self.server,
                    urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[task]}))

            request = urllib.request.Request(url)
            response = urllib.request.urlopen(request)
            data = response.read().decode('utf-8')

            json_data = json.loads(data)

            if json_data:
                unihash = json_data['unihash']
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, task, self.server))
            else:
                bb.debug(2, 'No reported unihash for %s:%s from %s' % (task, taskhash, self.server))
        except urllib.error.URLError as e:
            bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        except (KeyError, json.JSONDecodeError) as e:
            bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))

        self.unihashes[key] = unihash
        return unihash

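    # Compute the task's output hash with the configured
    # SSTATE_HASHEQUIV_METHOD and report the taskhash/outhash/unihash
    # triple back to the hash equivalence server.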
    def report_unihash(self, path, task, d):
        import urllib.error
        import urllib.request
        import json
        import tempfile
        import base64
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        fn = d.getVar('BB_FILENAME')
        key = fn + '.do_' + task + ':' + taskhash

        # Sanity checks
        cache_unihash = self.unihashes.get(key)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            (module, method) = self.method.rsplit('.', 1)
            locs['method'] = getattr(importlib.import_module(module), method)

            outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)

            try:
                url = '%s/v1/equivalent' % self.server
                task_data = {
                    'taskhash': taskhash,
                    'method': self.method,
                    'outhash': outhash,
                    'unihash': unihash,
                    'owner': d.getVar('SSTATE_HASHEQUIV_OWNER')
                    }

                if report_taskdata:
                    sigfile.seek(0)

                    task_data['PN'] = d.getVar('PN')
                    task_data['PV'] = d.getVar('PV')
                    task_data['PR'] = d.getVar('PR')
                    task_data['task'] = task
                    task_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                headers = {'content-type': 'application/json'}

                request = urllib.request.Request(url, json.dumps(task_data).encode('utf-8'), headers)
                response = urllib.request.urlopen(request)
                data = response.read().decode('utf-8')

                json_data = json.loads(data)
                new_unihash = json_data['unihash']

                if new_unihash != unihash:
                    bb.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                else:
                    bb.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except urllib.error.URLError as e:
                bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
            except (KeyError, json.JSONDecodeError) as e:
                bb.warn('Poorly formatted response from %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

                sigfile_link_path = os.path.join(tempdir, sigfile_link)
                bb.utils.remove(sigfile_link_path)

                try:
                    os.symlink(sigfile_name, sigfile_link_path)
                except OSError:
                    pass

# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash


def find_siginfo(pn, taskname, taskhashlist, d):
    """ Find signature data files for comparison purposes """

    import fnmatch
    import glob

    if not taskname:
        # We have to derive pn and taskname
        key = pn
        splitit = key.split('.bb.')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                # Key by the hash actually found in the filename; hashval may
                # be a wildcard pattern here
                hashfiles[actual_hashval] = fullpath
                if not taskhashlist:
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates

bb.siggen.find_siginfo = find_siginfo


def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    d2 = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        d2.setVar("SSTATE_MANMACH", extrainf)
    return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)

def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
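    """
    Locate the sstate manifest matching taskdata/taskname, trying each
    candidate package architecture in turn. Returns (manifest_path,
    datastore), or (None, datastore) if no manifest was found.
    """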
    d2 = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        d2 = multilibcache[variant]

    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        pkgarchs = ['${MACHINE_ARCH}']
        pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split()))
        pkgarchs.append('allarch')
        pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')

    for pkgarch in pkgarchs:
        manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, d2
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
    return None, d2

def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata
    and file contents.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories to ensure consistent ordering when recursing
            dirs.sort()
            files.sort()

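            # Hash a single filesystem entry. The record written for each
            # entry deliberately resembles a line of "ls -l" output: type and
            # permission characters, optional owner/group, device numbers or
            # size, content hash, name and symlink target.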
            def process(path):
                s = os.lstat(path)

                if stat.S_ISDIR(s.st_mode):
                    update_hash('d')
                elif stat.S_ISCHR(s.st_mode):
                    update_hash('c')
                elif stat.S_ISBLK(s.st_mode):
                    update_hash('b')
                elif stat.S_ISSOCK(s.st_mode):
                    update_hash('s')
                elif stat.S_ISLNK(s.st_mode):
                    update_hash('l')
                elif stat.S_ISFIFO(s.st_mode):
                    update_hash('p')
                else:
                    update_hash('-')

                def add_perm(mask, on, off='-'):
                    if mask & s.st_mode:
                        update_hash(on)
                    else:
                        update_hash(off)

                add_perm(stat.S_IRUSR, 'r')
                add_perm(stat.S_IWUSR, 'w')
                if stat.S_ISUID & s.st_mode:
                    add_perm(stat.S_IXUSR, 's', 'S')
                else:
                    add_perm(stat.S_IXUSR, 'x')

                add_perm(stat.S_IRGRP, 'r')
                add_perm(stat.S_IWGRP, 'w')
                if stat.S_ISGID & s.st_mode:
                    add_perm(stat.S_IXGRP, 's', 'S')
                else:
                    add_perm(stat.S_IXGRP, 'x')

                add_perm(stat.S_IROTH, 'r')
                add_perm(stat.S_IWOTH, 'w')
                if stat.S_ISVTX & s.st_mode:
                    update_hash('t')
                else:
                    add_perm(stat.S_IXOTH, 'x')

                if include_owners:
                    update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                    update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)

                update_hash(" ")
                if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
                    update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
                else:
                    update_hash(" " * 9)

                update_hash(" ")
                if stat.S_ISREG(s.st_mode):
                    update_hash("%10d" % s.st_size)
                else:
                    update_hash(" " * 10)

                update_hash(" ")
                fh = hashlib.sha256()
                if stat.S_ISREG(s.st_mode):
                    # Hash file contents. Use a name that doesn't shadow the
                    # datastore 'd' from the enclosing scope.
                    with open(path, 'rb') as fp:
                        for chunk in iter(lambda: fp.read(4096), b""):
                            fh.update(chunk)
                    update_hash(fh.hexdigest())
                else:
                    update_hash(" " * len(fh.hexdigest()))

                update_hash(" %s" % path)

                if stat.S_ISLNK(s.st_mode):
                    update_hash(" -> %s" % os.readlink(path))

                update_hash("\n")

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()
