BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

PACKAGECONFIG_CONFARGS ??= ""

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            imported = __import__(toimport)
            inject(toimport.split(".", 1)[0], imported)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
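
# Illustrative usage note (not part of the original class): recipes typically
# call the helper above from their own tasks; the make targets shown here are
# hypothetical.
#
#   do_compile() {
#       oe_runmake all
#   }
#   do_install() {
#       oe_runmake 'DESTDIR=${D}' install
#   }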


def get_base_dep(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS:class-target = "${@get_base_dep(d)}"
BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}"

DEPENDS:prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it is used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH:prepend = "${@extra_path_elements(d)}"

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache; which(gcc) would then
            # return /usr/local/bin/ccache/gcc, but what we need is /usr/bin/gcc.
            # This code checks for that case and fixes it.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal("Bitbake Fetcher Error: " + repr(e))
}

SSTATETASKS += "do_deploy_source_date_epoch"

do_deploy_source_date_epoch () {
    mkdir -p ${SDE_DEPLOYDIR}
    if [ -e ${SDE_FILE} ]; then
        echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    else
        echo "${SDE_FILE} not found!"
    fi
}

python do_deploy_source_date_epoch_setscene () {
    sstate_setscene(d)
    bb.utils.mkdirhier(d.getVar('SDE_DIR'))
    sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
    if os.path.exists(sde_file):
        target = d.getVar('SDE_FILE')
        bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
        bb.utils.rename(sde_file, target)
    else:
        bb.debug(1, "%s not found!" % sde_file)
}

do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
addtask do_deploy_source_date_epoch_setscene
addtask do_deploy_source_date_epoch before do_configure after do_patch

python create_source_date_epoch_stamp() {
    source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
    oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
do_unpack[postfuncs] += "create_source_date_epoch_stamp"

def get_source_date_epoch_value(d):
    return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if the bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set,
        # skip parsing for all the other providers, which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
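        # Illustrative example (not set by this class): with a hypothetical
        #   PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
        # any other parsed recipe that PROVIDES "virtual/kernel" would be
        # skipped by the check below.
        #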
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    if d.getVar("S")[-1] == '/':
        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
    if d.getVar("B")[-1] == '/':
        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))

    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
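    #
    # Illustrative example only (hypothetical recipe content, not set by this class):
    #
    #   PACKAGECONFIG ??= "gnutls"
    #   PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
    #
    # With "gnutls" enabled, the loop below adds "--with-gnutls" to
    # PACKAGECONFIG_CONFARGS and "gnutls" to DEPENDS; with it disabled,
    # "--without-gnutls" is added instead.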
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS:${PN}', extrardeps)
        appendVar('RRECOMMENDS:${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}'. Which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}. Which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

    check_license = False if pn.startswith("nativesdk-") else True
    for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
          "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
          "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
        if pn.endswith(d.expand(t)):
            check_license = False
    if pn.startswith("gcc-source-"):
        check_license = False

    if check_license and bad_licenses:
        bad_licenses = expand_wildcard_licenses(d, bad_licenses)

        whitelist = []
        for lic in bad_licenses:
            spdx_license = return_spdx(d, lic)
            whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
            if spdx_license:
                whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

        if pn in whitelist:
            '''
            We need to track what we are whitelisting and why. If pn is
            incompatible we need to be able to note that the image that
            is created may in fact contain incompatible licenses despite
            INCOMPATIBLE_LICENSE being set.
            '''
            bb.note("Including %s as buildable despite it having an incompatible license because it has been whitelisted" % pn)
        else:
            pkgs = d.getVar('PACKAGES').split()
            skipped_pkgs = {}
            unskipped_pkgs = []
            for pkg in pkgs:
                incompatible_lic = incompatible_license(d, bad_licenses, pkg)
                if incompatible_lic:
                    skipped_pkgs[pkg] = incompatible_lic
                else:
                    unskipped_pkgs.append(pkg)
            if unskipped_pkgs:
                for pkg in skipped_pkgs:
                    bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                    d.setVar('LICENSE_EXCLUSION-' + pkg, ' '.join(skipped_pkgs[pkg]))
                for pkg in unskipped_pkgs:
                    bb.debug(1, "Including the package %s" % pkg)
            else:
                incompatible_lic = incompatible_license(d, bad_licenses)
                for pkg in skipped_pkgs:
                    incompatible_lic += skipped_pkgs[pkg]
                incompatible_lic = sorted(list(set(incompatible_lic)))

                if incompatible_lic:
                    bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                    raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)
        # Also check downloadfilename as the URL path might not be useful for sniffing
        path = uri.params.get("downloadfilename", uri.path)

        # HTTP/FTP use the wget fetcher
        if uri.scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif uri.scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif uri.scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.zst should DEPEND on zstd-native for unpacking
        elif path.endswith('.zst'):
            d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
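        #
        # Illustrative example only (hypothetical recipe content): an entry like
        #   SRC_URI = "git://example.com/foo.git;branch=main;protocol=https;name=foo"
        #   SRCREV_foo = "<commit hash>"
        # results in "SRCREV_foo" being appended to do_fetch[vardeps] below.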
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
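    # Illustrative example only (hypothetical recipe layout): with MACHINE = "qemux86",
    # a SRC_URI entry such as
    #   SRC_URI += "file://defconfig"
    # that resolves to a machine-named FILESPATH directory (e.g. <recipe dir>/qemux86/)
    # causes the scan below to promote PACKAGE_ARCH to MACHINE_ARCH.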
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if paths:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package