BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"
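# For illustration only: a distro or layer configuration could expose additional
# Python modules to the metadata by appending to OE_EXTRA_IMPORTS, e.g. with a
# hypothetical layer module:
#   OE_EXTRA_IMPORTS += "oe.mylayerutils"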

PACKAGECONFIG_CONFARGS ??= ""

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            imported = __import__(toimport)
            inject(toimport.split(".", 1)[0], imported)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module name space early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal_log "$*"
}

oe_runmake_call() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
    oe_runmake_call "$@" || die "oe_runmake failed"
}
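# Typical (illustrative) usage from a recipe's shell tasks, for example in do_install:
#   oe_runmake DESTDIR=${D} install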


def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "
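# Recipes that genuinely need no toolchain or libc (an illustrative example would be
# firmware blobs or bootstrap recipes) can opt out of the default dependencies with:
#   INHIBIT_DEFAULT_DEPS = "1"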

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
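# As an illustration, for a hypothetical recipe foo_1.0.bb the default FILESPATH
# searches the foo-1.0/, foo/ and files/ directories next to the recipe, each
# further expanded with the usual override subdirectories by base_set_filespath().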

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"
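# EXTRANATIVEPATH lets a recipe pull additional native sysroot tool directories
# into PATH, e.g. (illustrative): EXTRANATIVEPATH += "chrpath-native"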

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
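# LIC_FILES_CHKSUM entries are file:// URLs with a checksum parameter, e.g. (illustrative):
#   LIC_FILES_CHKSUM = "file://COPYING;md5=<md5sum>"
# The function above adds any absolute paths that live outside the build directories
# to do_fetch's file checksum list.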

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    # Force "python" -> "python2"
    desttool = os.path.join(dest, "python")
    if not os.path.exists(desttool):
        srctool = "python2"
        os.symlink(srctool, desttool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))
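# Users can extend the tool list from their configuration, e.g. (illustrative), in local.conf:
#   HOSTTOOLS_NONFATAL += "rsync"
# so that extra host binaries are symlinked into ${HOSTTOOLS_DIR} when present.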

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
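# Note: the fetchers unpack beneath ${WORKDIR}; for example, git-based SRC_URIs end up
# in ${WORKDIR}/git, which is why such recipes conventionally set S = "${WORKDIR}/git".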

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
    if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
        if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
            cd ${B}
            if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
                oe_runmake clean
            fi
            # -ignore_readdir_race does not work correctly with -delete;
            # use xargs to avoid spurious build failures
            find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
        fi
    fi
    if [ -n "${CONFIGURESTAMPFILE}" ]; then
        mkdir -p `dirname ${CONFIGURESTAMPFILE}`
        echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
    fi
}
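# Recipes whose build system cannot cope with "make clean" being rerun in an already
# configured tree can opt out of the clean step above, e.g. (illustrative):
#   CLEANBROKEN = "1"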

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
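    #
    # As an illustration, a user or bbappend could then enable an option for a
    # hypothetical recipe "foo" with:
    #   PACKAGECONFIG_append_pn-foo = " bar"
    # which pulls in the corresponding configure arguments and (r)dependencies below.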
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 5:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}', which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}, which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_unpack', 'umask', '022')
        d.setVarFlag('do_configure', 'umask', '022')
        d.setVarFlag('do_compile', 'umask', '022')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.setVarFlag('do_install', 'umask', '022')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package', 'umask', '022')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

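    # Illustrative example: a BSP recipe could restrict itself to particular machines
    # with COMPATIBLE_MACHINE = "(qemux86|qemuarm)"; the check below skips the recipe
    # when no MACHINEOVERRIDES entry matches that expression.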
    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine:
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            incompatwl = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                incompatwl.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    incompatwl.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if pn not in whitelist:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
                        mlprefix = d.getVar('MLPREFIX')
                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
                    raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
            elif pn in whitelist:
                if pn in incompatwl:
                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")

    # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
    # superset of all per-package licenses. We do not do advanced (pattern)
    # matching of license expressions - just check that all license strings
    # in LICENSE_<pkg> are found in LICENSE.
    license_set = oe.license.list_licenses(license)
    for pkg in d.getVar('PACKAGES').split():
        pkg_license = d.getVar('LICENSE_' + pkg)
        if pkg_license:
            unlisted = oe.license.list_licenses(pkg_license) - license_set
            if unlisted:
                bb.warn("LICENSE_%s includes licenses (%s) that are not "
                        "listed in LICENSE" % (pkg, ' '.join(unlisted)))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri in srcuri.split():
        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]

        # HTTP/FTP use the wget fetcher
        if scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif scheme == "hg":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif path.endswith('.xz') or path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # .zip should DEPEND on unzip-native for unpacking
        elif path.endswith('.zip') or path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
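        # Illustrative example: a SRC_URI with several named entries such as
        #   SRC_URI = "git://host/app.git;name=app git://host/lib.git;name=lib"
        # would have SRCREV_app and SRCREV_lib added to do_fetch's vardeps here.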

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
        sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package