blob: 0c8a4b28629e7b9b14f586308cde81cf8a2f20d5 [file] [log] [blame]
# Task run when none is given on the bitbake command line.
BB_DEFAULT_TASK ?= "build"
# Overridden to class-native/class-nativesdk etc. by BBCLASSEXTEND variants.
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

# Extra python modules (beyond the fixed list below) to expose to metadata code.
OE_EXTRA_IMPORTS ?= ""

# Python modules injected into the metadata namespace by oe_import().
OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

# Configure arguments accumulated from enabled/disabled PACKAGECONFIG options.
PACKAGECONFIG_CONFARGS ??= ""
19
def oe_import(d):
    """Import the modules listed in OE_IMPORTS and make their top-level
    package names (e.g. 'oe', 'os') visible to all metadata python code.

    Returns "" so it can be called from an inline ${@...} expansion.
    """
    import sys

    # Each layer's lib/ directory is a potential python module search path.
    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        # Newer bitbake exposes a shared execution context dict; fall back
        # to patching __builtins__ on older versions.
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        # __import__("oe.path") returns the top-level package 'oe',
        # so only the root name needs injecting.
        inject(toimport.split(".", 1)[0], imported)

    return ""
39
# We need the oe module name space early (before INHERITs get added)
# Immediate expansion (:=) forces oe_import() to run right now, at parse time.
OE_IMPORTED := "${@oe_import(d)}"
42
def lsb_distro_identifier(d):
    """Return the host distro identifier, optionally post-processed by the
    function named in LSB_DISTRO_ADJUST (when such a function exists in
    this namespace)."""
    adjust_name = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = globals().get(adjust_name) if adjust_name else None
    return oe.lsb.distro_identifier(adjust_func)
52
# Abort the current task, logging all arguments as a fatal message.
die() {
	bbfatal_log "$*"
}
56
# Invoke make with EXTRA_OEMAKE plus any extra arguments, logging the
# exact command line first so failures are reproducible by hand.
oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}
61
# Run make, converting any failure into a fatal task error.
oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
65
66
def base_dep_prepend(d):
    """Return the default toolchain/libc DEPENDS fragment, unless the
    recipe opts out by setting INHIBIT_DEFAULT_DEPS (checked unexpanded)."""
    return "" if d.getVar('INHIBIT_DEFAULT_DEPS', False) else "${BASE_DEFAULT_DEPS}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050071
BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

# Only target and nativesdk recipes receive the default toolchain deps;
# other classes (native, cross) manage their own.
BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

# Default search path for files/patches referenced from SRC_URI.
FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location its used (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"
Patrick Williamsc124f4f2015-09-15 14:41:29 -050084
def extra_path_elements(d):
    """Build a PATH fragment containing one colon-terminated
    ${STAGING_BINDIR_NATIVE} subdirectory per EXTRANATIVEPATH entry."""
    names = (d.getVar('EXTRANATIVEPATH') or "").split()
    return "".join("${STAGING_BINDIR_NATIVE}/" + name + ":" for name in names)
91
# Put staged native tool directories (EXTRANATIVEPATH) ahead of everything in PATH.
PATH_prepend = "${@extra_path_elements(d)}"
93
def get_lic_checksum_file_list(d):
    """Return a space-separated list of absolute LIC_FILES_CHKSUM paths,
    each suffixed with ':True'/':False' for current existence, so do_fetch
    re-runs when a license file appears, disappears or moves."""
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                # Files inside the build's own output trees are volatile;
                # don't track them as fetch checksum dependencies.
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)
118
def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    """Populate 'dest' with symlinks to the host tools named in the
    variable 'toolsvar' (e.g. HOSTTOOLS), resolving each tool via the
    original environment's PATH (BB_ORIGENV).  Missing tools are fatal
    unless fatal=False."""
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        # Existing links are left alone, so this is safe to run repeatedly.
        if not os.path.exists(desttool):
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.,
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache, then which(gcc)
            # would return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc, this code can check and fix that.
            if "ccache" in srctool:
                srctool = bb.utils.which(path, tool, executable=True, direction=1)
            if srctool:
                os.symlink(srctool, desttool)
            else:
                notfound.append(tool)
    # Force "python" -> "python2"
    desttool = os.path.join(dest, "python")
    if not os.path.exists(desttool):
        srctool = "python2"
        os.symlink(srctool, desttool)

    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n  %s" % " ".join(notfound))
147
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
# Re-run the fetch task when source or license-file checksums change.
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
    # Download everything listed in SRC_URI; any fetch failure is fatal.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
165
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

# Wipe ${S} before unpacking, unless S == WORKDIR in which case only the patches dir is wiped.
do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
Brad Bishop316dfdd2018-06-25 12:45:53 -0400170
python base_do_unpack() {
    # Unpack every SRC_URI entry into ${WORKDIR}; any failure is fatal.
    src_uri = (d.getVar('SRC_URI') or "").split()
    if not src_uri:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
182
def get_layers_branch_rev(d):
    """Return a list of '<layer>     = "<branch>:<rev>"' status lines for
    every layer in BBLAYERS, blanking the '= "branch:rev"' suffix on a
    layer when it repeats the one that follows it (so layers sharing one
    repository are only annotated once).

    NOTE(review): assumes BBLAYERS is non-empty — an empty list would make
    the [-1] index below raise IndexError; confirm callers guarantee this.
    """
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    # Walk backwards through the list: while adjacent entries carry the
    # same '= "branch:rev"' suffix, strip it from the earlier entry.
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2= layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            # Suffix changed: re-anchor the comparison on the new entry.
            p1 = layers_branch_rev[i].find("=")
            s1= layers_branch_rev[i][p1:]
    return layers_branch_rev
203
204
# Functions run (in order) to produce the build configuration banner lines.
BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"
207
def buildcfg_vars(d):
    """Yield one formatted 'NAME     = "value"' banner line for each
    variable listed in BUILDCFG_VARS that is actually set."""
    for name in oe.data.typed_value('BUILDCFG_VARS', d):
        value = d.getVar(name)
        if value is not None:
            yield '%-20s = "%s"' % (name, value)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500214
def buildcfg_neededvars(d):
    """Abort the build when any variable named in BUILDCFG_NEEDEDVARS is
    unset or still carries the placeholder value 'INVALID'."""
    missing = []
    for name in oe.data.typed_value("BUILDCFG_NEEDEDVARS", d):
        value = d.getVar(name)
        if not value or value == 'INVALID':
            missing.append(name)

    if missing:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(missing))
225
# Register base_eventhandler for the parse/build lifecycle events it inspects.
addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    # Dispatches on the event type 'e'; each isinstance branch below is
    # independent and may run for the same invocation (ConfigParsed also
    # matches the second condition).
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        # Record host distro / bitbake version once configuration is parsed.
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores
        # own contexts so the variables get expanded correctly for that arch, then inject back into
        # the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        # Emit the build configuration banner assembled from BUILDCFG_FUNCS.
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}
299
# Stamp recording the task hash of the last successful configure; used by
# base_do_configure to decide whether stale build output must be cleaned.
CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
# Set to "1" by recipes whose 'make clean' is known to be broken.
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
# If the recipe was reconfigured (task hash changed since the stamp was
# written), clean stale build output first, then refresh the stamp.
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# -ignore_readdir_race does not work correctly with -delete;
			# use xargs to avoid spurious build failures
			find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}
322
addtask compile after do_configure
do_compile[dirs] = "${B}"
# Default compile: run make if any makefile is present, otherwise no-op.
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}
332
addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"
337
# Default no-op install; recipes and classes override or append as needed.
base_do_install() {
	:
}
341
# Default no-op package step; the packaging classes provide the real one.
base_do_package() {
	:
}
345
# do_build is an empty umbrella task; it only anchors the dependency chain
# (and pulls in do_deploy recursively via recrdeptask).
addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}
352
def set_packagetriplet(d):
    """Set PKGTRIPLETS (base configuration) and PKGMLTRIPLETS (base plus
    every MULTILIB_VARIANTS override) to space-separated lists of
    '<arch><vendor>-<os>' triplets."""
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        # Build every arch/vendor/os combination, then reverse the list —
        # presumably so the most specific arch comes first; verify against
        # consumers of PKGTRIPLETS before relying on the ordering.
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        # Re-evaluate arch/os/vendor inside each multilib override context.
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
383
384python () {
385 import string, re
386
Brad Bishop316dfdd2018-06-25 12:45:53 -0400387 # Handle backfilling
388 oe.utils.features_backfill("DISTRO_FEATURES", d)
389 oe.utils.features_backfill("MACHINE_FEATURES", d)
390
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500391 # Handle PACKAGECONFIG
392 #
393 # These take the form:
394 #
395 # PACKAGECONFIG ??= "<default options>"
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500396 # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends"
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500397 pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
398 if pkgconfigflags:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500399 pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
400 pn = d.getVar("PN")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500401
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500402 mlprefix = d.getVar("MLPREFIX")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500403
404 def expandFilter(appends, extension, prefix):
405 appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
406 newappends = []
407 for a in appends:
408 if a.endswith("-native") or ("-cross-" in a):
409 newappends.append(a)
410 elif a.startswith("virtual/"):
411 subs = a.split("/", 1)[1]
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500412 if subs.startswith(prefix):
413 newappends.append(a + extension)
414 else:
415 newappends.append("virtual/" + prefix + subs + extension)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500416 else:
417 if a.startswith(prefix):
418 newappends.append(a + extension)
419 else:
420 newappends.append(prefix + a + extension)
421 return newappends
422
423 def appendVar(varname, appends):
424 if not appends:
425 return
426 if varname.find("DEPENDS") != -1:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500427 if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d) :
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500428 appends = expandFilter(appends, "", "nativesdk-")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500429 elif bb.data.inherits_class('native', d):
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500430 appends = expandFilter(appends, "-native", "")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500431 elif mlprefix:
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500432 appends = expandFilter(appends, "", mlprefix)
433 varname = d.expand(varname)
434 d.appendVar(varname, " " + " ".join(appends))
435
436 extradeps = []
437 extrardeps = []
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500438 extrarrecs = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500439 extraconf = []
440 for flag, flagval in sorted(pkgconfigflags.items()):
441 items = flagval.split(",")
442 num = len(items)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500443 if num > 5:
444 bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend can be specified!"
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500445 % (d.getVar('PN'), flag))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500446
447 if flag in pkgconfig:
448 if num >= 3 and items[2]:
449 extradeps.append(items[2])
450 if num >= 4 and items[3]:
451 extrardeps.append(items[3])
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500452 if num >= 5 and items[4]:
453 extrarrecs.append(items[4])
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500454 if num >= 1 and items[0]:
455 extraconf.append(items[0])
456 elif num >= 2 and items[1]:
457 extraconf.append(items[1])
458 appendVar('DEPENDS', extradeps)
459 appendVar('RDEPENDS_${PN}', extrardeps)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500460 appendVar('RRECOMMENDS_${PN}', extrarrecs)
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500461 appendVar('PACKAGECONFIG_CONFARGS', extraconf)
462
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500463 pn = d.getVar('PN')
464 license = d.getVar('LICENSE')
Brad Bishop316dfdd2018-06-25 12:45:53 -0400465 if license == "INVALID" and pn != "defaultpkgname":
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500466 bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
467
468 if bb.data.inherits_class('license', d):
469 check_license_format(d)
Brad Bishop19323692019-04-05 15:28:33 -0400470 unmatched_license_flags = check_license_flags(d)
471 if unmatched_license_flags:
472 if len(unmatched_license_flags) == 1:
473 message = "because it has a restricted license '{0}'. Which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
474 else:
475 message = "because it has restricted licenses {0}. Which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
476 ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
477 bb.debug(1, "Skipping %s %s" % (pn, message))
478 raise bb.parse.SkipRecipe(message)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500479
480 # If we're building a target package we need to use fakeroot (pseudo)
481 # in order to capture permissions, owners, groups and special files
482 if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
483 d.setVarFlag('do_unpack', 'umask', '022')
484 d.setVarFlag('do_configure', 'umask', '022')
485 d.setVarFlag('do_compile', 'umask', '022')
486 d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500487 d.setVarFlag('do_install', 'fakeroot', '1')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500488 d.setVarFlag('do_install', 'umask', '022')
489 d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500490 d.setVarFlag('do_package', 'fakeroot', '1')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500491 d.setVarFlag('do_package', 'umask', '022')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500492 d.setVarFlag('do_package_setscene', 'fakeroot', '1')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500493 d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500494 d.setVarFlag('do_devshell', 'fakeroot', '1')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500495 d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500496
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500497 need_machine = d.getVar('COMPATIBLE_MACHINE')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500498 if need_machine:
499 import re
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500500 compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500501 for m in compat_machines:
502 if re.match(need_machine, m):
503 break
504 else:
Brad Bishop316dfdd2018-06-25 12:45:53 -0400505 raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500506
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600507 source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500508 if not source_mirror_fetch:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500509 need_host = d.getVar('COMPATIBLE_HOST')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500510 if need_host:
511 import re
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500512 this_host = d.getVar('HOST_SYS')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500513 if not re.match(need_host, this_host):
Brad Bishop316dfdd2018-06-25 12:45:53 -0400514 raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500515
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500516 bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500517
518 check_license = False if pn.startswith("nativesdk-") else True
519 for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600520 "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500521 "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
522 if pn.endswith(d.expand(t)):
523 check_license = False
524 if pn.startswith("gcc-source-"):
525 check_license = False
526
527 if check_license and bad_licenses:
528 bad_licenses = expand_wildcard_licenses(d, bad_licenses)
529
530 whitelist = []
531 incompatwl = []
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500532 for lic in bad_licenses:
533 spdx_license = return_spdx(d, lic)
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800534 whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
535 if spdx_license:
536 whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
537 '''
538 We need to track what we are whitelisting and why. If pn is
539 incompatible we need to be able to note that the image that
540 is created may infact contain incompatible licenses despite
541 INCOMPATIBLE_LICENSE being set.
542 '''
543 incompatwl.extend((d.getVar("WHITELIST_" + lic) or "").split())
544 if spdx_license:
545 incompatwl.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500546
547 if not pn in whitelist:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500548 pkgs = d.getVar('PACKAGES').split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500549 skipped_pkgs = []
550 unskipped_pkgs = []
551 for pkg in pkgs:
552 if incompatible_license(d, bad_licenses, pkg):
553 skipped_pkgs.append(pkg)
554 else:
555 unskipped_pkgs.append(pkg)
556 all_skipped = skipped_pkgs and not unskipped_pkgs
557 if unskipped_pkgs:
558 for pkg in skipped_pkgs:
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500559 bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500560 mlprefix = d.getVar('MLPREFIX')
Patrick Williamsf1e5d692016-03-30 15:21:19 -0500561 d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500562 for pkg in unskipped_pkgs:
563 bb.debug(1, "INCLUDING the package " + pkg)
564 elif all_skipped or incompatible_license(d, bad_licenses):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500565 bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license))
Brad Bishop316dfdd2018-06-25 12:45:53 -0400566 raise bb.parse.SkipRecipe("it has an incompatible license: %s" % license)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500567 elif pn in whitelist:
568 if pn in incompatwl:
569 bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500570
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600571 # Try to verify per-package (LICENSE_<pkg>) values. LICENSE should be a
572 # superset of all per-package licenses. We do not do advanced (pattern)
573 # matching of license expressions - just check that all license strings
574 # in LICENSE_<pkg> are found in LICENSE.
575 license_set = oe.license.list_licenses(license)
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500576 for pkg in d.getVar('PACKAGES').split():
577 pkg_license = d.getVar('LICENSE_' + pkg)
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600578 if pkg_license:
579 unlisted = oe.license.list_licenses(pkg_license) - license_set
580 if unlisted:
581 bb.warn("LICENSE_%s includes licenses (%s) that are not "
582 "listed in LICENSE" % (pkg, ' '.join(unlisted)))
583
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500584 needsrcrev = False
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500585 srcuri = d.getVar('SRC_URI')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500586 for uri in srcuri.split():
587 (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500588
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500589 # HTTP/FTP use the wget fetcher
590 if scheme in ("http", "https", "ftp"):
591 d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500592
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500593 # Svn packages should DEPEND on subversion-native
594 if scheme == "svn":
595 needsrcrev = True
596 d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500597
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500598 # Git packages should DEPEND on git-native
599 elif scheme in ("git", "gitsm"):
600 needsrcrev = True
601 d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500602
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500603 # Mercurial packages should DEPEND on mercurial-native
604 elif scheme == "hg":
605 needsrcrev = True
606 d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500607
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600608 # Perforce packages support SRCREV = "${AUTOREV}"
609 elif scheme == "p4":
610 needsrcrev = True
611
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500612 # OSC packages should DEPEND on osc-native
613 elif scheme == "osc":
614 d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500615
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500616 elif scheme == "npm":
617 d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500618
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500619 # *.lz4 should DEPEND on lz4-native for unpacking
620 if path.endswith('.lz4'):
621 d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')
622
623 # *.lz should DEPEND on lzip-native for unpacking
624 elif path.endswith('.lz'):
625 d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')
626
627 # *.xz should DEPEND on xz-native for unpacking
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500628 elif path.endswith('.xz') or path.endswith('.txz'):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500629 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
630
631 # .zip should DEPEND on unzip-native for unpacking
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500632 elif path.endswith('.zip') or path.endswith('.jar'):
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500633 d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')
634
Brad Bishop1a4b7ee2018-12-16 17:11:34 -0800635 # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
Brad Bishopd7bf8c12018-02-25 22:55:05 -0500636 elif path.endswith('.rpm'):
637 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500638
Brad Bishop316dfdd2018-06-25 12:45:53 -0400639 # *.deb should DEPEND on xz-native for unpacking
640 elif path.endswith('.deb'):
641 d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
642
Patrick Williamsd8c66bc2016-06-20 12:57:21 -0500643 if needsrcrev:
644 d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500645
Brad Bishop15ae2502019-06-18 21:44:24 -0400646 # Gather all named SRCREVs to add to the sstate hash calculation
647 # This anonymous python snippet is called multiple times so we
648 # need to be careful to not double up the appends here and cause
649 # the base hash to mismatch the task hash
650 for uri in srcuri.split():
651 parm = bb.fetch.decodeurl(uri)[5]
652 uri_names = parm.get("name", "").split(",")
653 for uri_name in filter(None, uri_names):
654 srcrev_name = "SRCREV_{}".format(uri_name)
655 if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
656 d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
657
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500658 set_packagetriplet(d)
659
660 # 'multimachine' handling
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500661 mach_arch = d.getVar('MACHINE_ARCH')
662 pkg_arch = d.getVar('PACKAGE_ARCH')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500663
664 if (pkg_arch == mach_arch):
665 # Already machine specific - nothing further to do
666 return
667
668 #
669 # We always try to scan SRC_URI for urls with machine overrides
670 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
671 #
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500672 override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500673 if override != '0':
674 paths = []
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500675 fpaths = (d.getVar('FILESPATH') or '').split(':')
676 machine = d.getVar('MACHINE')
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500677 for p in fpaths:
678 if os.path.basename(p) == machine and os.path.isdir(p):
679 paths.append(p)
680
681 if len(paths) != 0:
682 for s in srcuri.split():
683 if not s.startswith("file://"):
684 continue
685 fetcher = bb.fetch2.Fetch([s], d)
686 local = fetcher.localpath(s)
687 for mp in paths:
688 if local.startswith(mp):
689 #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
690 d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
691 return
692
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500693 packages = d.getVar('PACKAGES').split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500694 for pkg in packages:
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500695 pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500696
697 # We could look for != PACKAGE_ARCH here but how to choose
698 # if multiple differences are present?
699 # Look through PACKAGE_ARCHS for the priority order?
700 if pkgarch and pkgarch == mach_arch:
701 d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500702 bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500703}
704
# Remove this recipe's shared-state (sstate) cache entries; runs after
# do_clean so the work directory is already gone when the cache is purged.
addtask cleansstate after do_clean
python do_cleansstate() {
    # Delegates to sstate_clean_cachefiles(), defined elsewhere in the
    # metadata (sstate class), to delete the recipe's sstate archives.
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
# No stamp: cleaning must run every time it is requested, never be skipped
# because a previous run left a stamp file behind.
do_cleansstate[nostamp] = "1"
711
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500712python do_cleanall() {
Brad Bishop6e60e8b2018-02-01 10:27:11 -0500713 src_uri = (d.getVar('SRC_URI') or "").split()
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500714 if len(src_uri) == 0:
715 return
716
717 try:
718 fetcher = bb.fetch2.Fetch(src_uri, d)
719 fetcher.clean()
Patrick Williamsc0f7c042017-02-23 20:41:17 -0600720 except bb.fetch2.BBFetchException as e:
721 bb.fatal(str(e))
Patrick Williamsc124f4f2015-09-15 14:41:29 -0500722}
723do_cleanall[nostamp] = "1"
724
725
# Export the listed task implementations defined in this class (as
# <classname>_do_*) so that recipes inheriting it get them as the default
# do_fetch/do_unpack/... task bodies, while still being able to override.
EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package